hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8a647a2890756b639fbfdfa00e8be3e61401cfc5 | 188 | py | Python | unitorch/models/unilm/__init__.py | fuliucansheng/UniTorch | 47038321593ce4e7eabda555bd58c0cf89482146 | [
"MIT"
] | 2 | 2022-02-05T08:52:00.000Z | 2022-03-27T07:01:34.000Z | unitorch/models/unilm/__init__.py | Lixin-Qian/unitorch | 47038321593ce4e7eabda555bd58c0cf89482146 | [
"MIT"
] | null | null | null | unitorch/models/unilm/__init__.py | Lixin-Qian/unitorch | 47038321593ce4e7eabda555bd58c0cf89482146 | [
"MIT"
] | 1 | 2022-03-27T07:01:13.000Z | 2022-03-27T07:01:13.000Z | # Copyright (c) FULIUCANSHENG.
# Licensed under the MIT License.
from unitorch.models.unilm.modeling import UnilmForGeneration
from unitorch.models.unilm.processing import UnilmProcessor
| 31.333333 | 61 | 0.835106 | 22 | 188 | 7.136364 | 0.772727 | 0.152866 | 0.229299 | 0.292994 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101064 | 188 | 5 | 62 | 37.6 | 0.928994 | 0.319149 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
8a84b95010908975c7dee8c0958ba66767bac3b5 | 2,502 | py | Python | src/tests/test_commands_cmdsethandler.py | reddcoin-project/ReddConnect | 5c212683de6b80b81fd15ed05239c3a1b46c3afd | [
"BSD-3-Clause"
] | 5 | 2015-01-30T08:47:59.000Z | 2022-01-22T19:27:03.000Z | src/tests/test_commands_cmdsethandler.py | reddcoin-project/ReddConnect | 5c212683de6b80b81fd15ed05239c3a1b46c3afd | [
"BSD-3-Clause"
] | 2 | 2017-12-28T21:36:48.000Z | 2017-12-28T21:36:57.000Z | src/tests/test_commands_cmdsethandler.py | reddcoin-project/ReddConnect | 5c212683de6b80b81fd15ed05239c3a1b46c3afd | [
"BSD-3-Clause"
] | 1 | 2019-01-05T15:51:37.000Z | 2019-01-05T15:51:37.000Z | import unittest
class TestImportCmdset(unittest.TestCase):
def test_import_cmdset(self):
# self.assertEqual(expected, import_cmdset(python_path, cmdsetobj, emit_to_obj, no_logging))
assert True # TODO: implement your test here
class TestCmdSetHandler(unittest.TestCase):
def test___init__(self):
# cmd_set_handler = CmdSetHandler(obj)
assert True # TODO: implement your test here
def test___str__(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.__str__())
assert True # TODO: implement your test here
def test_add(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.add(cmdset, emit_to_obj, permanent))
assert True # TODO: implement your test here
def test_add_default(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.add_default(cmdset, emit_to_obj, permanent))
assert True # TODO: implement your test here
def test_all(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.all())
assert True # TODO: implement your test here
def test_clear(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.clear())
assert True # TODO: implement your test here
def test_delete(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.delete(cmdset))
assert True # TODO: implement your test here
def test_delete_default(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.delete_default())
assert True # TODO: implement your test here
def test_has_cmdset(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.has_cmdset(cmdset_key, must_be_default))
assert True # TODO: implement your test here
def test_reset(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.reset())
assert True # TODO: implement your test here
def test_update(self):
# cmd_set_handler = CmdSetHandler(obj)
# self.assertEqual(expected, cmd_set_handler.update(init_mode))
assert True # TODO: implement your test here
if __name__ == '__main__':
unittest.main()
| 38.492308 | 100 | 0.693046 | 310 | 2,502 | 5.290323 | 0.154839 | 0.076829 | 0.166463 | 0.168293 | 0.807927 | 0.807927 | 0.787805 | 0.745122 | 0.745122 | 0.668293 | 0 | 0 | 0.221023 | 2,502 | 64 | 101 | 39.09375 | 0.841457 | 0.603917 | 0 | 0.413793 | 0 | 0 | 0.008368 | 0 | 0 | 0 | 0 | 0.015625 | 0.413793 | 1 | 0.413793 | false | 0 | 0.103448 | 0 | 0.586207 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
8a9dfe233bb3062d0b6807694fc4fd39606761e5 | 42,749 | py | Python | src/unity/python/turicreate/test/test_linear_regression.py | TimothyRHuertas/turicreate | afa00bee56d168190c6f122e14c9fbc6656b4e97 | [
"BSD-3-Clause"
] | 1 | 2018-02-10T12:05:13.000Z | 2018-02-10T12:05:13.000Z | src/unity/python/turicreate/test/test_linear_regression.py | TimothyRHuertas/turicreate | afa00bee56d168190c6f122e14c9fbc6656b4e97 | [
"BSD-3-Clause"
] | null | null | null | src/unity/python/turicreate/test/test_linear_regression.py | TimothyRHuertas/turicreate | afa00bee56d168190c6f122e14c9fbc6656b4e97 | [
"BSD-3-Clause"
] | 1 | 2020-10-21T17:46:28.000Z | 2020-10-21T17:46:28.000Z | # -*- coding: utf-8 -*-
# Copyright © 2017 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
import unittest
import turicreate as tc
import sys
import operator as op
import uuid
import os
import array
from sklearn import linear_model
import statsmodels.formula.api as sm
import shutil
# Check answers
import pandas as pd
import numpy as np
from turicreate.toolkits._main import ToolkitError
from turicreate.toolkits.regression.linear_regression import _DEFAULT_SOLVER_OPTIONS
import sys
if sys.version_info.major == 3:
from functools import reduce
import os as _os
_lfs = _os.environ['LFS_ROOT']
class LinearRegressionTest(unittest.TestCase):
"""
Unit test class for testing a Linear Regression model that has already
been created.
"""
@classmethod
def setUpClass(self):
"""
Set up (Run only once)
"""
# Simulate test data
np.random.seed(42)
n, d = 100, 10
self.sf = tc.SFrame()
for i in range(d):
self.sf.add_column(tc.SArray(np.random.rand(n)), inplace=True)
target = np.random.rand(n)
self.sf['target'] = target
## Compute the correct answers with statsmodels
formula = 'target ~ ' + \
' + '.join(['X{}'.format(i) for i in range(1, d+1)])
df = self.sf.to_dataframe()
sm_model = sm.ols(formula, data=df).fit()
self.loss = sm_model.ssr # sum of squared residuals
self.coef = list(sm_model.params)
self.stderr = list(sm_model.bse)
self.yhat = list(sm_model.fittedvalues)
self.rmse = np.sqrt(sm_model.ssr / float(n))
self.maxerr = abs(target - np.array(self.yhat)).max()
## Create the turicreate model
self.def_kwargs = _DEFAULT_SOLVER_OPTIONS
self.solver = 'auto'
self.unpacked_features = ['X{}'.format(i) for i in range(1, d+1)]
self.features = ['X{}'.format(i) for i in range(1, d+1)]
self.target = 'target'
self.def_opts = dict(list(self.def_kwargs.items()) + list({'solver' : 'auto',
'feature_rescaling' : True,
'l1_penalty' : 0,
'l2_penalty': 1e-2}.items()))
self.opts = self.def_opts.copy()
self.opts['l2_penalty'] = 0.0
self.opts['solver'] = 'newton'
self.model = tc.linear_regression.create(self.sf,
target=self.target,
features=None,
l2_penalty = 0.0,
l1_penalty = 0.0,
feature_rescaling = True,
validation_set = None,
solver = self.solver)
# self.maxerr = abs(np.array(y) - self.predict_ans).max()
self.evaluate_ans = {
'max_error': lambda x: abs(x - self.maxerr) < 1e-3,
'evaluate_time': lambda x: x > 0,
'rmse': lambda x: abs(x - self.rmse) < 1e-3,
}
# Answers
# ------------------------------------------------------------------------
self.get_ans = {
'coefficients': lambda x: isinstance(x, tc.SFrame),
'convergence_threshold': lambda x: x == self.opts['convergence_threshold'],
'features': lambda x: x == self.features,
'unpacked_features': lambda x: x == self.unpacked_features,
'feature_rescaling': lambda x: x == self.opts['feature_rescaling'],
'l1_penalty': lambda x: x == 0.0 ,
'l2_penalty': lambda x: x == 0.0 ,
'lbfgs_memory_level': lambda x: x == self.opts['lbfgs_memory_level'],
'max_iterations': lambda x: x == self.opts['max_iterations'],
'num_coefficients': lambda x: x == 11 ,
'num_examples': lambda x: x == 100,
'num_features': lambda x: x == 10 ,
'num_unpacked_features': lambda x: x == 10 ,
'progress': lambda x: isinstance(x, tc.SFrame),
'solver': lambda x: x == self.opts['solver'],
'training_solver_status': lambda x: x == "SUCCESS: Optimal solution found.",
'step_size': lambda x: x == self.opts['step_size'],
'target': lambda x: x == self.target,
'training_iterations': lambda x: x > 0,
'training_loss': lambda x: abs(x - self.loss) < 1e-5,
'training_rmse': lambda x: abs(x - self.rmse) < 1e-5,
'training_time': lambda x: x >= 0}
self.fields_ans = self.get_ans.keys()
def test__list_fields(self):
"""
Check the _list_fields function. Compare with the answer.
"""
model = self.model
fields = model._list_fields()
self.assertEqual(set(fields), set(self.fields_ans))
def test_get(self):
"""
Check the get function. Compare with the answer supplied as a lambda
function for each field.
"""
model = self.model
for field in self.fields_ans:
ans = model._get(field)
self.assertTrue(self.get_ans[field](ans), \
'''Get failed in field {}. Output was {}.'''.format(field, ans))
def test_coefficients(self, test_stderr = True):
"""
Check that the coefficient values are very close to the correct values.
"""
model = self.model
coefs = model.coefficients
coef_list = list(coefs['value'])
self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-03, atol=1e-03))
if test_stderr:
stderr_list = list(coefs['stderr'])
self.assertTrue(np.allclose(stderr_list, self.stderr, rtol=1e-03, atol=1e-03))
else:
self.assertTrue('stderr' in coefs.column_names())
self.assertEqual(list(coefs['stderr']), [None for v in coef_list])
def test_summary(self):
"""
Check the summary function. Compare with the answer supplied as
a lambda function for each field. Uses the same answers as test_get.
"""
model = self.model
model.summary()
def test_repr(self):
"""
Check the repr function.
"""
model = self.model
ans = str(model)
self.assertEqual(type(ans), str)
def test_predict(self):
"""
Check the prediction function with precomputed answers. Check that all
predictions are atmost 1e-5 away from the true answers.
"""
model = self.model
ans = model.predict(self.sf)
reduce(op.and_, map(lambda x,y: abs(x-y) < 1e-5, ans, self.yhat))
# Test extra col
self.sf['extra_col'] = 1
ans = model.predict(self.sf)
reduce(op.and_, map(lambda x,y: abs(x-y) < 1e-5, ans, self.yhat))
del self.sf['extra_col']
def test_evaluate(self):
"""
Check the evaluation function with precomputed answers.
"""
model = self.model
ans = model.evaluate(self.sf)
def check_ans():
for field in ans:
self.assertTrue(self.evaluate_ans[field](ans[field]), \
'''Evaluation failed in field {}. Output was {}'''.format(\
field, ans[field]))
check_ans()
rmse = model.evaluate(self.sf, metric = 'rmse')
check_ans()
max_error = model.evaluate(self.sf, metric = 'max_error')
check_ans()
def test_save_and_load(self):
"""
Make sure saving and loading retains things.
"""
filename = 'save_file%s' % (str(uuid.uuid4()))
self.model.save(filename)
self.model = tc.load_model(filename)
try:
self.test_coefficients()
print("Coefs passed")
self.test_summary()
print("Summary passed")
self.test_repr()
print("Repr passed")
self.test_predict()
print("Predict passed")
self.test_evaluate()
print("Evaluate passed")
self.test__list_fields()
print("List field passed")
self.test_get()
print("Get passed")
shutil.rmtree(filename)
except:
self.assertTrue(False, "Failed during save & load diagnostics")
class LinearRegressionCreateTest(unittest.TestCase):
"""
Unit test class for testing a Linear Regression create function.
"""
@classmethod
def setUpClass(self):
"""
Set up (Run only once)
"""
# Simulate test data
np.random.seed(42)
n, d = 100, 10
self.sf = tc.SFrame()
for i in range(d):
self.sf.add_column(tc.SArray(np.random.rand(n)), inplace=True)
target = np.random.rand(n)
self.sf['target'] = target
## Compute the correct answers with statsmodels
formula = 'target ~ ' + \
' + '.join(['X{}'.format(i) for i in range(1, d+1)])
df = self.sf.to_dataframe()
sm_model = sm.ols(formula, data=df).fit()
self.loss = sm_model.ssr # sum of squared residuals
self.coef = list(sm_model.params)
self.stderr = list(sm_model.bse)
self.yhat = list(sm_model.fittedvalues)
self.rmse = np.sqrt(sm_model.ssr / float(n))
self.maxerr = abs(target - np.array(self.yhat)).max()
## Create the turicreate model
self.def_kwargs = _DEFAULT_SOLVER_OPTIONS
self.solver = 'newton'
self.features = ', '.join(['X{}'.format(i) for i in range(1, d+1)])
self.target = 'target'
def _test_coefficients(self, model, test_case, test_stderr):
"""
Check that the coefficient values are very close to the correct values.
"""
coefs = model.coefficients
coef_list = list(coefs['value'])
self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-01, atol=1e-01))
if test_stderr:
stderr_list = list(coefs['stderr'])
self.assertTrue(np.allclose(stderr_list, self.stderr, rtol=1e-03, atol=1e-03))
else:
self.assertTrue('stderr' in coefs.column_names())
self.assertEqual(list(coefs['stderr']), [None for v in coef_list])
"""
test linear regression create.
"""
def _test_create_no_rescaling(self, sf, target, solver, kwargs):
model = tc.linear_regression.create(self.sf,
target=self.target,
features=None,
l2_penalty = 0.0,
l1_penalty = 0.0,
solver = solver,
feature_rescaling = False,
validation_set = None,
**kwargs)
test_case = 'solver = {solver}, kwargs = {kwargs}'.format(solver = solver,
kwargs = kwargs)
self.assertTrue(model is not None)
self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
'rmse failed: %s' % test_case)
self._test_coefficients(model, test_case, solver == 'newton')
"""
test linear regression create.
"""
def _test_create(self, sf, target, solver, kwargs):
model = tc.linear_regression.create(self.sf,
target=self.target,
features=None,
l2_penalty = 0.0,
l1_penalty = 0.0,
solver = solver,
feature_rescaling = True,
validation_set = None,
**kwargs)
test_case = 'solver = {solver}, kwargs= {kwargs}'.format(solver = solver,
kwargs = kwargs)
self.assertTrue(model is not None)
self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
'rmse failed: %s' % test_case)
self.assertTrue(abs(model.training_loss - self.loss) < 0.1,
'loss failed: %s' % test_case)
self._test_coefficients(model, test_case, solver == 'newton')
"""
Test linear regression create.
"""
def test_create(self):
kwargs = self.def_kwargs.copy()
kwargs['convergence_threshold'] = 1e-6
kwargs['max_iterations'] = 100
for solver in ['newton', 'fista', 'lbfgs']:
args = (self.sf, self.target, solver, kwargs)
self._test_create(*args)
self._test_create_no_rescaling(*args)
"""
Test linear regression create.
"""
def test_lbfgs(self):
for m in [5,21]:
kwargs = self.def_kwargs.copy()
kwargs.update({'lbfgs_memory_level': m})
kwargs['max_iterations'] = 100
args = (self.sf, self.target, 'lbfgs', kwargs)
self._test_create(*args)
self._test_create_no_rescaling(*args)
"""
Test detection of columns that are almost the same.
"""
def test_zero_variance_detection(self):
sf = self.sf
try:
sf['error-column'] = 1
model = tc.linear_regression.create(sf, self.target)
except ToolkitError:
pass
try:
sf['error-column'] = '1'
model = tc.linear_regression.create(sf, self.target)
except ToolkitError:
pass
try:
sf['error-column'] = [[1] for i in sf]
model = tc.linear_regression.create(sf, self.target)
except ToolkitError:
pass
try:
sf['error-column'] = [{1:1} for i in sf]
model = tc.linear_regression.create(sf, self.target)
except ToolkitError:
pass
del sf['error-column']
"""
Test detection of columns with nan values
"""
def test_nan_detection(self):
sf = self.sf
try:
sf['error-column'] = np.nan
model = tc.linear_regression.create(sf, self.target)
except ToolkitError:
pass
try:
sf['error-column'] = [[np.nan] for i in sf]
model = tc.linear_regression.create(sf, self.target)
except ToolkitError:
pass
try:
sf['error-column'] = [{1:np.nan} for i in sf]
model = tc.linear_regression.create(sf, self.target)
except ToolkitError:
pass
del sf['error-column']
class VectorLinearRegressionTest(unittest.TestCase):
"""
Unit test class for testing a Linear Regression create function.
"""
@classmethod
def setUpClass(self):
"""
Set up (Run only once)
"""
np.random.seed(15)
n, d = 100, 3
self.sf = tc.SFrame()
# float columns
for i in range(d):
self.sf.add_column(tc.SArray(np.random.rand(n)), inplace=True)
# target column
self.sf['target'] = np.random.randint(2, size=n)
## Get the right answer with statsmodels
df = self.sf.to_dataframe()
formula = 'target ~ ' + \
' + '.join(['X{}'.format(i+1) for i in range(d)])
sm_model = sm.ols(formula, data=df).fit()
self.loss = sm_model.ssr # sum of squared residuals
self.coef = list(sm_model.params)
self.stderr = list(sm_model.bse)
self.yhat = list(sm_model.fittedvalues)
self.rmse = np.sqrt(sm_model.ssr / float(n))
## Set the turicreate model params
self.target = 'target'
self.sf['vec'] = self.sf.apply(lambda row: [row['X{}'.format(i+1)] for i in
range(d)])
self.sf['vec'] = self.sf['vec'].apply(lambda x:x, array.array)
self.features = ['vec']
self.unpacked_features = ['vec[%s]' % (i) for i in range(d)]
self.def_kwargs= _DEFAULT_SOLVER_OPTIONS
def _test_coefficients(self, model):
"""
Check that the coefficient values are very close to the correct values.
"""
coefs = model.coefficients
coef_list = list(coefs['value'])
stderr_list = list(coefs['stderr'])
self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-01, atol=1e-01))
self.assertTrue(np.allclose(stderr_list, self.stderr, rtol=1e-03, atol=1e-03))
def _test_create(self, sf, target, features, solver,
opts, rescaling):
model = tc.linear_regression.create(sf, target, features, solver = solver,
l2_penalty = 0.0, feature_rescaling = rescaling,
validation_set = None,
**opts)
test_case = 'solver = {solver}, opts = {opts}'.format(solver = solver,
opts = opts)
self.assertTrue(model is not None)
self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
'rmse failed: %s' % test_case)
self.assertTrue(abs(model.training_loss - self.loss) < 0.1,
'loss failed: %s' % test_case)
self._test_coefficients(model)
"""
Test linear regression create.
"""
def test_create(self):
for solver in ['newton']:
args = (self.sf, self.target, self.features,
solver, self.def_kwargs, True)
self._test_create(*args)
args = (self.sf, self.target, self.features,
solver, self.def_kwargs, False)
self._test_create(*args)
def test_features(self):
model = tc.linear_regression.create(self.sf, self.target, self.features,
feature_rescaling = False, validation_set = None)
self.assertEqual(model.num_features, len(self.features))
self.assertEqual(model.features, self.features)
self.assertEqual(model.num_unpacked_features, len(self.unpacked_features))
self.assertEqual(model.unpacked_features, self.unpacked_features)
class DictLinearRegressionTest(unittest.TestCase):
"""
Unit test class for testing a Linear Regression create function.
"""
@classmethod
def setUpClass(self):
"""
Set up (Run only once)
"""
np.random.seed(15)
n, d = 100, 3
self.d = d
self.sf = tc.SFrame()
# float columns
for i in range(d):
self.sf.add_column(tc.SArray(np.random.rand(n)), inplace=True)
# target column
self.sf['target'] = np.random.randint(2, size=n)
## Get the right answer with statsmodels
df = self.sf.to_dataframe()
formula = 'target ~ ' + \
' + '.join(['X{}'.format(i+1) for i in range(d)])
sm_model = sm.ols(formula, data=df).fit()
self.loss = sm_model.ssr # sum of squared residuals
self.coef = list(sm_model.params)
self.stderr = list(sm_model.bse)
self.yhat = list(sm_model.fittedvalues)
self.rmse = np.sqrt(sm_model.ssr / float(n))
## Set the turicreate model params
self.target = 'target'
self.sf['dict'] = self.sf.apply(lambda row: {i: row['X{}'.format(i+1)] for i in
range(d)})
self.features = ['dict']
self.unpacked_features = ['dict[%s]' % i for i in range(d)]
self.def_kwargs = {
'convergence_threshold': 1e-5,
'step_size': 1.0,
'max_iterations': 100,
}
def _test_coefficients(self, model):
"""
Check that the coefficient values are very close to the correct values.
"""
coefs = model.coefficients
coef_list = list(coefs['value'])
stderr_list = list(coefs['stderr'])
self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-01, atol=1e-01))
self.assertTrue(np.allclose(stderr_list, self.stderr, rtol=1e-03, atol=1e-03))
def _test_create(self, sf, target, features, solver,
opts, rescaling):
model = tc.linear_regression.create(sf, target, features, solver = solver,
l2_penalty = 0.0, feature_rescaling = rescaling,
validation_set = None, **opts)
test_case = 'solver = {solver}, opts = {opts}'.format(solver = solver,
opts = opts)
self.assertTrue(model is not None)
self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
'rmse failed: %s' % test_case)
self.assertTrue(abs(model.training_loss - self.loss) < 0.1,
'loss failed: %s' % test_case)
self._test_coefficients(model)
"""
Test linear regression create.
"""
def test_create(self):
for solver in ['newton']:
args = (self.sf, self.target, self.features,
solver, self.def_kwargs, True)
self._test_create(*args)
args = (self.sf, self.target, self.features,
solver, self.def_kwargs, False)
self._test_create(*args)
def test_features(self):
d = self.d
self.sf['dict'] = self.sf.apply(lambda row: {i: row['X{}'.format(i+1)] for i in
range(d)})
model = tc.linear_regression.create(self.sf, self.target, self.features,
feature_rescaling = False, validation_set = None)
self.assertEqual(model.num_features, len(self.features))
self.assertEqual(model.features, self.features)
self.assertEqual(model.num_unpacked_features, len(self.unpacked_features))
self.assertEqual(model.unpacked_features, self.unpacked_features)
def test_predict_extra_cols(self):
sf = self.sf[:]
model = tc.linear_regression.create(sf, self.target, self.features,
feature_rescaling = False, validation_set = None)
pred = model.predict(sf)
sf['dict'] = sf['dict'].apply(lambda x: dict(list(x.items())
+ list({'extra_col': 0, 'extra_col_2': 1}.items())))
pred2 = model.predict(sf)
self.assertEqual(list(pred), list(pred2))
def test_evaluate_extra_cols(self):
sf = self.sf[:]
model = tc.linear_regression.create(sf, self.target, self.features,
feature_rescaling = False, validation_set = None)
eval1 = model.evaluate(sf)
sf['dict'] = sf['dict'].apply(lambda x: dict(list(x.items())
+ list({'extra_col': 0, 'extra_col_2': 1}.items())))
eval2 = model.evaluate(sf)
self.assertEqual(eval1, eval2)
class ListCategoricalLinearRegressionTest(unittest.TestCase):
"""
Unit test class for testing a Linear Regression create function.
"""
@classmethod
def setUpClass(self):
"""
Set up (Run only once)
"""
## Create fake data with a categorical variable
np.random.seed(15)
n, d = 100, 3
self.sf = tc.SFrame()
# float columns
for i in range(d):
self.sf.add_column(tc.SArray(np.random.rand(n)), inplace=True)
# categorical column
species = np.array(['cat', 'dog', 'foosa'])
idx = np.random.randint(3, size=n)
# Stats models maps categorical in alphabetical order of categories.
# We do it in the order of appearance. These three lines of code
# ensures that the two are the same.
idx[0] = 0
idx[1] = 1
idx[2] = 2
self.sf['species'] = list(species[idx])
# target column
self.sf['target'] = np.random.randint(2, size=n)
## Get the right answer with statsmodels
df = self.sf.to_dataframe()
formula = 'target ~ species + ' + \
' + '.join(['X{}'.format(i+1) for i in range(d)])
sm_model = sm.ols(formula, data=df).fit()
self.loss = sm_model.ssr # sum of squared residuals
self.stderr = list(sm_model.bse)
self.coef = list(sm_model.params)
self.yhat = list(sm_model.fittedvalues)
self.rmse = np.sqrt(sm_model.ssr / float(n))
## Set the turicreate model params
self.target = 'target'
self.features = ['species', 'X1', 'X2', 'X3']
self.unpacked_features = ['species', 'X1', 'X2', 'X3']
self.sf['species'] = self.sf["species"].apply(lambda x: [x])
self.def_kwargs = {
'convergence_threshold': 1e-5,
'step_size': 1.0,
'max_iterations': 100,
}
def _test_coefficients(self, model, test_stderr):
"""
Check that the coefficient values are very close to the correct values.
"""
coefs = model.coefficients
coef_list = list(coefs['value'])
self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-01, atol=1e-01))
if test_stderr:
stderr_list = list(coefs['stderr'])
self.assertTrue(np.allclose(stderr_list, self.stderr, rtol=1e-03, atol=1e-03))
else:
self.assertTrue('stderr' in coefs.column_names())
self.assertEqual(list(coefs['stderr']), [None for v in coef_list])
def _test_create(self, sf, target, features, solver, opts, rescaling):
model = tc.linear_regression.create(sf, target, features, solver = solver,
l2_penalty = 0.0, feature_rescaling = rescaling,
validation_set = None, **opts)
test_case = 'solver = {solver}, opts = {opts}'.format(solver = solver,
opts = opts)
self.assertTrue(model is not None)
self.assertTrue(abs(model.training_loss - self.loss) < 0.1,
'loss failed: %s' % test_case)
self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
'rmse failed: %s' % test_case)
self._test_coefficients(model, solver == 'newton')
"""
Test linear regression create.
"""
def test_create(self):
for solver in ['newton', 'lbfgs', 'fista']:
args = (self.sf, self.target, self.features,
solver, self.def_kwargs, True)
self._test_create(*args)
args = (self.sf, self.target, self.features,
solver, self.def_kwargs, False)
self._test_create(*args)
class CategoricalLinearRegressionTest(unittest.TestCase):
    """
    Unit tests for linear_regression.create() on data that mixes float
    features with one categorical (string) feature, validated against
    statsmodels OLS.
    """

    @classmethod
    def setUpClass(cls):
        """
        Set up (Run only once): build a fake dataset with 3 float columns
        plus a categorical column, and compute reference answers with
        statsmodels.
        """
        ## Create fake data with a categorical variable
        np.random.seed(15)
        n, d = 100, 3
        cls.sf = tc.SFrame()
        # float columns
        for i in range(d):
            cls.sf.add_column(tc.SArray(np.random.rand(n)), inplace=True)
        # categorical column
        species = np.array(['cat', 'dog', 'foosa'])
        idx = np.random.randint(3, size=n)
        # Stats models maps categorical in alphabetical order of categories.
        # We do it in the order of appearance. These three lines of code
        # ensures that the two are the same.
        idx[0] = 0
        idx[1] = 1
        idx[2] = 2
        cls.sf['species'] = list(species[idx])
        # target column
        cls.sf['target'] = np.random.randint(2, size=n)

        ## Get the right answer with statsmodels
        df = cls.sf.to_dataframe()
        formula = 'target ~ species + ' + \
            ' + '.join(['X{}'.format(i+1) for i in range(d)])
        sm_model = sm.ols(formula, data=df).fit()
        cls.loss = sm_model.ssr  # sum of squared residuals
        cls.stderr = list(sm_model.bse)
        cls.coef = list(sm_model.params)
        cls.yhat = list(sm_model.fittedvalues)
        cls.rmse = np.sqrt(sm_model.ssr / float(n))

        ## Set the turicreate model params
        cls.target = 'target'
        cls.features = ['species', 'X1', 'X2', 'X3']
        cls.unpacked_features = ['species', 'X1', 'X2', 'X3']
        cls.def_kwargs = {
            'convergence_threshold': 1e-5,
            'step_size': 1.0,
            'max_iterations': 100,
        }

    def _test_coefficients(self, model, test_stderr):
        """
        Check that the coefficient values are very close to the correct values.
        """
        coefs = model.coefficients
        coef_list = list(coefs['value'])
        self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-01, atol=1e-01))
        if test_stderr:
            stderr_list = list(coefs['stderr'])
            self.assertTrue(np.allclose(stderr_list, self.stderr, rtol=1e-03, atol=1e-03))
        else:
            # Solvers that do not compute stderr still expose the column,
            # filled with None.
            self.assertTrue('stderr' in coefs.column_names())
            self.assertEqual(list(coefs['stderr']), [None for v in coef_list])

    def _test_create(self, sf, target, features, solver, opts, rescaling):
        """
        Create a model with the given solver/options and validate training
        loss, rmse and coefficients against the statsmodels reference.
        """
        model = tc.linear_regression.create(sf, target, features, solver=solver,
                                            l2_penalty=0.0,
                                            feature_rescaling=rescaling,
                                            validation_set=None, **opts)
        test_case = 'solver = {solver}, opts = {opts}'.format(solver=solver,
                                                              opts=opts)
        self.assertTrue(model is not None)
        self.assertTrue(abs(model.training_loss - self.loss) < 0.1,
                        'loss failed: %s' % test_case)
        self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
                        'rmse failed: %s' % test_case)
        # stderr values are only checked for the newton solver.
        self._test_coefficients(model, solver == 'newton')

    def test_create(self):
        """
        Test linear regression create.
        """
        for solver in ['newton', 'lbfgs', 'fista']:
            args = (self.sf, self.target, self.features,
                    solver, self.def_kwargs, True)
            self._test_create(*args)
            args = (self.sf, self.target, self.features,
                    solver, self.def_kwargs, False)
            self._test_create(*args)

    def test_predict_extra_cols(self):
        """
        predict() must not fail when the categorical column contains a level
        ('rat') that was unseen at training time.
        """
        model = tc.linear_regression.create(self.sf, self.target, self.features,
                                            feature_rescaling=False,
                                            validation_set=None)
        # NOTE(review): this mutates the shared class-level SFrame; tests
        # running after this one see 'rat' instead of 'foosa'.
        self.sf['species'] = self.sf['species'].apply(lambda x: x if x != 'foosa'
                                                      else 'rat')
        model.predict(self.sf)  # smoke test: must not raise

    def test_evaluate_extra_cols(self):
        """
        evaluate() must not fail when the categorical column contains a level
        unseen at training time.
        """
        model = tc.linear_regression.create(self.sf, self.target, self.features,
                                            feature_rescaling=False,
                                            validation_set=None)
        self.sf['species'] = self.sf['species'].apply(lambda x: x if x != 'foosa'
                                                      else 'rat')
        model.evaluate(self.sf)  # smoke test: must not raise

    def test_features(self):
        """
        The model must report the feature lists it was trained with.
        """
        model = tc.linear_regression.create(self.sf, self.target, self.features,
                                            feature_rescaling=False,
                                            validation_set=None)
        self.assertEqual(model.num_features, len(self.features))
        self.assertEqual(model.features, self.features)
        self.assertEqual(model.num_unpacked_features, len(self.unpacked_features))
        self.assertEqual(model.unpacked_features, self.unpacked_features)
class L1LinearRegressionTest(unittest.TestCase):
    """
    Unit tests for linear_regression.create() with an L1 penalty, validated
    against scikit-learn's ElasticNet with l1_ratio=1 (pure lasso).
    """

    @classmethod
    def setUpClass(cls):
        """
        Set up (Run only once): write a small CSV dataset to disk and
        compute reference answers with scikit-learn.
        """
        test_data = '''y,0,1,2,3,4
38,0,3,1,0,1.47
58,1,2,2,8,4.38
30,1,1,1,0,1.64
50,1,1,3,0,2.54
49,1,1,3,1,2.06
45,0,3,1,4,4.76
42,1,1,2,0,3.05
59,0,3,3,3,2.73
47,1,2,1,0,3.14
34,0,1,1,3,4.42
53,0,2,3,0,2.36
35,1,1,1,1,4.29
42,0,1,2,2,3.81
42,0,1,2,2,3.84
51,0,3,2,7,3.15
51,1,2,1,8,5.07
40,0,1,2,3,2.73
48,1,2,1,1,3.56
34,1,1,1,7,3.54
46,1,2,1,2,2.71
45,0,1,2,6,5.18
50,1,1,3,2,2.66
61,0,3,3,3,3.7
62,1,3,1,2,3.75
51,0,1,3,8,3.96
59,0,3,3,0,2.88
65,1,2,3,5,3.37
49,0,1,3,0,2.84
37,1,1,1,9,5.12'''
        dataset = 'data_file%s.csv' % (str(uuid.uuid4()))
        cls.dataset = dataset
        # Context manager guarantees the handle is closed even on error.
        with open(dataset, 'w') as f:
            f.write(test_data)
        cls.def_kwargs = {'convergence_threshold': 1e-5,
                          'max_iterations': 1000}
        cls.features = ['0', '1', '2', '3', '4']
        cls.target = 'y'
        type_dict = {n: float for n in cls.features + [cls.target]}
        cls.sf = tc.SFrame.read_csv(dataset, header=True, delimiter=',',
                                    column_type_hints=type_dict)

        # Check answers with Numpy calculations
        # --------------------------------------------------------------
        feature_matrix = np.genfromtxt(dataset, delimiter=',', skip_header=1)
        X = feature_matrix[:, 1:]
        y = feature_matrix[:, 0]
        cls.examples = X.shape[0]

        # Fit the reference model.  sklearn's alpha here is the L1 penalty
        # divided by (2 * n_examples), matching its objective scaling.
        cls.l1_penalty = 10.0
        clf = linear_model.ElasticNet(alpha=cls.l1_penalty/(2*cls.examples),
                                      l1_ratio=1)
        clf.fit(X, y)
        cls.coef = np.append(clf.intercept_, clf.coef_)
        cls.predictions = clf.predict(X)
        cls.loss = np.dot(cls.predictions - y, cls.predictions - y)
        cls.rmse = np.sqrt(cls.loss/cls.examples)

    @classmethod
    def tearDownClass(cls):
        # Remove the temporary CSV created in setUpClass.
        os.remove(cls.dataset)

    def _test_coefficients(self, model):
        """
        Check that the coefficient values are very close to the correct values.
        """
        coefs = model.coefficients
        coef_list = list(coefs['value'])
        self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-02, atol=1e-02),
                        "%s vs %s" % (coef_list, self.coef))

    def _test_create(self, sf, target, features, solver, opts):
        """
        Create an L1-regularized model and validate coefficients and rmse.
        """
        model = tc.linear_regression.create(sf, target, features, solver=solver,
                                            l1_penalty=self.l1_penalty,
                                            l2_penalty=0.0,
                                            feature_rescaling=False,
                                            validation_set=None, **opts)
        test_case = 'solver = {solver}, opts = {opts}'.format(solver=solver,
                                                              opts=opts)
        self.assertTrue(model is not None)
        self._test_coefficients(model)
        self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
                        'rmse failed: %s' % test_case)

    def test_create(self):
        """
        Test linear regression create.
        """
        # Only the fista solver is exercised for L1 problems here.
        for solver in ['fista']:
            args = (self.sf, self.target, self.features,
                    solver, self.def_kwargs)
            self._test_create(*args)
class L2LinearRegressionTest(unittest.TestCase):
    """
    Unit tests for linear_regression.create() with an L2 penalty, validated
    against scikit-learn's ElasticNet with l1_ratio=0 (pure ridge).
    """

    @classmethod
    def setUpClass(cls):
        """
        Set up (Run only once): write a small CSV dataset to disk and
        compute reference answers with scikit-learn.
        """
        test_data = '''y,0,1,2,3,4
38,0,3,1,0,1.47
58,1,2,2,8,4.38
30,1,1,1,0,1.64
50,1,1,3,0,2.54
49,1,1,3,1,2.06
45,0,3,1,4,4.76
42,1,1,2,0,3.05
59,0,3,3,3,2.73
47,1,2,1,0,3.14
34,0,1,1,3,4.42
53,0,2,3,0,2.36
35,1,1,1,1,4.29
42,0,1,2,2,3.81
42,0,1,2,2,3.84
51,0,3,2,7,3.15
51,1,2,1,8,5.07
40,0,1,2,3,2.73
48,1,2,1,1,3.56
34,1,1,1,7,3.54
46,1,2,1,2,2.71
45,0,1,2,6,5.18
50,1,1,3,2,2.66
61,0,3,3,3,3.7
62,1,3,1,2,3.75
51,0,1,3,8,3.96
59,0,3,3,0,2.88
65,1,2,3,5,3.37
49,0,1,3,0,2.84
37,1,1,1,9,5.12'''
        dataset = 'data_file%s.csv' % (str(uuid.uuid4()))
        cls.dataset = dataset
        # Context manager guarantees the handle is closed even on error.
        with open(dataset, 'w') as f:
            f.write(test_data)
        cls.def_kwargs = {'convergence_threshold': 1e-5,
                          'step_size': 1.0,
                          'lbfgs_memory_level': 11,
                          'max_iterations': 1000}
        cls.features = ['0', '1', '2', '3', '4']
        cls.target = 'y'
        type_dict = {n: float for n in cls.features + [cls.target]}
        cls.sf = tc.SFrame.read_csv(dataset, header=True, delimiter=',',
                                    column_type_hints=type_dict)

        # Check answers with Numpy calculations
        # --------------------------------------------------------------
        feature_matrix = np.genfromtxt(dataset, delimiter=',', skip_header=1)
        X = feature_matrix[:, 1:]
        y = feature_matrix[:, 0]
        cls.examples = X.shape[0]
        cls.variables = X.shape[1] + 1  # features plus intercept

        # Fit the reference model.  alpha is the L2 penalty divided by the
        # number of examples (note: no factor of 2, unlike the L1 test).
        cls.l2_penalty = 10.0
        clf = linear_model.ElasticNet(alpha=cls.l2_penalty/(cls.examples),
                                      l1_ratio=0)
        clf.fit(X, y)
        cls.coef = np.append(clf.intercept_, clf.coef_)
        cls.predictions = clf.predict(X)
        cls.loss = np.dot(cls.predictions - y, cls.predictions - y)
        cls.rmse = np.sqrt(cls.loss/cls.examples)

    @classmethod
    def tearDownClass(cls):
        # Remove the temporary CSV created in setUpClass.
        os.remove(cls.dataset)

    def _test_coefficients(self, model):
        """
        Check that the coefficient values are very close to the correct values.
        """
        coefs = model.coefficients
        coef_list = list(coefs['value'])
        self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-01, atol=1e-01))

    def _test_create(self, sf, target, features, solver, opts):
        """
        Create an L2-regularized model and validate rmse and coefficients.
        """
        model = tc.linear_regression.create(sf, target, features, solver=solver,
                                            l2_penalty=self.l2_penalty,
                                            feature_rescaling=False,
                                            validation_set=None, **opts)
        test_case = 'solver = {solver}, opts = {opts}'.format(solver=solver,
                                                              opts=opts)
        self.assertTrue(model is not None)
        self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
                        'rmse failed: %s' % test_case)
        self._test_coefficients(model)

    def test_create(self):
        """
        Test linear regression create.
        """
        for solver in ['newton', 'lbfgs', 'fista']:
            args = (self.sf, self.target, self.features,
                    solver, self.def_kwargs)
            self._test_create(*args)
class ElasticNetLinearRegressionTest(unittest.TestCase):
    """
    Unit tests for linear_regression.create() with mixed L1/L2 (elastic net)
    penalties, validated against scikit-learn's ElasticNet (l1_ratio=0.5).
    """

    @classmethod
    def setUpClass(cls):
        """
        Set up (Run only once): write a small CSV dataset to disk and
        compute reference answers with scikit-learn.
        """
        test_data = '''y,0,1,2,3,4
38,0,3,1,0,1.47
58,1,2,2,8,4.38
30,1,1,1,0,1.64
50,1,1,3,0,2.54
49,1,1,3,1,2.06
45,0,3,1,4,4.76
42,1,1,2,0,3.05
59,0,3,3,3,2.73
47,1,2,1,0,3.14
34,0,1,1,3,4.42
53,0,2,3,0,2.36
35,1,1,1,1,4.29
42,0,1,2,2,3.81
42,0,1,2,2,3.84
51,0,3,2,7,3.15
51,1,2,1,8,5.07
40,0,1,2,3,2.73
48,1,2,1,1,3.56
34,1,1,1,7,3.54
46,1,2,1,2,2.71
45,0,1,2,6,5.18
50,1,1,3,2,2.66
61,0,3,3,3,3.7
62,1,3,1,2,3.75
51,0,1,3,8,3.96
59,0,3,3,0,2.88
65,1,2,3,5,3.37
49,0,1,3,0,2.84
37,1,1,1,9,5.12'''
        dataset = 'data_file%s.csv' % (str(uuid.uuid4()))
        cls.dataset = dataset
        # Context manager guarantees the handle is closed even on error.
        with open(dataset, 'w') as f:
            f.write(test_data)
        cls.def_kwargs = {'convergence_threshold': 1e-5,
                          'step_size': 1.0,
                          'lbfgs_memory_level': 3,
                          'max_iterations': 1000}
        cls.features = ['0', '1', '2', '3', '4']
        cls.target = 'y'
        type_dict = {n: float for n in cls.features + [cls.target]}
        cls.sf = tc.SFrame.read_csv(dataset, header=True, delimiter=',',
                                    column_type_hints=type_dict)

        # Check answers with Numpy calculations
        # --------------------------------------------------------------
        feature_matrix = np.genfromtxt(dataset, delimiter=',', skip_header=1)
        X = feature_matrix[:, 1:]
        y = feature_matrix[:, 0]
        cls.examples = X.shape[0]

        # Fit the reference model with an even L1/L2 split.
        cls.penalty = 10.0
        cls.ratio = 0.5
        clf = linear_model.ElasticNet(alpha=cls.penalty/cls.examples,
                                      l1_ratio=0.5)
        clf.fit(X, y)
        cls.coef = np.append(clf.intercept_, clf.coef_)
        cls.predictions = clf.predict(X)
        cls.loss = np.dot(cls.predictions - y, cls.predictions - y)
        cls.rmse = np.sqrt(cls.loss/cls.examples)

    @classmethod
    def tearDownClass(cls):
        # Remove the temporary CSV created in setUpClass.
        os.remove(cls.dataset)

    def _test_coefficients(self, model):
        """
        Check that the coefficient values are very close to the correct values.
        """
        coefs = model.coefficients
        coef_list = list(coefs['value'])
        self.assertTrue(np.allclose(coef_list, self.coef, rtol=1e-01, atol=1e-01),
                        "%s vs %s" % (coef_list, self.coef))

    def _test_create(self, sf, target, features, solver, opts):
        """
        Create an elastic-net model and validate rmse and coefficients.
        """
        model = tc.linear_regression.create(sf, target, features, solver=solver,
                                            l1_penalty=self.penalty,
                                            l2_penalty=0.5 * self.penalty,
                                            feature_rescaling=False,
                                            validation_set=None, **opts)
        test_case = 'solver = {solver}, opts = {opts}'.format(solver=solver,
                                                              opts=opts)
        self.assertTrue(model is not None)
        self.assertTrue(abs(model.training_rmse - self.rmse) < 0.1,
                        'rmse failed: %s' % test_case)
        self._test_coefficients(model)

    def test_create(self):
        """
        Test linear regression create.
        """
        # Only the fista solver is exercised for elastic-net problems here.
        for solver in ['fista']:
            args = (self.sf, self.target, self.features,
                    solver, self.def_kwargs)
            self._test_create(*args)
class ValidationDataLinearRegressionTest(unittest.TestCase):
    """
    Unit test class for testing create with validation data.
    """

    @classmethod
    def setUpClass(cls):
        """
        Set up (Run only once): write a small CSV dataset to disk and
        load it into an SFrame.
        """
        test_data = '''y,0,1,2,3,4
38,0,3,1,0,1.47
58,1,2,2,8,4.38
30,1,1,1,0,1.64
50,1,1,3,0,2.54
49,1,1,3,1,2.06
45,0,3,1,4,4.76
42,1,1,2,0,3.05
59,0,3,3,3,2.73
47,1,2,1,0,3.14
34,0,1,1,3,4.42
53,0,2,3,0,2.36
35,1,1,1,1,4.29
42,0,1,2,2,3.81
42,0,1,2,2,3.84
51,0,3,2,7,3.15
51,1,2,1,8,5.07
40,0,1,2,3,2.73
48,1,2,1,1,3.56
34,1,1,1,7,3.54
46,1,2,1,2,2.71
45,0,1,2,6,5.18
50,1,1,3,2,2.66
61,0,3,3,3,3.7
62,1,3,1,2,3.75
51,0,1,3,8,3.96
59,0,3,3,0,2.88
65,1,2,3,5,3.37
49,0,1,3,0,2.84
37,1,1,1,9,5.12'''
        dataset = 'data_file%s.csv' % (str(uuid.uuid4()))
        cls.dataset = dataset
        # Context manager guarantees the handle is closed even on error.
        with open(dataset, 'w') as f:
            f.write(test_data)
        cls.def_kwargs = {'convergence_threshold': 1e-4,
                          'step_size': 1.0,
                          'lbfgs_memory_level': 11,
                          'max_iterations': 200}
        cls.features = ['0', '1', '2', '3', '4']
        cls.target = 'y'
        type_dict = {n: float for n in cls.features + [cls.target]}
        cls.sf = tc.SFrame.read_csv(dataset, header=True, delimiter=',',
                                    column_type_hints=type_dict)

    @classmethod
    def tearDownClass(cls):
        # Remove the temporary CSV created in setUpClass.
        os.remove(cls.dataset)

    def test_valid_set(self):
        """
        create() must accept an explicit SFrame, 'auto', or None as the
        validation_set, and always yield a model with an SFrame progress log.
        """
        for validation_set in (self.sf, 'auto', None):
            m = tc.linear_regression.create(self.sf, target=self.target,
                                            validation_set=validation_set)
            self.assertTrue(m is not None)
            self.assertTrue(isinstance(m.progress, tc.SFrame))
| 32.883846 | 92 | 0.572458 | 5,809 | 42,749 | 4.109657 | 0.069203 | 0.023876 | 0.041469 | 0.027144 | 0.845642 | 0.82763 | 0.807314 | 0.803963 | 0.798685 | 0.792653 | 0 | 0.049178 | 0.286018 | 42,749 | 1,299 | 93 | 32.909161 | 0.732947 | 0.094271 | 0 | 0.788526 | 0 | 0 | 0.137464 | 0.006783 | 0 | 0 | 0 | 0 | 0.079865 | 1 | 0.066367 | false | 0.015748 | 0.022497 | 0 | 0.100112 | 0.008999 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8aa2e20674bfab390425a9e64d4bd71e564e3c3c | 31,549 | py | Python | venv/Lib/site-packages/mkdocs/tests/structure/file_tests.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 2 | 2021-06-18T07:48:14.000Z | 2021-06-21T11:55:01.000Z | venv/Lib/site-packages/mkdocs/tests/structure/file_tests.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 4 | 2021-01-27T17:36:21.000Z | 2021-01-27T17:36:54.000Z | venv/Lib/site-packages/mkdocs/tests/structure/file_tests.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | null | null | null | import unittest
import os
from unittest import mock
from mkdocs.structure.files import Files, File, get_files, _sort_files, _filter_paths
from mkdocs.tests.base import load_config, tempdir, PathAssertionMixin
class TestFiles(PathAssertionMixin, unittest.TestCase):
def test_file_eq(self):
    """Two File objects built from identical arguments compare equal."""
    args = ('a.md', '/path/to/docs', '/path/to/site')
    file = File(*args, use_directory_urls=False)
    self.assertTrue(file == File(*args, use_directory_urls=False))
def test_file_ne(self):
    """Files differing in name, source dir, or URL style are unequal."""
    file = File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    unequal_files = [
        # Different filename
        File('b.md', '/path/to/docs', '/path/to/site', use_directory_urls=False),
        # Different src_path
        File('a.md', '/path/to/other', '/path/to/site', use_directory_urls=False),
        # Different URL
        File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=True),
    ]
    for other in unequal_files:
        self.assertTrue(file != other)
def test_sort_files(self):
    """_sort_files places index files first, then sorts alphabetically."""
    cases = [
        (['b.md', 'bb.md', 'a.md', 'index.md', 'aa.md'],
         ['index.md', 'a.md', 'aa.md', 'b.md', 'bb.md']),
        # index.html vs index.md keep their relative input order
        (['b.md', 'index.html', 'a.md', 'index.md'],
         ['index.html', 'index.md', 'a.md', 'b.md']),
        (['a.md', 'index.md', 'b.md', 'index.html'],
         ['index.md', 'index.html', 'a.md', 'b.md']),
        (['.md', '_.md', 'a.md', 'index.md', '1.md'],
         ['index.md', '.md', '1.md', '_.md', 'a.md']),
        # duplicates survive sorting
        (['a.md', 'b.md', 'a.md'],
         ['a.md', 'a.md', 'b.md']),
        (['A.md', 'B.md', 'README.md'],
         ['README.md', 'A.md', 'B.md']),
    ]
    for unsorted_names, expected in cases:
        self.assertEqual(_sort_files(unsorted_names), expected)
def test_md_file(self):
    """Root-level .md file maps to .html when use_directory_urls=False."""
    f = File('foo.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo.md'),
        (f.abs_src_path, '/path/to/docs/foo.md'),
        (f.dest_path, 'foo.html'),
        (f.abs_dest_path, '/path/to/site/foo.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo.html')
    self.assertEqual(f.name, 'foo')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_file_use_directory_urls(self):
    """Root-level .md file maps to a directory URL when use_directory_urls=True."""
    f = File('foo.md', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'foo.md'),
        (f.abs_src_path, '/path/to/docs/foo.md'),
        (f.dest_path, 'foo/index.html'),
        (f.abs_dest_path, '/path/to/site/foo/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/')
    self.assertEqual(f.name, 'foo')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_file_nested(self):
    """Nested .md file keeps its directory and maps to .html."""
    f = File('foo/bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo/bar.md'),
        (f.abs_src_path, '/path/to/docs/foo/bar.md'),
        (f.dest_path, 'foo/bar.html'),
        (f.abs_dest_path, '/path/to/site/foo/bar.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.html')
    self.assertEqual(f.name, 'bar')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_file_nested_use_directory_urls(self):
    """Nested .md file maps to a nested directory URL."""
    f = File('foo/bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'foo/bar.md'),
        (f.abs_src_path, '/path/to/docs/foo/bar.md'),
        (f.dest_path, 'foo/bar/index.html'),
        (f.abs_dest_path, '/path/to/site/foo/bar/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar/')
    self.assertEqual(f.name, 'bar')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_index_file(self):
    """index.md maps to index.html when use_directory_urls=False."""
    f = File('index.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'index.md'),
        (f.abs_src_path, '/path/to/docs/index.md'),
        (f.dest_path, 'index.html'),
        (f.abs_dest_path, '/path/to/site/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'index.html')
    self.assertEqual(f.name, 'index')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_readme_index_file(self):
    """README.md is treated as an index file and maps to index.html."""
    f = File('README.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'README.md'),
        (f.abs_src_path, '/path/to/docs/README.md'),
        (f.dest_path, 'index.html'),
        (f.abs_dest_path, '/path/to/site/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'index.html')
    self.assertEqual(f.name, 'index')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_index_file_use_directory_urls(self):
    """Root index.md gets URL '.' when use_directory_urls=True."""
    f = File('index.md', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'index.md'),
        (f.abs_src_path, '/path/to/docs/index.md'),
        (f.dest_path, 'index.html'),
        (f.abs_dest_path, '/path/to/site/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, '.')
    self.assertEqual(f.name, 'index')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_readme_index_file_use_directory_urls(self):
    """Root README.md gets URL '.' when use_directory_urls=True."""
    f = File('README.md', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'README.md'),
        (f.abs_src_path, '/path/to/docs/README.md'),
        (f.dest_path, 'index.html'),
        (f.abs_dest_path, '/path/to/site/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, '.')
    self.assertEqual(f.name, 'index')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_index_file_nested(self):
    """Nested index.md maps to a nested index.html."""
    f = File('foo/index.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo/index.md'),
        (f.abs_src_path, '/path/to/docs/foo/index.md'),
        (f.dest_path, 'foo/index.html'),
        (f.abs_dest_path, '/path/to/site/foo/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/index.html')
    self.assertEqual(f.name, 'index')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_md_index_file_nested_use_directory_urls(self):
    """Nested index.md gets its parent directory URL."""
    f = File('foo/index.md', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'foo/index.md'),
        (f.abs_src_path, '/path/to/docs/foo/index.md'),
        (f.dest_path, 'foo/index.html'),
        (f.abs_dest_path, '/path/to/site/foo/index.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/')
    self.assertEqual(f.name, 'index')
    self.assertTrue(f.is_documentation_page())
    for wrong_kind in (f.is_static_page, f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_static_file(self):
    """An .html source file is a static page and is copied through unchanged."""
    f = File('foo/bar.html', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo/bar.html'),
        (f.abs_src_path, '/path/to/docs/foo/bar.html'),
        (f.dest_path, 'foo/bar.html'),
        (f.abs_dest_path, '/path/to/site/foo/bar.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.html')
    self.assertEqual(f.name, 'bar')
    self.assertFalse(f.is_documentation_page())
    self.assertTrue(f.is_static_page())
    for wrong_kind in (f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_static_file_use_directory_urls(self):
    """use_directory_urls has no effect on static .html files."""
    f = File('foo/bar.html', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'foo/bar.html'),
        (f.abs_src_path, '/path/to/docs/foo/bar.html'),
        (f.dest_path, 'foo/bar.html'),
        (f.abs_dest_path, '/path/to/site/foo/bar.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.html')
    self.assertEqual(f.name, 'bar')
    self.assertFalse(f.is_documentation_page())
    self.assertTrue(f.is_static_page())
    for wrong_kind in (f.is_media_file, f.is_javascript, f.is_css):
        self.assertFalse(wrong_kind())
def test_media_file(self):
    """A .jpg source file is a media file and is copied through unchanged."""
    f = File('foo/bar.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo/bar.jpg'),
        (f.abs_src_path, '/path/to/docs/foo/bar.jpg'),
        (f.dest_path, 'foo/bar.jpg'),
        (f.abs_dest_path, '/path/to/site/foo/bar.jpg'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.jpg')
    self.assertEqual(f.name, 'bar')
    for wrong_kind in (f.is_documentation_page, f.is_static_page):
        self.assertFalse(wrong_kind())
    self.assertTrue(f.is_media_file())
    self.assertFalse(f.is_javascript())
    self.assertFalse(f.is_css())
def test_media_file_use_directory_urls(self):
    """use_directory_urls has no effect on media files."""
    f = File('foo/bar.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'foo/bar.jpg'),
        (f.abs_src_path, '/path/to/docs/foo/bar.jpg'),
        (f.dest_path, 'foo/bar.jpg'),
        (f.abs_dest_path, '/path/to/site/foo/bar.jpg'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.jpg')
    self.assertEqual(f.name, 'bar')
    for wrong_kind in (f.is_documentation_page, f.is_static_page):
        self.assertFalse(wrong_kind())
    self.assertTrue(f.is_media_file())
    self.assertFalse(f.is_javascript())
    self.assertFalse(f.is_css())
def test_javascript_file(self):
    """A .js file counts as both a media file and a javascript file."""
    f = File('foo/bar.js', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo/bar.js'),
        (f.abs_src_path, '/path/to/docs/foo/bar.js'),
        (f.dest_path, 'foo/bar.js'),
        (f.abs_dest_path, '/path/to/site/foo/bar.js'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.js')
    self.assertEqual(f.name, 'bar')
    for wrong_kind in (f.is_documentation_page, f.is_static_page):
        self.assertFalse(wrong_kind())
    self.assertTrue(f.is_media_file())
    self.assertTrue(f.is_javascript())
    self.assertFalse(f.is_css())
def test_javascript_file_use_directory_urls(self):
    """use_directory_urls has no effect on .js files."""
    f = File('foo/bar.js', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'foo/bar.js'),
        (f.abs_src_path, '/path/to/docs/foo/bar.js'),
        (f.dest_path, 'foo/bar.js'),
        (f.abs_dest_path, '/path/to/site/foo/bar.js'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.js')
    self.assertEqual(f.name, 'bar')
    for wrong_kind in (f.is_documentation_page, f.is_static_page):
        self.assertFalse(wrong_kind())
    self.assertTrue(f.is_media_file())
    self.assertTrue(f.is_javascript())
    self.assertFalse(f.is_css())
def test_css_file(self):
    """A .css file counts as both a media file and a css file."""
    f = File('foo/bar.css', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo/bar.css'),
        (f.abs_src_path, '/path/to/docs/foo/bar.css'),
        (f.dest_path, 'foo/bar.css'),
        (f.abs_dest_path, '/path/to/site/foo/bar.css'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.css')
    self.assertEqual(f.name, 'bar')
    for wrong_kind in (f.is_documentation_page, f.is_static_page):
        self.assertFalse(wrong_kind())
    self.assertTrue(f.is_media_file())
    self.assertFalse(f.is_javascript())
    self.assertTrue(f.is_css())
def test_css_file_use_directory_urls(self):
    """use_directory_urls has no effect on .css files."""
    f = File('foo/bar.css', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    for actual, wanted in [
        (f.src_path, 'foo/bar.css'),
        (f.abs_src_path, '/path/to/docs/foo/bar.css'),
        (f.dest_path, 'foo/bar.css'),
        (f.abs_dest_path, '/path/to/site/foo/bar.css'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo/bar.css')
    self.assertEqual(f.name, 'bar')
    for wrong_kind in (f.is_documentation_page, f.is_static_page):
        self.assertFalse(wrong_kind())
    self.assertTrue(f.is_media_file())
    self.assertFalse(f.is_javascript())
    self.assertTrue(f.is_css())
def test_file_name_with_space(self):
    """Spaces survive in filesystem paths but are percent-encoded in the URL."""
    f = File('foo bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    for actual, wanted in [
        (f.src_path, 'foo bar.md'),
        (f.abs_src_path, '/path/to/docs/foo bar.md'),
        (f.dest_path, 'foo bar.html'),
        (f.abs_dest_path, '/path/to/site/foo bar.html'),
    ]:
        self.assertPathsEqual(actual, wanted)
    self.assertEqual(f.url, 'foo%20bar.html')
    self.assertEqual(f.name, 'foo bar')
def test_files(self):
    """Files collection: iteration, type filters, path lookup, and append."""
    kwargs = dict(use_directory_urls=True)
    fs = [
        File('index.md', '/path/to/docs', '/path/to/site', **kwargs),
        File('foo/bar.md', '/path/to/docs', '/path/to/site', **kwargs),
        File('foo/bar.html', '/path/to/docs', '/path/to/site', **kwargs),
        File('foo/bar.jpg', '/path/to/docs', '/path/to/site', **kwargs),
        File('foo/bar.js', '/path/to/docs', '/path/to/site', **kwargs),
        File('foo/bar.css', '/path/to/docs', '/path/to/site', **kwargs),
    ]
    files = Files(fs)
    self.assertEqual(list(files), fs)
    self.assertEqual(len(files), 6)
    self.assertEqual(files.documentation_pages(), [fs[0], fs[1]])
    self.assertEqual(files.static_pages(), [fs[2]])
    self.assertEqual(files.media_files(), [fs[3], fs[4], fs[5]])
    self.assertEqual(files.javascript_files(), [fs[4]])
    self.assertEqual(files.css_files(), [fs[5]])
    # Lookup and containment are checked twice on purpose: repeated calls
    # must keep returning the same result.
    for _ in range(2):
        self.assertEqual(files.get_file_from_path('foo/bar.jpg'), fs[3])
    self.assertEqual(files.get_file_from_path('missing.jpg'), None)
    for _ in range(2):
        self.assertTrue(fs[2].src_path in files)
    extra_file = File('extra.md', '/path/to/docs', '/path/to/site', **kwargs)
    self.assertFalse(extra_file.src_path in files)
    files.append(extra_file)
    self.assertEqual(len(files), 7)
    self.assertTrue(extra_file.src_path in files)
    self.assertEqual(files.documentation_pages(), [fs[0], fs[1], extra_file])
@tempdir(files=[
    'favicon.ico',
    'index.md'
])
@tempdir(files=[
    'base.html',
    'favicon.ico',
    'style.css',
    'foo.md',
    'README',
    '.ignore.txt',
    '.ignore/file.txt',
    'foo/.ignore.txt',
    'foo/.ignore/file.txt'
])
def test_add_files_from_theme(self, tdir, ddir):
    """
    Theme static files are merged into the collection without overriding
    docs_dir files of the same name.

    tdir is the theme custom_dir (inner decorator) and ddir is the docs_dir
    (outer decorator) -- presumably decorators apply bottom-up; confirm
    against the tempdir helper if this is changed.
    """
    config = load_config(docs_dir=ddir, theme={'name': None, 'custom_dir': tdir})
    env = config['theme'].get_env()
    files = get_files(config)
    # Before merging, only the docs_dir contents are present.
    self.assertEqual(
        [file.src_path for file in files],
        ['index.md', 'favicon.ico']
    )
    files.add_files_from_theme(env, config)
    # Only style.css is picked up from the theme dir; templates, markdown,
    # dotfiles and extension-less files are excluded -- presumably by the
    # theme static-file filtering rules (TODO confirm).
    self.assertEqual(
        [file.src_path for file in files],
        ['index.md', 'favicon.ico', 'style.css']
    )
    # Ensure theme file does not override docs_dir file
    self.assertEqual(
        files.get_file_from_path('favicon.ico').abs_src_path,
        os.path.normpath(os.path.join(ddir, 'favicon.ico'))
    )
def test_filter_paths(self):
    """_filter_paths(name, path, is_dir, exclude_globs) gating behavior."""
    cases = [
        # Root level file
        ('foo.md', 'foo.md', False, ['bar.md'], False),
        ('foo.md', 'foo.md', False, ['foo.md'], True),
        # Nested file
        ('foo.md', 'baz/foo.md', False, ['bar.md'], False),
        ('foo.md', 'baz/foo.md', False, ['foo.md'], True),
        # Wildcard
        ('foo.md', 'foo.md', False, ['*.txt'], False),
        ('foo.md', 'foo.md', False, ['*.md'], True),
        # Root level dir
        ('bar', 'bar', True, ['/baz'], False),
        ('bar', 'bar', True, ['/baz/'], False),
        ('bar', 'bar', True, ['/bar'], True),
        ('bar', 'bar', True, ['/bar/'], True),
        # Nested dir
        ('bar', 'foo/bar', True, ['/bar'], False),
        ('bar', 'foo/bar', True, ['/bar/'], False),
        ('bar', 'foo/bar', True, ['bar/'], True),
        # Files that look like dirs (no extension). Note that `is_dir` is `False`.
        ('bar', 'bar', False, ['bar/'], False),
        ('bar', 'foo/bar', False, ['bar/'], False),
    ]
    for name, path, is_dir, globs, should_filter in cases:
        if should_filter:
            self.assertTrue(_filter_paths(name, path, is_dir, globs))
        else:
            self.assertFalse(_filter_paths(name, path, is_dir, globs))
def test_get_relative_url_use_directory_urls(self):
    """
    url_relative_to() for files rendered with use_directory_urls=True.

    Four scenarios (root media file, nested media file, root index page,
    root markdown page) are each checked against the same list of target
    files, asserting the expected relative URL for every pairing.
    """
    to_files = [
        'index.md',
        'foo/index.md',
        'foo/bar/index.md',
        'foo/bar/baz/index.md',
        'foo.md',
        'foo/bar.md',
        'foo/bar/baz.md'
    ]
    # Expected directory-style URL of each entry in to_files.
    to_file_urls = [
        '.',
        'foo/',
        'foo/bar/',
        'foo/bar/baz/',
        'foo/',
        'foo/bar/',
        'foo/bar/baz/'
    ]

    # Scenario 1: media file at the docs root.
    from_file = File('img.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    expected = [
        'img.jpg',           # img.jpg relative to .
        '../img.jpg',        # img.jpg relative to foo/
        '../../img.jpg',     # img.jpg relative to foo/bar/
        '../../../img.jpg',  # img.jpg relative to foo/bar/baz/
        '../img.jpg',        # img.jpg relative to foo
        '../../img.jpg',     # img.jpg relative to foo/bar
        '../../../img.jpg'   # img.jpg relative to foo/bar/baz
    ]
    for i, filename in enumerate(to_files):
        file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True)
        self.assertEqual(from_file.url, 'img.jpg')
        self.assertEqual(file.url, to_file_urls[i])
        # url_relative_to accepts either a URL string or a File object.
        self.assertEqual(from_file.url_relative_to(file.url), expected[i])
        self.assertEqual(from_file.url_relative_to(file), expected[i])

    # Scenario 2: media file nested one level down.
    from_file = File('foo/img.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    expected = [
        'foo/img.jpg',    # foo/img.jpg relative to .
        'img.jpg',        # foo/img.jpg relative to foo/
        '../img.jpg',     # foo/img.jpg relative to foo/bar/
        '../../img.jpg',  # foo/img.jpg relative to foo/bar/baz/
        'img.jpg',        # foo/img.jpg relative to foo
        '../img.jpg',     # foo/img.jpg relative to foo/bar
        '../../img.jpg'   # foo/img.jpg relative to foo/bar/baz
    ]
    for i, filename in enumerate(to_files):
        file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True)
        self.assertEqual(from_file.url, 'foo/img.jpg')
        self.assertEqual(file.url, to_file_urls[i])
        self.assertEqual(from_file.url_relative_to(file.url), expected[i])
        self.assertEqual(from_file.url_relative_to(file), expected[i])

    # Scenario 3: root index page (URL '.').
    from_file = File('index.html', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    expected = [
        '.',          # . relative to .
        '..',         # . relative to foo/
        '../..',      # . relative to foo/bar/
        '../../..',   # . relative to foo/bar/baz/
        '..',         # . relative to foo
        '../..',      # . relative to foo/bar
        '../../..'    # . relative to foo/bar/baz
    ]
    for i, filename in enumerate(to_files):
        file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True)
        self.assertEqual(from_file.url, '.')
        self.assertEqual(file.url, to_file_urls[i])
        self.assertEqual(from_file.url_relative_to(file.url), expected[i])
        self.assertEqual(from_file.url_relative_to(file), expected[i])

    # Scenario 4: root markdown page (directory-style URL 'file/').
    from_file = File('file.md', '/path/to/docs', '/path/to/site', use_directory_urls=True)
    expected = [
        'file/',           # file relative to .
        '../file/',        # file relative to foo/
        '../../file/',     # file relative to foo/bar/
        '../../../file/',  # file relative to foo/bar/baz/
        '../file/',        # file relative to foo
        '../../file/',     # file relative to foo/bar
        '../../../file/'   # file relative to foo/bar/baz
    ]
    for i, filename in enumerate(to_files):
        file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True)
        self.assertEqual(from_file.url, 'file/')
        self.assertEqual(file.url, to_file_urls[i])
        self.assertEqual(from_file.url_relative_to(file.url), expected[i])
        self.assertEqual(from_file.url_relative_to(file), expected[i])
def test_get_relative_url(self):
    """Verify File.url_relative_to() for several source files against a fixed
    set of destination files, with use_directory_urls disabled.

    For each source file we check that:
      * the source file's own URL is as expected,
      * each destination file's URL is as expected,
      * url_relative_to() yields the expected relative URL whether it is
        given a URL string or a File object.
    """
    to_files = [
        'index.md',
        'foo/index.md',
        'foo/bar/index.md',
        'foo/bar/baz/index.md',
        'foo.md',
        'foo/bar.md',
        'foo/bar/baz.md',
    ]
    to_file_urls = [
        'index.html',
        'foo/index.html',
        'foo/bar/index.html',
        'foo/bar/baz/index.html',
        'foo.html',
        'foo/bar.html',
        'foo/bar/baz.html',
    ]

    def check(src_path, src_url, expected_relative):
        # Build the source file once, then resolve its URL relative to
        # every destination file in `to_files`, in order.
        from_file = File(src_path, '/path/to/docs', '/path/to/site', use_directory_urls=False)
        for dest_path, dest_url, want in zip(to_files, to_file_urls, expected_relative):
            dest = File(dest_path, '/path/to/docs', '/path/to/site', use_directory_urls=False)
            self.assertEqual(from_file.url, src_url)
            self.assertEqual(dest.url, dest_url)
            # Both the string and the File forms must agree.
            self.assertEqual(from_file.url_relative_to(dest.url), want)
            self.assertEqual(from_file.url_relative_to(dest), want)

    check('img.jpg', 'img.jpg', [
        'img.jpg',            # img.jpg relative to .
        '../img.jpg',         # img.jpg relative to foo/
        '../../img.jpg',      # img.jpg relative to foo/bar/
        '../../../img.jpg',   # img.jpg relative to foo/bar/baz/
        'img.jpg',            # img.jpg relative to foo.html
        '../img.jpg',         # img.jpg relative to foo/bar.html
        '../../img.jpg',      # img.jpg relative to foo/bar/baz.html
    ])
    check('foo/img.jpg', 'foo/img.jpg', [
        'foo/img.jpg',        # foo/img.jpg relative to .
        'img.jpg',            # foo/img.jpg relative to foo/
        '../img.jpg',         # foo/img.jpg relative to foo/bar/
        '../../img.jpg',      # foo/img.jpg relative to foo/bar/baz/
        'foo/img.jpg',        # foo/img.jpg relative to foo.html
        'img.jpg',            # foo/img.jpg relative to foo/bar.html
        '../img.jpg',         # foo/img.jpg relative to foo/bar/baz.html
    ])
    check('index.html', 'index.html', [
        'index.html',           # index.html relative to .
        '../index.html',        # index.html relative to foo/
        '../../index.html',     # index.html relative to foo/bar/
        '../../../index.html',  # index.html relative to foo/bar/baz/
        'index.html',           # index.html relative to foo.html
        '../index.html',        # index.html relative to foo/bar.html
        '../../index.html',     # index.html relative to foo/bar/baz.html
    ])
    check('file.html', 'file.html', [
        'file.html',            # file.html relative to .
        '../file.html',         # file.html relative to foo/
        '../../file.html',      # file.html relative to foo/bar/
        '../../../file.html',   # file.html relative to foo/bar/baz/
        'file.html',            # file.html relative to foo.html
        '../file.html',         # file.html relative to foo/bar.html
        '../../file.html',      # file.html relative to foo/bar/baz.html
    ])
@tempdir(files=[
    'index.md',
    'bar.css',
    'bar.html',
    'bar.jpg',
    'bar.js',
    'bar.md',
    '.dotfile',
    'templates/foo.html'
])
def test_get_files(self, tdir):
    """get_files() collects docs-dir files in order, excluding dotfiles
    and files under the templates directory."""
    config = load_config(docs_dir=tdir, extra_css=['bar.css'], extra_javascript=['bar.js'])
    result = get_files(config)
    expected_paths = ['index.md', 'bar.css', 'bar.html', 'bar.jpg', 'bar.js', 'bar.md']
    self.assertIsInstance(result, Files)
    # Same number of entries and same source paths, in the same order.
    self.assertEqual(len(result), len(expected_paths))
    self.assertEqual([f.src_path for f in result], expected_paths)
@tempdir(files=[
    'README.md',
    'foo.md'
])
def test_get_files_include_readme_without_index(self, tdir):
    """README.md is kept when no index.md is present in the docs dir."""
    config = load_config(docs_dir=tdir)
    result = get_files(config)
    expected_paths = ['README.md', 'foo.md']
    self.assertIsInstance(result, Files)
    self.assertEqual(len(result), len(expected_paths))
    self.assertEqual([f.src_path for f in result], expected_paths)
@tempdir(files=[
    'index.md',
    'README.md',
    'foo.md'
])
def test_get_files_exclude_readme_with_index(self, tdir):
    """README.md is dropped when an index.md exists alongside it."""
    config = load_config(docs_dir=tdir)
    result = get_files(config)
    expected_paths = ['index.md', 'foo.md']
    self.assertIsInstance(result, Files)
    self.assertEqual(len(result), len(expected_paths))
    self.assertEqual([f.src_path for f in result], expected_paths)
@tempdir()
@tempdir(files={'test.txt': 'source content'})
def test_copy_file(self, src_dir, dest_dir):
    """copy_file() creates the destination file from the source file."""
    target = os.path.join(dest_dir, 'test.txt')
    f = File('test.txt', src_dir, dest_dir, use_directory_urls=False)
    # Destination must not exist before the copy and must exist after it.
    self.assertPathNotExists(target)
    f.copy_file()
    self.assertPathIsFile(target)
@tempdir(files={'test.txt': 'destination content'})
@tempdir(files={'test.txt': 'source content'})
def test_copy_file_clean_modified(self, src_dir, dest_dir):
    """With dirty=False a modified source always overwrites the destination."""
    f = File('test.txt', src_dir, dest_dir, use_directory_urls=False)
    f.is_modified = mock.Mock(return_value=True)
    target = os.path.join(dest_dir, 'test.txt')
    f.copy_file(dirty=False)
    self.assertPathIsFile(target)
    # The pre-existing destination content must have been replaced.
    with open(target, 'r', encoding='utf-8') as fh:
        self.assertEqual(fh.read(), 'source content')
@tempdir(files={'test.txt': 'destination content'})
@tempdir(files={'test.txt': 'source content'})
def test_copy_file_dirty_modified(self, src_dir, dest_dir):
    """With dirty=True a modified source still overwrites the destination."""
    f = File('test.txt', src_dir, dest_dir, use_directory_urls=False)
    f.is_modified = mock.Mock(return_value=True)
    target = os.path.join(dest_dir, 'test.txt')
    f.copy_file(dirty=True)
    self.assertPathIsFile(target)
    # Modified file is copied even in dirty mode.
    with open(target, 'r', encoding='utf-8') as fh:
        self.assertEqual(fh.read(), 'source content')
@tempdir(files={'test.txt': 'destination content'})
@tempdir(files={'test.txt': 'source content'})
def test_copy_file_dirty_not_modified(self, src_dir, dest_dir):
    """With dirty=True an unmodified source leaves the destination untouched."""
    f = File('test.txt', src_dir, dest_dir, use_directory_urls=False)
    f.is_modified = mock.Mock(return_value=False)
    target = os.path.join(dest_dir, 'test.txt')
    f.copy_file(dirty=True)
    self.assertPathIsFile(target)
    # Unmodified file is skipped in dirty mode: original content survives.
    with open(target, 'r', encoding='utf-8') as fh:
        self.assertEqual(fh.read(), 'destination content')
| 46.601182 | 106 | 0.604552 | 4,241 | 31,549 | 4.324216 | 0.034662 | 0.043841 | 0.087028 | 0.066743 | 0.914881 | 0.90283 | 0.886853 | 0.872294 | 0.843176 | 0.806096 | 0 | 0.000948 | 0.230816 | 31,549 | 676 | 107 | 46.670118 | 0.75477 | 0.059241 | 0 | 0.647255 | 0 | 0 | 0.181778 | 0.029447 | 0 | 0 | 0 | 0 | 0.497504 | 1 | 0.056572 | false | 0 | 0.008319 | 0 | 0.066556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8acaff37f88cf7f5e085a2dd0309643053909a77 | 280,930 | py | Python | plugins/modules/oci_waas_policy.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_waas_policy.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_waas_policy.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: declares the metadata schema version,
# the module's maturity status, and who supports it. This file is marked
# as generated (see header) — do not hand-edit values here.
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_waas_policy
short_description: Manage a WaasPolicy resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a WaasPolicy resource in Oracle Cloud Infrastructure
- For I(state=present), creates a new Web Application Acceleration and Security (WAAS) policy in the specified compartment. A WAAS policy must be
established before creating Web Application Firewall (WAF) rules. To use WAF rules, your web application's origin servers must defined in the `WaasPolicy`
schema.
- A domain name must be specified when creating a WAAS policy. The domain name should be different from the origins specified in your `WaasPolicy`. Once
domain name is entered and stored, it is unchangeable.
- Use the record data returned in the `cname` field of the `WaasPolicy` object to create a CNAME record in your DNS configuration that will direct your
domain's traffic through the WAF.
- For the purposes of access control, you must provide the OCID of the compartment where you want the service to reside. For information about access
control and compartments, see L(Overview of the IAM Service,https://docs.cloud.oracle.com/iaas/Content/Identity/Concepts/overview.htm).
- You must specify a display name and domain for the WAAS policy. The display name does not have to be unique and can be changed. The domain name should be
different from every origin specified in `WaasPolicy`.
- All Oracle Cloud Infrastructure resources, including WAAS policies, receive a unique, Oracle-assigned ID called an Oracle Cloud Identifier (OCID). When a
resource is created, you can find its OCID in the response. You can also retrieve a resource's OCID by using a list API operation for that resource type,
or by viewing the resource in the Console. Fore more information, see L(Resource
Identifiers,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm).
- "**Note:** After sending the POST request, the new object's state will temporarily be `CREATING`. Ensure that the resource's state has changed to `ACTIVE`
before use."
- "This resource has the following action operations in the M(oracle.oci.oci_waas_policy_actions) module: accept_recommendations, change_compartment,
purge_cache."
version_added: "2.9.0"
author: Oracle (@oracle)
options:
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment in which to create the WAAS policy.
- Required for create using I(state=present).
- Required for update when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- Required for delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
type: str
display_name:
description:
- A user-friendly name for the WAAS policy. The name can be changed and does not need to be unique.
- Required for create, update, delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- This parameter is updatable when C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["name"]
domain:
description:
- The web application domain that the WAAS policy protects.
- Required for create using I(state=present).
type: str
additional_domains:
description:
- An array of additional domains for the specified web application.
- This parameter is updatable.
type: list
elements: str
origins:
description:
- A map of host to origin for the web application. The key should be a customer friendly name for the host, ex. primary, secondary, etc.
- This parameter is updatable.
type: dict
suboptions:
uri:
description:
- The URI of the origin. Does not support paths. Port numbers should be specified in the `httpPort` and `httpsPort` fields.
type: str
required: true
http_port:
description:
- "The HTTP port on the origin that the web application listens on. If unspecified, defaults to `80`. If `0` is specified - the origin is
not used for HTTP traffic."
type: int
https_port:
description:
- "The HTTPS port on the origin that the web application listens on. If unspecified, defaults to `443`. If `0` is specified - the origin is
not used for HTTPS traffic."
type: int
custom_headers:
description:
- A list of HTTP headers to forward to your origin.
type: list
elements: dict
suboptions:
name:
description:
- The name of the header.
type: str
required: true
value:
description:
- The value of the header.
type: str
required: true
origin_groups:
description:
- The map of origin groups and their keys used to associate origins to the `wafConfig`. Origin groups allow you to apply weights to groups of
origins for load balancing purposes. Origins with higher weights will receive larger proportions of client requests.
To add additional origins to your WAAS policy, update the `origins` field of a `UpdateWaasPolicy` request.
- This parameter is updatable.
type: dict
suboptions:
origins:
description:
- The list of objects containing origin references and additional properties.
type: list
elements: dict
suboptions:
origin:
description:
- The IP address or CIDR notation of the origin server.
type: str
weight:
description:
- The weight of the origin used in load balancing. Origins with higher weights will receive larger proportions of client requests.
type: int
policy_config:
description:
- ""
- This parameter is updatable.
type: dict
suboptions:
certificate_id:
description:
- The OCID of the SSL certificate to use if HTTPS is supported.
type: str
is_https_enabled:
description:
- Enable or disable HTTPS support. If true, a `certificateId` is required. If unspecified, defaults to `false`.
type: bool
is_https_forced:
description:
- Force HTTP to HTTPS redirection. If unspecified, defaults to `false`.
type: bool
tls_protocols:
description:
- "A list of allowed TLS protocols. Only applicable when HTTPS support is enabled.
The TLS protocol is negotiated while the request is connecting and the most recent protocol supported by both the edge node and client
browser will be selected. If no such version exists, the connection will be aborted.
- **TLS_V1:** corresponds to TLS 1.0 specification."
- "- **TLS_V1_1:** corresponds to TLS 1.1 specification."
- "- **TLS_V1_2:** corresponds to TLS 1.2 specification."
- "- **TLS_V1_3:** corresponds to TLS 1.3 specification."
- Enabled TLS protocols must go in a row. For example if `TLS_v1_1` and `TLS_V1_3` are enabled, `TLS_V1_2` must be enabled too.
type: list
elements: str
choices:
- "TLS_V1"
- "TLS_V1_1"
- "TLS_V1_2"
- "TLS_V1_3"
is_origin_compression_enabled:
description:
- "Enable or disable GZIP compression of origin responses. If enabled, the header `Accept-Encoding: gzip` is sent to origin, otherwise, the
empty `Accept-Encoding:` header is used."
type: bool
is_behind_cdn:
description:
- Enabling `isBehindCdn` allows for the collection of IP addresses from client requests if the WAF is connected to a CDN.
type: bool
client_address_header:
description:
- Specifies an HTTP header name which is treated as the connecting client's IP address. Applicable only if `isBehindCdn` is enabled.
- The edge node reads this header and its value and sets the client IP address as specified. It does not create the header if the header is
not present in the request. If the header is not present, the connecting IP address will be used as the client's true IP address. It uses
the last IP address in the header's value as the true IP address.
- "Example: `X-Client-Ip: 11.1.1.1, 13.3.3.3`"
- In the case of multiple headers with the same name, only the first header will be used. It is assumed that CDN sets the correct client IP
address to prevent spoofing.
- "- **X_FORWARDED_FOR:** Corresponds to `X-Forwarded-For` header name."
- "- **X_CLIENT_IP:** Corresponds to `X-Client-Ip` header name."
- "- **X_REAL_IP:** Corresponds to `X-Real-Ip` header name."
- "- **CLIENT_IP:** Corresponds to `Client-Ip` header name."
- "- **TRUE_CLIENT_IP:** Corresponds to `True-Client-Ip` header name."
type: str
choices:
- "X_FORWARDED_FOR"
- "X_CLIENT_IP"
- "X_REAL_IP"
- "CLIENT_IP"
- "TRUE_CLIENT_IP"
is_cache_control_respected:
description:
- "Enable or disable automatic content caching based on the response `cache-control` header. This feature enables the origin to act as a
proxy cache. Caching is usually defined using `cache-control` header. For example `cache-control: max-age=120` means that the returned
resource is valid for 120 seconds. Caching rules will overwrite this setting."
type: bool
is_response_buffering_enabled:
description:
- Enable or disable buffering of responses from the origin. Buffering improves overall stability in case of network issues, but slightly
increases Time To First Byte.
type: bool
cipher_group:
description:
- "The set cipher group for the configured TLS protocol. This sets the configuration for the TLS connections between clients and edge nodes
only.
- **DEFAULT:** Cipher group supports TLS 1.0, TLS 1.1, TLS 1.2, TLS 1.3 protocols. It has the following ciphers enabled: `ECDHE-RSA-
AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-
DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-
RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-
DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-
SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:!DES-
CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA`"
type: str
choices:
- "DEFAULT"
load_balancing_method:
description:
- An object that represents a load balancing method and its properties.
type: dict
suboptions:
method:
description:
- Load balancing methods are algorithms used to efficiently distribute traffic among origin servers.
- "- **L(IP_HASH,https://docs.cloud.oracle.com/iaas/api/#/en/waas/latest/datatypes/IPHashLoadBalancingMethod):** All the incoming
requests from the same client IP address should go to the same content origination server. IP_HASH load balancing method uses
origin weights when choosing which origin should the hash be assigned to initially."
- "- **L(ROUND_ROBIN,https://docs.cloud.oracle.com/iaas/api/#/en/waas/latest/datatypes/RoundRobinLoadBalancingMethod):** Forwards
requests sequentially to the available origin servers. The first request - to the first origin server, the second request - to the
next origin server, and so on. After it sends a request to the last origin server, it starts again with the first origin server.
When using weights on origins, Weighted Round Robin assigns more requests to origins with a greater weight. Over a period of time,
origins will receive a number of requests in proportion to their weight."
- "- **L(STICKY_COOKIE,https://docs.cloud.oracle.com/iaas/api/#/en/waas/latest/datatypes/StickyCookieLoadBalancingMethod):** Adds a
session cookie to the first response from the origin server and identifies the server that sent the response. The client's next
request contains the cookie value, and nginx routes the request to the origin server that responded to the first request.
STICKY_COOKIE load balancing method falls back to Round Robin for the first request."
type: str
choices:
- "ROUND_ROBIN"
- "STICKY_COOKIE"
- "IP_HASH"
required: true
name:
description:
- The name of the cookie used to track the persistence.
Can contain any US-ASCII character except separator or control character.
- Applicable when method is 'STICKY_COOKIE'
type: str
domain:
description:
- The domain for which the cookie is set, defaults to WAAS policy domain.
- Applicable when method is 'STICKY_COOKIE'
type: str
expiration_time_in_seconds:
description:
- The time for which a browser should keep the cookie in seconds.
Empty value will cause the cookie to expire at the end of a browser session.
- Applicable when method is 'STICKY_COOKIE'
type: int
websocket_path_prefixes:
description:
- ModSecurity is not capable to inspect WebSockets. Therefore paths specified here have WAF disabled if Connection request header from the
client has the value Upgrade (case insensitive matching) and Upgrade request header has the value websocket (case insensitive matching).
Paths matches if the concatenation of request URL path and query starts with the contents of the one of `websocketPathPrefixes` array
value. In All other cases challenges, like JSC, HIC and etc., remain active.
type: list
elements: str
is_sni_enabled:
description:
- SNI stands for Server Name Indication and is an extension of the TLS protocol. It indicates which hostname is being contacted by the
browser at the beginning of the 'handshake'-process. This allows a server to connect multiple SSL Certificates to one IP address and port.
type: bool
health_checks:
description:
- ""
type: dict
suboptions:
is_enabled:
description:
- Enables or disables the health checks.
type: bool
method:
description:
- An HTTP verb (i.e. HEAD, GET, or POST) to use when performing the health check.
type: str
choices:
- "GET"
- "HEAD"
- "POST"
path:
description:
- Path to visit on your origins when performing the health check.
type: str
headers:
description:
- "HTTP header fields to include in health check requests, expressed as `\\"name\\": \\"value\\"` properties. Because HTTP header
field names are case-insensitive, any use of names that are case-insensitive equal to other names will be rejected. If Host is not
specified, requests will include a Host header field with value matching the policy's protected domain. If User-Agent is not
specified, requests will include a User-Agent header field with value \\"waf health checks\\"."
- "**Note:** The only currently-supported header fields are Host and User-Agent."
type: dict
expected_response_code_group:
description:
- "The HTTP response codes that signify a healthy state.
- **2XX:** Success response code group.
- **3XX:** Redirection response code group.
- **4XX:** Client errors response code group.
- **5XX:** Server errors response code group."
type: list
elements: str
choices:
- "2XX"
- "3XX"
- "4XX"
- "5XX"
is_response_text_check_enabled:
description:
- Enables or disables additional check for predefined text in addition to response code.
type: bool
expected_response_text:
description:
- Health check will search for the given text in a case-sensitive manner within the response body and will fail if the text is not
found.
type: str
interval_in_seconds:
description:
- Time between health checks of an individual origin server, in seconds.
type: int
timeout_in_seconds:
description:
- Response timeout represents wait time until request is considered failed, in seconds.
type: int
healthy_threshold:
description:
- Number of successful health checks after which the server is marked up.
type: int
unhealthy_threshold:
description:
- Number of failed health checks after which the server is marked down.
type: int
waf_config:
description:
- ""
- This parameter is updatable.
type: dict
suboptions:
access_rules:
description:
- The access rules applied to the Web Application Firewall. Access rules allow custom content access policies to be defined and `ALLOW`,
`DETECT`, or `BLOCK` actions to be taken on a request when specified criteria are met.
type: list
elements: dict
suboptions:
name:
description:
- The unique name of the access rule.
type: str
required: true
criteria:
description:
- The list of access rule criteria. The rule would be applied only for the requests that matched all the listed conditions.
type: list
elements: dict
required: true
suboptions:
condition:
description:
- "The criteria the access rule and JavaScript Challenge uses to determine if action should be taken on a request.
- **URL_IS:** Matches if the concatenation of request URL path and query is identical to the contents of the `value`
field. URL must start with a `/`.
- **URL_IS_NOT:** Matches if the concatenation of request URL path and query is not identical to the contents of the
`value` field. URL must start with a `/`.
- **URL_STARTS_WITH:** Matches if the concatenation of request URL path and query starts with the contents of the `value`
field. URL must start with a `/`.
- **URL_PART_ENDS_WITH:** Matches if the concatenation of request URL path and query ends with the contents of the `value`
field.
- **URL_PART_CONTAINS:** Matches if the concatenation of request URL path and query contains the contents of the `value`
field.
- **URL_REGEX:** Matches if the concatenation of request URL path and query is described by the regular expression in the
value field. The value must be a valid regular expression recognized by the PCRE library in Nginx
(https://www.pcre.org).
- **URL_DOES_NOT_MATCH_REGEX:** Matches if the concatenation of request URL path and query is not described by the regular
expression in the `value` field. The value must be a valid regular expression recognized by the PCRE library in Nginx
(https://www.pcre.org).
- **URL_DOES_NOT_START_WITH:** Matches if the concatenation of request URL path and query does not start with the contents
of the `value` field.
- **URL_PART_DOES_NOT_CONTAIN:** Matches if the concatenation of request URL path and query does not contain the contents
of the `value` field.
- **URL_PART_DOES_NOT_END_WITH:** Matches if the concatenation of request URL path and query does not end with the
contents of the `value` field.
- **IP_IS:** Matches if the request originates from one of the IP addresses contained in the defined address list. The
`value` in this case is string with one or multiple IPs or CIDR notations separated by new line symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IS_NOT:** Matches if the request does not originate from any of the IP addresses contained in the defined address
list. The `value` in this case is string with one or multiple IPs or CIDR notations separated by new line symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IN_LIST:** Matches if the request originates from one of the IP addresses contained in the referenced address list.
The `value` in this case is OCID of the address list.
- **IP_NOT_IN_LIST:** Matches if the request does not originate from any IP address contained in the referenced address
list. The `value` field in this case is OCID of the address list.
- **HTTP_HEADER_CONTAINS:** The HTTP_HEADER_CONTAINS criteria is defined using a compound value separated by a colon: a
header field name and a header field value. `host:test.example.com` is an example of a criteria value where `host` is
the header field name and `test.example.com` is the header field value. A request matches when the header field name is
a case insensitive match and the header field value is a case insensitive, substring match.
*Example:* With a criteria value of `host:test.example.com`, where `host` is the name of the field and `test.example.com`
is the value of the host field, a request with the header values, `Host: www.test.example.com` will match, where as a
request with header values of `host: www.example.com` or `host: test.sub.example.com` will not match.
- **HTTP_METHOD_IS:** Matches if the request method is identical to one of the values listed in field. The `value` in this
case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of available methods:
`GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **HTTP_METHOD_IS_NOT:** Matches if the request is not identical to any of the contents of the `value` field. The
`value` in this case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of available
methods: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **COUNTRY_IS:** Matches if the request originates from one of countries in the `value` field. The `value` in this case
is string with one or multiple countries separated by new line symbol \\\\n Country codes are in ISO 3166-1 alpha-2
format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **COUNTRY_IS_NOT:** Matches if the request does not originate from any of countries in the `value` field. The `value` in
this case is string with one or multiple countries separated by new line symbol \\\\n Country codes are in ISO 3166-1
alpha-2 format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **USER_AGENT_IS:** Matches if the requesting user agent is identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`
- **USER_AGENT_IS_NOT:** Matches if the requesting user agent is not identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`"
type: str
choices:
- "URL_IS"
- "URL_IS_NOT"
- "URL_STARTS_WITH"
- "URL_PART_ENDS_WITH"
- "URL_PART_CONTAINS"
- "URL_REGEX"
- "URL_DOES_NOT_MATCH_REGEX"
- "URL_DOES_NOT_START_WITH"
- "URL_PART_DOES_NOT_CONTAIN"
- "URL_PART_DOES_NOT_END_WITH"
- "IP_IS"
- "IP_IS_NOT"
- "IP_IN_LIST"
- "IP_NOT_IN_LIST"
- "HTTP_HEADER_CONTAINS"
- "HTTP_METHOD_IS"
- "HTTP_METHOD_IS_NOT"
- "COUNTRY_IS"
- "COUNTRY_IS_NOT"
- "USER_AGENT_IS"
- "USER_AGENT_IS_NOT"
required: true
value:
description:
- The criteria value.
type: str
required: true
is_case_sensitive:
description:
- When enabled, the condition will be matched with case-sensitive rules.
type: bool
action:
description:
- The action to take when the access criteria are met for a rule. If unspecified, defaults to `ALLOW`.
- "- **ALLOW:** Takes no action, just logs the request."
- "- **DETECT:** Takes no action, but creates an alert for the request."
- "- **BLOCK:** Blocks the request by returning specified response code or showing error page."
- "- **BYPASS:** Bypasses some or all challenges."
- "- **REDIRECT:** Redirects the request to the specified URL. These fields are required when `REDIRECT` is selected: `redirectUrl`,
`redirectResponseCode`."
- "- **SHOW_CAPTCHA:** Show a CAPTCHA Challenge page instead of the requested page."
- Regardless of action, no further rules are processed once a rule is matched.
type: str
choices:
- "ALLOW"
- "DETECT"
- "BLOCK"
- "BYPASS"
- "REDIRECT"
- "SHOW_CAPTCHA"
required: true
block_action:
description:
- The method used to block requests if `action` is set to `BLOCK` and the access criteria are met. If unspecified, defaults to
`SET_RESPONSE_CODE`.
type: str
choices:
- "SET_RESPONSE_CODE"
- "SHOW_ERROR_PAGE"
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE`, and the access
criteria are met. If unspecified, defaults to `403`. The list of available response codes: `200`, `201`, `202`, `204`, `206`,
`300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`, `408`, `409`, `411`, `412`, `413`, `414`, `415`,
`416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`, `502`, `503`, `504`, `507`."
type: int
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the access
criteria are met. If unspecified, defaults to 'Access to the website is blocked.'
type: str
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
access criteria are met. If unspecified, defaults to 'Access rules'.
type: str
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
access criteria are met. If unspecified, defaults to 'Access blocked by website owner. Please contact support.'
type: str
bypass_challenges:
description:
- The list of challenges to bypass when `action` is set to `BYPASS`. If unspecified or empty, all challenges are bypassed.
- "- **JS_CHALLENGE:** Bypasses JavaScript Challenge."
- "- **DEVICE_FINGERPRINT_CHALLENGE:** Bypasses Device Fingerprint Challenge."
- "- **HUMAN_INTERACTION_CHALLENGE:** Bypasses Human Interaction Challenge."
- "- **CAPTCHA:** Bypasses CAPTCHA Challenge."
type: list
elements: str
choices:
- "JS_CHALLENGE"
- "DEVICE_FINGERPRINT_CHALLENGE"
- "HUMAN_INTERACTION_CHALLENGE"
- "CAPTCHA"
redirect_url:
description:
- The target to which the request should be redirected, represented as a URI reference. Required when `action` is `REDIRECT`.
type: str
redirect_response_code:
description:
- The response status code to return when `action` is set to `REDIRECT`.
- "- **MOVED_PERMANENTLY:** Used for designating the permanent movement of a page (numerical code - 301)."
- "- **FOUND:** Used for designating the temporary movement of a page (numerical code - 302)."
type: str
choices:
- "MOVED_PERMANENTLY"
- "FOUND"
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `SHOW_CAPTCHA` and the request is challenged.
type: str
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `SHOW_CAPTCHA` and the request is
challenged.
type: str
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `SHOW_CAPTCHA` and the request is
challenged.
type: str
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `SHOW_CAPTCHA` and the request is
challenged.
type: str
response_header_manipulation:
description:
- An object that represents an action to apply to the HTTP response headers if all rule criteria are matched, regardless of the
`action` value.
type: list
elements: dict
suboptions:
action:
description:
- ""
type: str
choices:
- "EXTEND_HTTP_RESPONSE_HEADER"
- "ADD_HTTP_RESPONSE_HEADER"
- "REMOVE_HTTP_RESPONSE_HEADER"
required: true
header:
description:
- A header field name that conforms to RFC 7230.
- "Example: `example_header_name`"
type: str
required: true
value:
description:
- A header field value that conforms to RFC 7230.
- "Example: `example_value`"
- Required when action is one of ['ADD_HTTP_RESPONSE_HEADER', 'EXTEND_HTTP_RESPONSE_HEADER']
type: str
address_rate_limiting:
description:
- The settings used to limit the number of requests from an IP address.
type: dict
suboptions:
is_enabled:
description:
- Enables or disables the address rate limiting Web Application Firewall feature.
type: bool
required: true
allowed_rate_per_address:
description:
- The number of allowed requests per second from one IP address. If unspecified, defaults to `1`.
type: int
max_delayed_count_per_address:
description:
- The maximum number of requests allowed to be queued before subsequent requests are dropped. If unspecified, defaults to `10`.
type: int
block_response_code:
description:
- "The response status code returned when a request is blocked. If unspecified, defaults to `503`. The list of available response
codes: `400`, `401`, `403`, `404`, `405`, `408`, `409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `494`, `495`, `496`,
`497`, `499`, `500`, `501`, `502`, `503`, `504`, `507`."
type: int
captchas:
description:
- A list of CAPTCHA challenge settings. CAPTCHAs challenge requests to ensure a human is attempting to reach the specified URL and not a
bot.
type: list
elements: dict
suboptions:
url:
description:
- The unique URL path at which to show the CAPTCHA challenge.
type: str
required: true
session_expiration_in_seconds:
description:
- The amount of time before the CAPTCHA expires, in seconds. If unspecified, defaults to `300`.
type: int
required: true
title:
description:
- The title used when displaying a CAPTCHA challenge. If unspecified, defaults to `Are you human?`
type: str
required: true
header_text:
description:
- The text to show in the header when showing a CAPTCHA challenge. If unspecified, defaults to 'We have detected an increased number
of attempts to access this website. To help us keep this site secure, please let us know that you are not a robot by entering the
text from the image below.'
type: str
footer_text:
description:
- The text to show in the footer when showing a CAPTCHA challenge. If unspecified, defaults to 'Enter the letters and numbers as
they are shown in the image above.'
type: str
failure_message:
description:
- The text to show when incorrect CAPTCHA text is entered. If unspecified, defaults to `The CAPTCHA was incorrect. Try again.`
type: str
required: true
submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button. If unspecified, defaults to `Yes, I am human`.
type: str
required: true
device_fingerprint_challenge:
description:
- The device fingerprint challenge settings. Blocks bots based on unique device fingerprint information.
type: dict
suboptions:
is_enabled:
description:
- Enables or disables the device fingerprint challenge Web Application Firewall feature.
type: bool
required: true
action:
description:
- The action to take on requests from detected bots. If unspecified, defaults to `DETECT`.
type: str
choices:
- "DETECT"
- "BLOCK"
failure_threshold:
description:
- The number of failed requests allowed before taking action. If unspecified, defaults to `10`.
type: int
action_expiration_in_seconds:
description:
- The number of seconds between challenges for the same IP address. If unspecified, defaults to `60`.
type: int
failure_threshold_expiration_in_seconds:
description:
- The number of seconds before the failure threshold resets. If unspecified, defaults to `60`.
type: int
max_address_count:
description:
- The maximum number of IP addresses permitted with the same device fingerprint. If unspecified, defaults to `20`.
type: int
max_address_count_expiration_in_seconds:
description:
- The number of seconds before the maximum addresses count resets. If unspecified, defaults to `60`.
type: int
challenge_settings:
description:
- ""
type: dict
suboptions:
block_action:
description:
- The method used to block requests that fail the challenge, if `action` is set to `BLOCK`. If unspecified, defaults to
`SHOW_ERROR_PAGE`.
type: str
choices:
- "SET_RESPONSE_CODE"
- "SHOW_ERROR_PAGE"
- "SHOW_CAPTCHA"
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE` or
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `403`. The list of available response codes:
`200`, `201`, `202`, `204`, `206`, `300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`, `408`,
`409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`, `502`,
`503`, `504`, `507`."
type: int
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
request is blocked. If unspecified, defaults to `Access to the website is blocked`.
type: str
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`,
and the request is blocked. If unspecified, defaults to `Access blocked by website owner. Please contact support.`
type: str
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE` and
the request is blocked. If unspecified, defaults to `403`.
type: str
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_CAPTCHA`,
and the request is blocked. If unspecified, defaults to `Are you human?`
type: str
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `We have detected an increased number of attempts
to access this webapp. To help us keep this webapp secure, please let us know that you are not a robot by entering the
text from captcha below.`
type: str
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Enter the letters and numbers as they are shown in
image above`.
type: str
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Yes, I am human`.
type: str
human_interaction_challenge:
description:
- The human interaction challenge settings. Detects natural human interactions such as mouse movements, time on site, and page scrolling to
identify bots.
type: dict
suboptions:
is_enabled:
description:
- Enables or disables the human interaction challenge Web Application Firewall feature.
type: bool
required: true
action:
description:
- The action to take against requests from detected bots. If unspecified, defaults to `DETECT`.
type: str
choices:
- "DETECT"
- "BLOCK"
failure_threshold:
description:
- The number of failed requests before taking action. If unspecified, defaults to `10`.
type: int
action_expiration_in_seconds:
description:
- The number of seconds between challenges for the same IP address. If unspecified, defaults to `60`.
type: int
failure_threshold_expiration_in_seconds:
description:
- The number of seconds before the failure threshold resets. If unspecified, defaults to `60`.
type: int
interaction_threshold:
description:
- The number of interactions required to pass the challenge. If unspecified, defaults to `3`.
type: int
recording_period_in_seconds:
description:
- The number of seconds to record the interactions from the user. If unspecified, defaults to `15`.
type: int
set_http_header:
description:
- Adds an additional HTTP header to requests that fail the challenge before being passed to the origin. Only applicable when the
`action` is set to `DETECT`.
type: dict
suboptions:
name:
description:
- The name of the header.
type: str
required: true
value:
description:
- The value of the header.
type: str
required: true
challenge_settings:
description:
- ""
type: dict
suboptions:
block_action:
description:
- The method used to block requests that fail the challenge, if `action` is set to `BLOCK`. If unspecified, defaults to
`SHOW_ERROR_PAGE`.
type: str
choices:
- "SET_RESPONSE_CODE"
- "SHOW_ERROR_PAGE"
- "SHOW_CAPTCHA"
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE` or
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `403`. The list of available response codes:
`200`, `201`, `202`, `204`, `206`, `300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`, `408`,
`409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`, `502`,
`503`, `504`, `507`."
type: int
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
request is blocked. If unspecified, defaults to `Access to the website is blocked`.
type: str
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`,
and the request is blocked. If unspecified, defaults to `Access blocked by website owner. Please contact support.`
type: str
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE` and
the request is blocked. If unspecified, defaults to `403`.
type: str
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_CAPTCHA`,
and the request is blocked. If unspecified, defaults to `Are you human?`
type: str
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `We have detected an increased number of attempts
to access this webapp. To help us keep this webapp secure, please let us know that you are not a robot by entering the
text from captcha below.`
type: str
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Enter the letters and numbers as they are shown in
image above`.
type: str
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Yes, I am human`.
type: str
is_nat_enabled:
description:
- When enabled, the user is identified not only by the IP address but also by a unique additional hash, which prevents blocking
visitors with shared IP addresses.
type: bool
js_challenge:
description:
- The JavaScript challenge settings. Blocks bots by challenging requests from browsers that have no JavaScript support.
type: dict
suboptions:
is_enabled:
description:
- Enables or disables the JavaScript challenge Web Application Firewall feature.
type: bool
required: true
action:
description:
- The action to take against requests from detected bots. If unspecified, defaults to `DETECT`.
type: str
choices:
- "DETECT"
- "BLOCK"
failure_threshold:
description:
- The number of failed requests before taking action. If unspecified, defaults to `10`.
type: int
action_expiration_in_seconds:
description:
- The number of seconds between challenges from the same IP address. If unspecified, defaults to `60`.
type: int
set_http_header:
description:
- Adds an additional HTTP header to requests that fail the challenge before being passed to the origin. Only applicable when the
`action` is set to `DETECT`.
type: dict
suboptions:
name:
description:
- The name of the header.
type: str
required: true
value:
description:
- The value of the header.
type: str
required: true
challenge_settings:
description:
- ""
type: dict
suboptions:
block_action:
description:
- The method used to block requests that fail the challenge, if `action` is set to `BLOCK`. If unspecified, defaults to
`SHOW_ERROR_PAGE`.
type: str
choices:
- "SET_RESPONSE_CODE"
- "SHOW_ERROR_PAGE"
- "SHOW_CAPTCHA"
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE` or
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `403`. The list of available response codes:
`200`, `201`, `202`, `204`, `206`, `300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`, `408`,
`409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`, `502`,
`503`, `504`, `507`."
type: int
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
request is blocked. If unspecified, defaults to `Access to the website is blocked`.
type: str
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`,
and the request is blocked. If unspecified, defaults to `Access blocked by website owner. Please contact support.`
type: str
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE` and
the request is blocked. If unspecified, defaults to `403`.
type: str
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_CAPTCHA`,
and the request is blocked. If unspecified, defaults to `Are you human?`
type: str
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `We have detected an increased number of attempts
to access this webapp. To help us keep this webapp secure, please let us know that you are not a robot by entering the
text from captcha below.`
type: str
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Enter the letters and numbers as they are shown in
image above`.
type: str
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Yes, I am human`.
type: str
are_redirects_challenged:
description:
- When enabled, redirect responses from the origin will also be challenged. This will change HTTP 301/302 responses from origin to
HTTP 200 with an HTML body containing JavaScript page redirection.
type: bool
criteria:
description:
- When defined, the JavaScript Challenge would be applied only for the requests that matched all the listed conditions.
type: list
elements: dict
suboptions:
condition:
description:
- "The criteria the access rule and JavaScript Challenge uses to determine if action should be taken on a request.
- **URL_IS:** Matches if the concatenation of request URL path and query is identical to the contents of the `value`
field. URL must start with a `/`.
- **URL_IS_NOT:** Matches if the concatenation of request URL path and query is not identical to the contents of the
`value` field. URL must start with a `/`.
- **URL_STARTS_WITH:** Matches if the concatenation of request URL path and query starts with the contents of the `value`
field. URL must start with a `/`.
- **URL_PART_ENDS_WITH:** Matches if the concatenation of request URL path and query ends with the contents of the `value`
field.
- **URL_PART_CONTAINS:** Matches if the concatenation of request URL path and query contains the contents of the `value`
field.
- **URL_REGEX:** Matches if the concatenation of request URL path and query is described by the regular expression in the
value field. The value must be a valid regular expression recognized by the PCRE library in Nginx
(https://www.pcre.org).
- **URL_DOES_NOT_MATCH_REGEX:** Matches if the concatenation of request URL path and query is not described by the regular
expression in the `value` field. The value must be a valid regular expression recognized by the PCRE library in Nginx
(https://www.pcre.org).
- **URL_DOES_NOT_START_WITH:** Matches if the concatenation of request URL path and query does not start with the contents
of the `value` field.
- **URL_PART_DOES_NOT_CONTAIN:** Matches if the concatenation of request URL path and query does not contain the contents
of the `value` field.
- **URL_PART_DOES_NOT_END_WITH:** Matches if the concatenation of request URL path and query does not end with the
contents of the `value` field.
- **IP_IS:** Matches if the request originates from one of the IP addresses contained in the defined address list. The
`value` in this case is string with one or multiple IPs or CIDR notations separated by new line symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IS_NOT:** Matches if the request does not originate from any of the IP addresses contained in the defined address
list. The `value` in this case is string with one or multiple IPs or CIDR notations separated by new line symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IN_LIST:** Matches if the request originates from one of the IP addresses contained in the referenced address list.
The `value` in this case is OCID of the address list.
- **IP_NOT_IN_LIST:** Matches if the request does not originate from any IP address contained in the referenced address
list. The `value` field in this case is OCID of the address list.
- **HTTP_HEADER_CONTAINS:** The HTTP_HEADER_CONTAINS criteria is defined using a compound value separated by a colon: a
header field name and a header field value. `host:test.example.com` is an example of a criteria value where `host` is
the header field name and `test.example.com` is the header field value. A request matches when the header field name is
a case insensitive match and the header field value is a case insensitive, substring match.
*Example:* With a criteria value of `host:test.example.com`, where `host` is the name of the field and `test.example.com`
is the value of the host field, a request with the header values, `Host: www.test.example.com` will match, where as a
request with header values of `host: www.example.com` or `host: test.sub.example.com` will not match.
- **HTTP_METHOD_IS:** Matches if the request method is identical to one of the values listed in field. The `value` in this
case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of available methods:
`GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **HTTP_METHOD_IS_NOT:** Matches if the request method is not identical to any of the contents of the `value` field. The
`value` in this case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of available
methods: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **COUNTRY_IS:** Matches if the request originates from one of countries in the `value` field. The `value` in this case
is string with one or multiple countries separated by new line symbol \\\\n Country codes are in ISO 3166-1 alpha-2
format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **COUNTRY_IS_NOT:** Matches if the request does not originate from any of countries in the `value` field. The `value` in
this case is string with one or multiple countries separated by new line symbol \\\\n Country codes are in ISO 3166-1
alpha-2 format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **USER_AGENT_IS:** Matches if the requesting user agent is identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`
- **USER_AGENT_IS_NOT:** Matches if the requesting user agent is not identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`"
type: str
choices:
- "URL_IS"
- "URL_IS_NOT"
- "URL_STARTS_WITH"
- "URL_PART_ENDS_WITH"
- "URL_PART_CONTAINS"
- "URL_REGEX"
- "URL_DOES_NOT_MATCH_REGEX"
- "URL_DOES_NOT_START_WITH"
- "URL_PART_DOES_NOT_CONTAIN"
- "URL_PART_DOES_NOT_END_WITH"
- "IP_IS"
- "IP_IS_NOT"
- "IP_IN_LIST"
- "IP_NOT_IN_LIST"
- "HTTP_HEADER_CONTAINS"
- "HTTP_METHOD_IS"
- "HTTP_METHOD_IS_NOT"
- "COUNTRY_IS"
- "COUNTRY_IS_NOT"
- "USER_AGENT_IS"
- "USER_AGENT_IS_NOT"
required: true
value:
description:
- The criteria value.
type: str
required: true
is_case_sensitive:
description:
- When enabled, the condition will be matched with case-sensitive rules.
type: bool
is_nat_enabled:
description:
- When enabled, the user is identified not only by the IP address but also by a unique additional hash, which prevents blocking
visitors with shared IP addresses.
type: bool
origin:
description:
- The key in the map of origins referencing the origin used for the Web Application Firewall. The origin must already be included in
`Origins`. Required when creating the `WafConfig` resource, but is not required upon updating the configuration.
- This parameter is updatable.
type: str
caching_rules:
description:
- A list of caching rules applied to the web application.
type: list
elements: dict
suboptions:
key:
description:
- The unique key for the caching rule.
type: str
name:
description:
- The name of the caching rule.
type: str
required: true
action:
description:
- "The action to take when the criteria of a caching rule are met.
- **CACHE:** Caches requested content when the criteria of the rule are met."
- "- **BYPASS_CACHE:** Allows requests to bypass the cache and be directed to the origin when the criteria of the rule is met."
type: str
choices:
- "CACHE"
- "BYPASS_CACHE"
required: true
caching_duration:
description:
- "The duration to cache content for the caching rule, specified in ISO 8601 extended format. Supported units: seconds, minutes,
hours, days, weeks, months. The maximum value that can be set for any unit is `99`. Mixing of multiple units is not supported.
Only applies when the `action` is set to `CACHE`.
Example: `PT1H`"
type: str
is_client_caching_enabled:
description:
- Enables or disables client caching.
Browsers use the `Cache-Control` header value for caching content locally in the browser. This setting overrides the addition of a
`Cache-Control` header in responses.
type: bool
client_caching_duration:
description:
- "The duration to cache content in the user's browser, specified in ISO 8601 extended format. Supported units: seconds, minutes,
hours, days, weeks, months. The maximum value that can be set for any unit is `99`. Mixing of multiple units is not supported.
Only applies when the `action` is set to `CACHE`.
Example: `PT1H`"
type: str
criteria:
description:
- The array of the rule criteria with condition and value. The caching rule would be applied for the requests that matched any of
the listed conditions.
type: list
elements: dict
required: true
suboptions:
condition:
description:
- "The condition of the caching rule criteria.
- **URL_IS:** Matches if the concatenation of request URL path and query is identical to the contents of the `value`
field."
- "- **URL_STARTS_WITH:** Matches if the concatenation of request URL path and query starts with the contents of the `value`
field."
- "- **URL_PART_ENDS_WITH:** Matches if the concatenation of request URL path and query ends with the contents of the
`value` field."
- "- **URL_PART_CONTAINS:** Matches if the concatenation of request URL path and query contains the contents of the `value`
field."
- URLs must start with a `/`. URLs can't contain restricted double slashes `//`. URLs can't contain the restricted `'` `&`
`?` symbols. Resources to cache can only be specified by a URL, any query parameters are ignored.
type: str
choices:
- "URL_IS"
- "URL_STARTS_WITH"
- "URL_PART_ENDS_WITH"
- "URL_PART_CONTAINS"
required: true
value:
description:
- The value of the caching rule criteria.
type: str
required: true
custom_protection_rules:
description:
- A list of the custom protection rule OCIDs and their actions.
type: list
elements: dict
suboptions:
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule.
type: str
action:
description:
- "The action to take when the custom protection rule is triggered.
`DETECT` - Logs the request when the criteria of the custom protection rule are met. `BLOCK` - Blocks the request when the
criteria of the custom protection rule are met."
type: str
choices:
- "DETECT"
- "BLOCK"
exclusions:
description:
- ""
type: list
elements: dict
suboptions:
target:
description:
- The target of the exclusion.
type: str
choices:
- "REQUEST_COOKIES"
- "REQUEST_COOKIE_NAMES"
- "ARGS"
- "ARGS_NAMES"
exclusions:
description:
- ""
type: list
elements: str
origin_groups:
description:
- The map of origin groups and their keys used to associate origins to the `wafConfig`. Origin groups allow you to apply weights to groups
of origins for load balancing purposes. Origins with higher weights will receive larger proportions of client requests.
To add additional origins to your WAAS policy, update the `origins` field of a `UpdateWaasPolicy` request.
- This parameter is updatable.
type: list
elements: str
protection_settings:
description:
- The settings applied to protection rules.
type: dict
suboptions:
block_action:
description:
- If `action` is set to `BLOCK`, this specifies how the traffic is blocked when detected as malicious by a protection rule. If
unspecified, defaults to `SET_RESPONSE_CODE`.
type: str
choices:
- "SHOW_ERROR_PAGE"
- "SET_RESPONSE_CODE"
block_response_code:
description:
- "The response code returned when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE`, and the traffic is
detected as malicious by a protection rule. If unspecified, defaults to `403`. The list of available response codes: `400`, `401`,
`403`, `405`, `409`, `411`, `412`, `413`, `414`, `415`, `416`, `500`, `501`, `502`, `503`, `504`, `507`."
type: int
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the traffic
is detected as malicious by a protection rule. If unspecified, defaults to 'Access to the website is blocked.'
type: str
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
traffic is detected as malicious by a protection rule. If unspecified, defaults to `403`.
type: str
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
traffic is detected as malicious by a protection rule. If unspecified, defaults to `Access blocked by website owner. Please
contact support.`
type: str
max_argument_count:
description:
- "The maximum number of arguments allowed to be passed to your application before an action is taken. Arguments are query
parameters or body parameters in a PUT or POST request. If unspecified, defaults to `255`. This setting only applies if a
corresponding protection rule is enabled, such as the \\"Number of Arguments Limits\\" rule (key: 960335)."
- "Example: If `maxArgumentCount` is set to `2` for the Max Number of Arguments protection rule (key: 960335), the following requests would
be blocked:
`GET /myapp/path?query=one&query=two&query=three`
`POST /myapp/path` with Body `{\\"argument1\\":\\"one\\",\\"argument2\\":\\"two\\",\\"argument3\\":\\"three\\"}`"
type: int
max_name_length_per_argument:
description:
- "The maximum length allowed for each argument name, in characters. Arguments are query parameters or body parameters in a PUT or
POST request. If unspecified, defaults to `400`. This setting only applies if a corresponding protection rule is enabled, such as
the \\"Values Limits\\" rule (key: 960208)."
type: int
max_total_name_length_of_arguments:
description:
- "The maximum length allowed for the sum of the argument name and value, in characters. Arguments are query parameters or body
parameters in a PUT or POST request. If unspecified, defaults to `64000`. This setting only applies if a corresponding protection
rule is enabled, such as the \\"Total Arguments Limits\\" rule (key: 960341)."
type: int
recommendations_period_in_days:
description:
- The length of time to analyze traffic, in days. After the analysis period, `WafRecommendations` will be populated. If
unspecified, defaults to `10`.
- Use `GET /waasPolicies/{waasPolicyId}/wafRecommendations` to view WAF recommendations.
type: int
is_response_inspected:
description:
- Inspects the response body of origin responses. Can be used to detect leakage of sensitive data. If unspecified, defaults to
`false`.
- "**Note:** Only origin responses with a Content-Type matching a value in `mediaTypes` will be inspected."
type: bool
max_response_size_in_ki_b:
description:
- The maximum response size to be fully inspected, in binary kilobytes (KiB). Anything over this limit will be partially inspected.
If unspecified, defaults to `1024`.
type: int
allowed_http_methods:
description:
- "The list of allowed HTTP methods. If unspecified, defaults to `[OPTIONS, GET, HEAD, POST]`. This setting only applies if a
corresponding protection rule is enabled, such as the \\"Restrict HTTP Request Methods\\" rule (key: 911100)."
type: list
elements: str
choices:
- "OPTIONS"
- "GET"
- "HEAD"
- "POST"
- "PUT"
- "DELETE"
- "TRACE"
- "CONNECT"
- "PATCH"
- "PROPFIND"
media_types:
description:
- "The list of media types to allow for inspection, if `isResponseInspected` is enabled. Only responses with MIME types in this list
will be inspected. If unspecified, defaults to `[\\"text/html\\", \\"text/plain\\", \\"text/xml\\"]`."
- " Supported MIME types include:"
- " - text/html
- text/plain
- text/asp
- text/css
- text/x-script
- application/json
- text/webviewhtml
- text/x-java-source
- application/x-javascript
- application/javascript
- application/ecmascript
- text/javascript
- text/ecmascript
- text/x-script.perl
- text/x-script.phyton
- application/plain
- application/xml
- text/xml"
type: list
elements: str
whitelists:
description:
- A list of IP addresses that bypass the Web Application Firewall.
type: list
elements: dict
suboptions:
name:
description:
- The unique name of the whitelist.
type: str
required: true
addresses:
description:
- A set of IP addresses or CIDR notations to include in the whitelist.
type: list
elements: str
address_lists:
description:
- A list of L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of IP address lists to include in the
whitelist.
type: list
elements: str
good_bots:
description:
- A list of bots allowed to access the web application.
type: list
elements: dict
suboptions:
key:
description:
- The unique key for the bot.
- This parameter is updatable.
type: str
required: true
name:
description:
- The bot name.
- This parameter is updatable.
type: str
is_enabled:
description:
- Enables or disables the bot.
- This parameter is updatable.
type: bool
required: true
description:
description:
- The description of the bot.
- This parameter is updatable.
type: str
protection_rules:
description:
- A list of the protection rules and their details.
type: list
elements: dict
suboptions:
key:
description:
- The unique key of the protection rule.
- This parameter is updatable.
type: str
mod_security_rule_ids:
description:
- The list of the ModSecurity rule IDs that apply to this protection rule. For more information about ModSecurity's open source WAF
rules, see L(Mod Security's documentation,https://www.modsecurity.org/CRS/Documentation/index.html).
- This parameter is updatable.
type: list
elements: str
name:
description:
- The name of the protection rule.
- This parameter is updatable.
type: str
description:
description:
- The description of the protection rule.
- This parameter is updatable.
type: str
action:
description:
- The action to take when the traffic is detected as malicious. If unspecified, defaults to `OFF`.
- This parameter is updatable.
type: str
choices:
- "OFF"
- "DETECT"
- "BLOCK"
labels:
description:
- The list of labels for the protection rule.
- "**Note:** Protection rules with a `ResponseBody` label will have no effect unless `isResponseInspected` is true."
- This parameter is updatable.
type: list
elements: str
exclusions:
description:
- ""
type: list
elements: dict
suboptions:
target:
description:
- The target of the exclusion.
- This parameter is updatable.
type: str
choices:
- "REQUEST_COOKIES"
- "REQUEST_COOKIE_NAMES"
- "ARGS"
- "ARGS_NAMES"
exclusions:
description:
- ""
- This parameter is updatable.
type: list
elements: str
threat_feeds:
description:
- A list of threat intelligence feeds and the actions to apply to known malicious traffic based on internet intelligence.
type: list
elements: dict
suboptions:
key:
description:
- The unique key of the threat intelligence feed.
- This parameter is updatable.
type: str
name:
description:
- The name of the threat intelligence feed.
- This parameter is updatable.
type: str
action:
description:
- The action to take when traffic is flagged as malicious by data from the threat intelligence feed. If unspecified, defaults to
`OFF`.
- This parameter is updatable.
type: str
choices:
- "OFF"
- "DETECT"
- "BLOCK"
description:
description:
- The description of the threat intelligence feed.
- This parameter is updatable.
type: str
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
- This parameter is updatable.
type: dict
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
- This parameter is updatable.
type: dict
waas_policy_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the WAAS policy.
- Required for update using I(state=present) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
- Required for delete using I(state=absent) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["id"]
state:
description:
- The state of the WaasPolicy.
- Use I(state=present) to create or update a WaasPolicy.
- Use I(state=absent) to delete a WaasPolicy.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create waas_policy
oci_waas_policy:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
domain: domain_example
# optional
display_name: display_name_example
additional_domains: [ "additional_domains_example" ]
origins:
# required
uri: uri_example
# optional
http_port: 56
https_port: 56
custom_headers:
- # required
name: name_example
value: value_example
origin_groups:
# optional
origins:
- # optional
origin: origin_example
weight: 56
policy_config:
# optional
certificate_id: "ocid1.certificate.oc1..xxxxxxEXAMPLExxxxxx"
is_https_enabled: true
is_https_forced: true
tls_protocols: [ "TLS_V1" ]
is_origin_compression_enabled: true
is_behind_cdn: true
client_address_header: X_FORWARDED_FOR
is_cache_control_respected: true
is_response_buffering_enabled: true
cipher_group: DEFAULT
load_balancing_method:
# required
method: ROUND_ROBIN
websocket_path_prefixes: [ "websocket_path_prefixes_example" ]
is_sni_enabled: true
health_checks:
# optional
is_enabled: true
method: GET
path: path_example
headers: null
expected_response_code_group: [ "2XX" ]
is_response_text_check_enabled: true
expected_response_text: expected_response_text_example
interval_in_seconds: 56
timeout_in_seconds: 56
healthy_threshold: 56
unhealthy_threshold: 56
waf_config:
# optional
access_rules:
- # required
name: name_example
criteria:
- # required
condition: URL_IS
value: value_example
# optional
is_case_sensitive: true
action: ALLOW
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_code: block_error_page_code_example
block_error_page_description: block_error_page_description_example
bypass_challenges: [ "JS_CHALLENGE" ]
redirect_url: redirect_url_example
redirect_response_code: MOVED_PERMANENTLY
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
response_header_manipulation:
- # required
action: EXTEND_HTTP_RESPONSE_HEADER
header: header_example
value: value_example
address_rate_limiting:
# required
is_enabled: true
# optional
allowed_rate_per_address: 56
max_delayed_count_per_address: 56
block_response_code: 56
captchas:
- # required
url: url_example
session_expiration_in_seconds: 56
title: title_example
failure_message: failure_message_example
submit_label: submit_label_example
# optional
header_text: header_text_example
footer_text: footer_text_example
device_fingerprint_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
failure_threshold_expiration_in_seconds: 56
max_address_count: 56
max_address_count_expiration_in_seconds: 56
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
human_interaction_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
failure_threshold_expiration_in_seconds: 56
interaction_threshold: 56
recording_period_in_seconds: 56
set_http_header:
# required
name: name_example
value: value_example
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
is_nat_enabled: true
js_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
set_http_header:
# required
name: name_example
value: value_example
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
are_redirects_challenged: true
criteria:
- # required
condition: URL_IS
value: value_example
# optional
is_case_sensitive: true
is_nat_enabled: true
origin: origin_example
caching_rules:
- # required
name: name_example
action: CACHE
criteria:
- # required
condition: URL_IS
value: value_example
# optional
key: key_example
caching_duration: caching_duration_example
is_client_caching_enabled: true
client_caching_duration: client_caching_duration_example
custom_protection_rules:
- # optional
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
action: DETECT
exclusions:
- # optional
target: REQUEST_COOKIES
exclusions: [ "exclusions_example" ]
origin_groups: [ "origin_groups_example" ]
protection_settings:
# optional
block_action: SHOW_ERROR_PAGE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_code: block_error_page_code_example
block_error_page_description: block_error_page_description_example
max_argument_count: 56
max_name_length_per_argument: 56
max_total_name_length_of_arguments: 56
recommendations_period_in_days: 56
is_response_inspected: true
max_response_size_in_ki_b: 56
allowed_http_methods: [ "OPTIONS" ]
media_types: [ "media_types_example" ]
whitelists:
- # required
name: name_example
# optional
addresses: [ "addresses_example" ]
address_lists: [ "address_lists_example" ]
good_bots:
- # required
key: key_example
is_enabled: true
# optional
name: name_example
description: description_example
protection_rules:
- # optional
key: key_example
mod_security_rule_ids: [ "mod_security_rule_ids_example" ]
name: name_example
description: description_example
action: "OFF"
labels: [ "labels_example" ]
exclusions:
- # optional
target: REQUEST_COOKIES
exclusions: [ "exclusions_example" ]
threat_feeds:
- # optional
key: key_example
name: name_example
action: "OFF"
description: description_example
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
- name: Update waas_policy
oci_waas_policy:
# required
waas_policy_id: "ocid1.waaspolicy.oc1..xxxxxxEXAMPLExxxxxx"
# optional
display_name: display_name_example
additional_domains: [ "additional_domains_example" ]
origins:
# required
uri: uri_example
# optional
http_port: 56
https_port: 56
custom_headers:
- # required
name: name_example
value: value_example
origin_groups:
# optional
origins:
- # optional
origin: origin_example
weight: 56
policy_config:
# optional
certificate_id: "ocid1.certificate.oc1..xxxxxxEXAMPLExxxxxx"
is_https_enabled: true
is_https_forced: true
tls_protocols: [ "TLS_V1" ]
is_origin_compression_enabled: true
is_behind_cdn: true
client_address_header: X_FORWARDED_FOR
is_cache_control_respected: true
is_response_buffering_enabled: true
cipher_group: DEFAULT
load_balancing_method:
# required
method: ROUND_ROBIN
websocket_path_prefixes: [ "websocket_path_prefixes_example" ]
is_sni_enabled: true
health_checks:
# optional
is_enabled: true
method: GET
path: path_example
headers: null
expected_response_code_group: [ "2XX" ]
is_response_text_check_enabled: true
expected_response_text: expected_response_text_example
interval_in_seconds: 56
timeout_in_seconds: 56
healthy_threshold: 56
unhealthy_threshold: 56
waf_config:
# optional
access_rules:
- # required
name: name_example
criteria:
- # required
condition: URL_IS
value: value_example
# optional
is_case_sensitive: true
action: ALLOW
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_code: block_error_page_code_example
block_error_page_description: block_error_page_description_example
bypass_challenges: [ "JS_CHALLENGE" ]
redirect_url: redirect_url_example
redirect_response_code: MOVED_PERMANENTLY
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
response_header_manipulation:
- # required
action: EXTEND_HTTP_RESPONSE_HEADER
header: header_example
value: value_example
address_rate_limiting:
# required
is_enabled: true
# optional
allowed_rate_per_address: 56
max_delayed_count_per_address: 56
block_response_code: 56
captchas:
- # required
url: url_example
session_expiration_in_seconds: 56
title: title_example
failure_message: failure_message_example
submit_label: submit_label_example
# optional
header_text: header_text_example
footer_text: footer_text_example
device_fingerprint_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
failure_threshold_expiration_in_seconds: 56
max_address_count: 56
max_address_count_expiration_in_seconds: 56
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
human_interaction_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
failure_threshold_expiration_in_seconds: 56
interaction_threshold: 56
recording_period_in_seconds: 56
set_http_header:
# required
name: name_example
value: value_example
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
is_nat_enabled: true
js_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
set_http_header:
# required
name: name_example
value: value_example
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
are_redirects_challenged: true
criteria:
- # required
condition: URL_IS
value: value_example
# optional
is_case_sensitive: true
is_nat_enabled: true
origin: origin_example
caching_rules:
- # required
name: name_example
action: CACHE
criteria:
- # required
condition: URL_IS
value: value_example
# optional
key: key_example
caching_duration: caching_duration_example
is_client_caching_enabled: true
client_caching_duration: client_caching_duration_example
custom_protection_rules:
- # optional
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
action: DETECT
exclusions:
- # optional
target: REQUEST_COOKIES
exclusions: [ "exclusions_example" ]
origin_groups: [ "origin_groups_example" ]
protection_settings:
# optional
block_action: SHOW_ERROR_PAGE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_code: block_error_page_code_example
block_error_page_description: block_error_page_description_example
max_argument_count: 56
max_name_length_per_argument: 56
max_total_name_length_of_arguments: 56
recommendations_period_in_days: 56
is_response_inspected: true
max_response_size_in_ki_b: 56
allowed_http_methods: [ "OPTIONS" ]
media_types: [ "media_types_example" ]
whitelists:
- # required
name: name_example
# optional
addresses: [ "addresses_example" ]
address_lists: [ "address_lists_example" ]
good_bots:
- # required
key: key_example
is_enabled: true
# optional
name: name_example
description: description_example
protection_rules:
- # optional
key: key_example
mod_security_rule_ids: [ "mod_security_rule_ids_example" ]
name: name_example
description: description_example
action: "OFF"
labels: [ "labels_example" ]
exclusions:
- # optional
target: REQUEST_COOKIES
exclusions: [ "exclusions_example" ]
threat_feeds:
- # optional
key: key_example
name: name_example
action: "OFF"
description: description_example
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
- name: Update waas_policy using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_waas_policy:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name: display_name_example
# optional
additional_domains: [ "additional_domains_example" ]
origins:
# required
uri: uri_example
# optional
http_port: 56
https_port: 56
custom_headers:
- # required
name: name_example
value: value_example
origin_groups:
# optional
origins:
- # optional
origin: origin_example
weight: 56
policy_config:
# optional
certificate_id: "ocid1.certificate.oc1..xxxxxxEXAMPLExxxxxx"
is_https_enabled: true
is_https_forced: true
tls_protocols: [ "TLS_V1" ]
is_origin_compression_enabled: true
is_behind_cdn: true
client_address_header: X_FORWARDED_FOR
is_cache_control_respected: true
is_response_buffering_enabled: true
cipher_group: DEFAULT
load_balancing_method:
# required
method: ROUND_ROBIN
websocket_path_prefixes: [ "websocket_path_prefixes_example" ]
is_sni_enabled: true
health_checks:
# optional
is_enabled: true
method: GET
path: path_example
headers: null
expected_response_code_group: [ "2XX" ]
is_response_text_check_enabled: true
expected_response_text: expected_response_text_example
interval_in_seconds: 56
timeout_in_seconds: 56
healthy_threshold: 56
unhealthy_threshold: 56
waf_config:
# optional
access_rules:
- # required
name: name_example
criteria:
- # required
condition: URL_IS
value: value_example
# optional
is_case_sensitive: true
action: ALLOW
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_code: block_error_page_code_example
block_error_page_description: block_error_page_description_example
bypass_challenges: [ "JS_CHALLENGE" ]
redirect_url: redirect_url_example
redirect_response_code: MOVED_PERMANENTLY
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
response_header_manipulation:
- # required
action: EXTEND_HTTP_RESPONSE_HEADER
header: header_example
value: value_example
address_rate_limiting:
# required
is_enabled: true
# optional
allowed_rate_per_address: 56
max_delayed_count_per_address: 56
block_response_code: 56
captchas:
- # required
url: url_example
session_expiration_in_seconds: 56
title: title_example
failure_message: failure_message_example
submit_label: submit_label_example
# optional
header_text: header_text_example
footer_text: footer_text_example
device_fingerprint_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
failure_threshold_expiration_in_seconds: 56
max_address_count: 56
max_address_count_expiration_in_seconds: 56
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
human_interaction_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
failure_threshold_expiration_in_seconds: 56
interaction_threshold: 56
recording_period_in_seconds: 56
set_http_header:
# required
name: name_example
value: value_example
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
is_nat_enabled: true
js_challenge:
# required
is_enabled: true
# optional
action: DETECT
failure_threshold: 56
action_expiration_in_seconds: 56
set_http_header:
# required
name: name_example
value: value_example
challenge_settings:
# optional
block_action: SET_RESPONSE_CODE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_description: block_error_page_description_example
block_error_page_code: block_error_page_code_example
captcha_title: captcha_title_example
captcha_header: captcha_header_example
captcha_footer: captcha_footer_example
captcha_submit_label: captcha_submit_label_example
are_redirects_challenged: true
criteria:
- # required
condition: URL_IS
value: value_example
# optional
is_case_sensitive: true
is_nat_enabled: true
origin: origin_example
caching_rules:
- # required
name: name_example
action: CACHE
criteria:
- # required
condition: URL_IS
value: value_example
# optional
key: key_example
caching_duration: caching_duration_example
is_client_caching_enabled: true
client_caching_duration: client_caching_duration_example
custom_protection_rules:
- # optional
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
action: DETECT
exclusions:
- # optional
target: REQUEST_COOKIES
exclusions: [ "exclusions_example" ]
origin_groups: [ "origin_groups_example" ]
protection_settings:
# optional
block_action: SHOW_ERROR_PAGE
block_response_code: 56
block_error_page_message: block_error_page_message_example
block_error_page_code: block_error_page_code_example
block_error_page_description: block_error_page_description_example
max_argument_count: 56
max_name_length_per_argument: 56
max_total_name_length_of_arguments: 56
recommendations_period_in_days: 56
is_response_inspected: true
max_response_size_in_ki_b: 56
allowed_http_methods: [ "OPTIONS" ]
media_types: [ "media_types_example" ]
whitelists:
- # required
name: name_example
# optional
addresses: [ "addresses_example" ]
address_lists: [ "address_lists_example" ]
good_bots:
- # required
key: key_example
is_enabled: true
# optional
name: name_example
description: description_example
protection_rules:
- # optional
key: key_example
mod_security_rule_ids: [ "mod_security_rule_ids_example" ]
name: name_example
description: description_example
action: "OFF"
labels: [ "labels_example" ]
exclusions:
- # optional
target: REQUEST_COOKIES
exclusions: [ "exclusions_example" ]
threat_feeds:
- # optional
key: key_example
name: name_example
action: "OFF"
description: description_example
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
- name: Delete waas_policy
oci_waas_policy:
# required
waas_policy_id: "ocid1.waaspolicy.oc1..xxxxxxEXAMPLExxxxxx"
state: absent
- name: Delete waas_policy using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_waas_policy:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name: display_name_example
state: absent
"""
RETURN = """
waas_policy:
description:
- Details of the WaasPolicy resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the WAAS policy.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the WAAS policy's compartment.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- The user-friendly name of the WAAS policy. The name can be changed and does not need to be unique.
returned: on success
type: str
sample: display_name_example
domain:
description:
- The web application domain that the WAAS policy protects.
returned: on success
type: str
sample: domain_example
additional_domains:
description:
- An array of additional domains for this web application.
returned: on success
type: list
sample: []
cname:
description:
- The CNAME record to add to your DNS configuration to route traffic for the domain, and all additional domains, through the WAF.
returned: on success
type: str
sample: cname_example
lifecycle_state:
description:
- The current lifecycle state of the WAAS policy.
returned: on success
type: str
sample: CREATING
time_created:
description:
- The date and time the policy was created, expressed in RFC 3339 timestamp format.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
origins:
description:
- "A map of host servers (origins) and their keys for the web application. Origin keys are used to associate origins to specific protection
rules. The key should be a user-friendly name for the host. **Examples:** `primary` or `secondary`."
returned: on success
type: complex
contains:
uri:
description:
- The URI of the origin. Does not support paths. Port numbers should be specified in the `httpPort` and `httpsPort` fields.
returned: on success
type: str
sample: uri_example
http_port:
description:
- "The HTTP port on the origin that the web application listens on. If unspecified, defaults to `80`. If `0` is specified - the origin
is not used for HTTP traffic."
returned: on success
type: int
sample: 56
https_port:
description:
- "The HTTPS port on the origin that the web application listens on. If unspecified, defaults to `443`. If `0` is specified - the origin
is not used for HTTPS traffic."
returned: on success
type: int
sample: 56
custom_headers:
description:
- A list of HTTP headers to forward to your origin.
returned: on success
type: complex
contains:
name:
description:
- The name of the header.
returned: on success
type: str
sample: name_example
value:
description:
- The value of the header.
returned: on success
type: str
sample: value_example
origin_groups:
description:
- The map of origin groups and their keys used to associate origins to the `wafConfig`. Origin groups allow you to apply weights to groups of
origins for load balancing purposes. Origins with higher weights will receive larger proportions of client requests.
returned: on success
type: complex
contains:
origins:
description:
- The list of objects containing origin references and additional properties.
returned: on success
type: complex
contains:
origin:
description:
- The IP address or CIDR notation of the origin server.
returned: on success
type: str
sample: origin_example
weight:
description:
- The weight of the origin used in load balancing. Origins with higher weights will receive larger proportions of client
requests.
returned: on success
type: int
sample: 56
policy_config:
description:
- ""
returned: on success
type: complex
contains:
certificate_id:
description:
- The OCID of the SSL certificate to use if HTTPS is supported.
returned: on success
type: str
sample: "ocid1.certificate.oc1..xxxxxxEXAMPLExxxxxx"
is_https_enabled:
description:
- Enable or disable HTTPS support. If true, a `certificateId` is required. If unspecified, defaults to `false`.
returned: on success
type: bool
sample: true
is_https_forced:
description:
- Force HTTP to HTTPS redirection. If unspecified, defaults to `false`.
returned: on success
type: bool
sample: true
tls_protocols:
description:
- "A list of allowed TLS protocols. Only applicable when HTTPS support is enabled.
The TLS protocol is negotiated while the request is connecting and the most recent protocol supported by both the edge node and client
browser will be selected. If no such version exists, the connection will be aborted.
- **TLS_V1:** corresponds to TLS 1.0 specification."
- "- **TLS_V1_1:** corresponds to TLS 1.1 specification."
- "- **TLS_V1_2:** corresponds to TLS 1.2 specification."
- "- **TLS_V1_3:** corresponds to TLS 1.3 specification."
- Enabled TLS protocols must form a contiguous range of versions. For example, if `TLS_V1_1` and `TLS_V1_3` are enabled, `TLS_V1_2` must be enabled too.
returned: on success
type: list
sample: []
is_origin_compression_enabled:
description:
- "Enable or disable GZIP compression of origin responses. If enabled, the header `Accept-Encoding: gzip` is sent to origin, otherwise,
the empty `Accept-Encoding:` header is used."
returned: on success
type: bool
sample: true
is_behind_cdn:
description:
- Enabling `isBehindCdn` allows for the collection of IP addresses from client requests if the WAF is connected to a CDN.
returned: on success
type: bool
sample: true
client_address_header:
description:
- Specifies an HTTP header name which is treated as the connecting client's IP address. Applicable only if `isBehindCdn` is enabled.
- The edge node reads this header and its value and sets the client IP address as specified. It does not create the header if the header
is not present in the request. If the header is not present, the connecting IP address will be used as the client's true IP address.
It uses the last IP address in the header's value as the true IP address.
- "Example: `X-Client-Ip: 11.1.1.1, 13.3.3.3`"
- In the case of multiple headers with the same name, only the first header will be used. It is assumed that CDN sets the correct client
IP address to prevent spoofing.
- "- **X_FORWARDED_FOR:** Corresponds to `X-Forwarded-For` header name."
- "- **X_CLIENT_IP:** Corresponds to `X-Client-Ip` header name."
- "- **X_REAL_IP:** Corresponds to `X-Real-Ip` header name."
- "- **CLIENT_IP:** Corresponds to `Client-Ip` header name."
- "- **TRUE_CLIENT_IP:** Corresponds to `True-Client-Ip` header name."
returned: on success
type: str
sample: X_FORWARDED_FOR
is_cache_control_respected:
description:
- "Enable or disable automatic content caching based on the response `cache-control` header. This feature enables the origin to act as a
proxy cache. Caching is usually defined using `cache-control` header. For example `cache-control: max-age=120` means that the returned
resource is valid for 120 seconds. Caching rules will overwrite this setting."
returned: on success
type: bool
sample: true
is_response_buffering_enabled:
description:
- Enable or disable buffering of responses from the origin. Buffering improves overall stability in case of network issues, but slightly
increases Time To First Byte.
returned: on success
type: bool
sample: true
cipher_group:
description:
- "The set cipher group for the configured TLS protocol. This sets the configuration for the TLS connections between clients and edge
nodes only."
- "- **DEFAULT:** Cipher group supports TLS 1.0, TLS 1.1, TLS 1.2, TLS 1.3 protocols. It has the following ciphers enabled: `ECDHE-RSA-
AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-
SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-
AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-
RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-
SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:!DES-
CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA`"
returned: on success
type: str
sample: DEFAULT
load_balancing_method:
description:
- An object that represents a load balancing method and its properties.
returned: on success
type: complex
contains:
method:
description:
- Load balancing methods are algorithms used to efficiently distribute traffic among origin servers.
- "- **L(IP_HASH,https://docs.cloud.oracle.com/iaas/api/#/en/waas/latest/datatypes/IPHashLoadBalancingMethod):** All the
incoming requests from the same client IP address should go to the same content origination server. IP_HASH load balancing
method uses origin weights when choosing which origin should the hash be assigned to initially."
- "- **L(ROUND_ROBIN,https://docs.cloud.oracle.com/iaas/api/#/en/waas/latest/datatypes/RoundRobinLoadBalancingMethod):**
Forwards requests sequentially to the available origin servers. The first request - to the first origin server, the second
request - to the next origin server, and so on. After it sends a request to the last origin server, it starts again with the
first origin server. When using weights on origins, Weighted Round Robin assigns more requests to origins with a greater
weight. Over a period of time, origins will receive a number of requests in proportion to their weight."
- "- **L(STICKY_COOKIE,https://docs.cloud.oracle.com/iaas/api/#/en/waas/latest/datatypes/StickyCookieLoadBalancingMethod):**
Adds a session cookie to the first response from the origin server and identifies the server that sent the response. The
client's next request contains the cookie value, and nginx routes the request to the origin server that responded to the first
request. STICKY_COOKIE load balancing method falls back to Round Robin for the first request."
returned: on success
type: str
sample: IP_HASH
name:
description:
- The name of the cookie used to track the persistence.
Can contain any US-ASCII character except separator or control character.
returned: on success
type: str
sample: name_example
domain:
description:
- The domain for which the cookie is set, defaults to WAAS policy domain.
returned: on success
type: str
sample: domain_example
expiration_time_in_seconds:
description:
- The time for which a browser should keep the cookie in seconds.
Empty value will cause the cookie to expire at the end of a browser session.
returned: on success
type: int
sample: 56
websocket_path_prefixes:
description:
- ModSecurity is not capable of inspecting WebSockets. Therefore, paths specified here have WAF disabled if the Connection request header from
the client has the value Upgrade (case-insensitive matching) and the Upgrade request header has the value websocket (case-insensitive
matching). Paths match if the concatenation of request URL path and query starts with the contents of one of the
`websocketPathPrefixes` array values. In all other cases challenges, like JSC, HIC, etc., remain active.
returned: on success
type: list
sample: []
is_sni_enabled:
description:
- SNI stands for Server Name Indication and is an extension of the TLS protocol. It indicates which hostname is being contacted by the
browser at the beginning of the 'handshake'-process. This allows a server to connect multiple SSL Certificates to one IP address and
port.
returned: on success
type: bool
sample: true
health_checks:
description:
- ""
returned: on success
type: complex
contains:
is_enabled:
description:
- Enables or disables the health checks.
returned: on success
type: bool
sample: true
method:
description:
- An HTTP verb (i.e. HEAD, GET, or POST) to use when performing the health check.
returned: on success
type: str
sample: GET
path:
description:
- Path to visit on your origins when performing the health check.
returned: on success
type: str
sample: path_example
headers:
description:
- "HTTP header fields to include in health check requests, expressed as `\\"name\\": \\"value\\"` properties. Because HTTP
header field names are case-insensitive, any use of names that are case-insensitive equal to other names will be rejected. If
Host is not specified, requests will include a Host header field with value matching the policy's protected domain. If User-
Agent is not specified, requests will include a User-Agent header field with value \\"waf health checks\\"."
- "**Note:** The only currently-supported header fields are Host and User-Agent."
returned: on success
type: dict
sample: {}
expected_response_code_group:
description:
- "The HTTP response codes that signify a healthy state."
- "- **2XX:** Success response code group."
- "- **3XX:** Redirection response code group."
- "- **4XX:** Client errors response code group."
- "- **5XX:** Server errors response code group."
returned: on success
type: list
sample: []
is_response_text_check_enabled:
description:
- Enables or disables additional check for predefined text in addition to response code.
returned: on success
type: bool
sample: true
expected_response_text:
description:
- Health check will search for the given text in a case-sensitive manner within the response body and will fail if the text is
not found.
returned: on success
type: str
sample: expected_response_text_example
interval_in_seconds:
description:
- Time between health checks of an individual origin server, in seconds.
returned: on success
type: int
sample: 56
timeout_in_seconds:
description:
- Response timeout represents wait time until request is considered failed, in seconds.
returned: on success
type: int
sample: 56
healthy_threshold:
description:
- Number of successful health checks after which the server is marked up.
returned: on success
type: int
sample: 56
unhealthy_threshold:
description:
- Number of failed health checks after which the server is marked down.
returned: on success
type: int
sample: 56
waf_config:
description:
- ""
returned: on success
type: complex
contains:
access_rules:
description:
- The access rules applied to the Web Application Firewall. Used for defining custom access policies with the combination of `ALLOW`,
`DETECT`, and `BLOCK` rules, based on different criteria.
returned: on success
type: complex
contains:
name:
description:
- The unique name of the access rule.
returned: on success
type: str
sample: name_example
criteria:
description:
- The list of access rule criteria. The rule would be applied only for the requests that matched all the listed conditions.
returned: on success
type: complex
contains:
condition:
description:
- "The criteria the access rule and JavaScript Challenge uses to determine if action should be taken on a request.
- **URL_IS:** Matches if the concatenation of request URL path and query is identical to the contents of the `value`
field. URL must start with a `/`.
- **URL_IS_NOT:** Matches if the concatenation of request URL path and query is not identical to the contents of the
`value` field. URL must start with a `/`.
- **URL_STARTS_WITH:** Matches if the concatenation of request URL path and query starts with the contents of the
`value` field. URL must start with a `/`.
- **URL_PART_ENDS_WITH:** Matches if the concatenation of request URL path and query ends with the contents of the
`value` field.
- **URL_PART_CONTAINS:** Matches if the concatenation of request URL path and query contains the contents of the
`value` field.
- **URL_REGEX:** Matches if the concatenation of request URL path and query is described by the regular expression in
the value field. The value must be a valid regular expression recognized by the PCRE library in Nginx
(https://www.pcre.org).
- **URL_DOES_NOT_MATCH_REGEX:** Matches if the concatenation of request URL path and query is not described by the
regular expression in the `value` field. The value must be a valid regular expression recognized by the PCRE library
in Nginx (https://www.pcre.org).
- **URL_DOES_NOT_START_WITH:** Matches if the concatenation of request URL path and query does not start with the
contents of the `value` field.
- **URL_PART_DOES_NOT_CONTAIN:** Matches if the concatenation of request URL path and query does not contain the
contents of the `value` field.
- **URL_PART_DOES_NOT_END_WITH:** Matches if the concatenation of request URL path and query does not end with the
contents of the `value` field.
- **IP_IS:** Matches if the request originates from one of the IP addresses contained in the defined address list. The
`value` in this case is string with one or multiple IPs or CIDR notations separated by new line symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IS_NOT:** Matches if the request does not originate from any of the IP addresses contained in the defined
address list. The `value` in this case is string with one or multiple IPs or CIDR notations separated by new line
symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IN_LIST:** Matches if the request originates from one of the IP addresses contained in the referenced address
list. The `value` in this case is OCID of the address list.
- **IP_NOT_IN_LIST:** Matches if the request does not originate from any IP address contained in the referenced
address list. The `value` field in this case is OCID of the address list.
- **HTTP_HEADER_CONTAINS:** The HTTP_HEADER_CONTAINS criteria is defined using a compound value separated by a colon:
a header field name and a header field value. `host:test.example.com` is an example of a criteria value where `host`
is the header field name and `test.example.com` is the header field value. A request matches when the header field
name is a case insensitive match and the header field value is a case insensitive, substring match.
*Example:* With a criteria value of `host:test.example.com`, where `host` is the name of the field and
`test.example.com` is the value of the host field, a request with the header values, `Host: www.test.example.com` will
match, where as a request with header values of `host: www.example.com` or `host: test.sub.example.com` will not
match.
- **HTTP_METHOD_IS:** Matches if the request method is identical to one of the values listed in field. The `value` in
this case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of available
methods: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **HTTP_METHOD_IS_NOT:** Matches if the request method is not identical to any of the contents of the `value` field. The
`value` in this case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of
available methods: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **COUNTRY_IS:** Matches if the request originates from one of countries in the `value` field. The `value` in this
case is string with one or multiple countries separated by new line symbol \\\\n Country codes are in ISO 3166-1
alpha-2 format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **COUNTRY_IS_NOT:** Matches if the request does not originate from any of countries in the `value` field. The
`value` in this case is string with one or multiple countries separated by new line symbol \\\\n Country codes are
in ISO 3166-1 alpha-2 format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **USER_AGENT_IS:** Matches if the requesting user agent is identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`
- **USER_AGENT_IS_NOT:** Matches if the requesting user agent is not identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`"
returned: on success
type: str
sample: URL_IS
value:
description:
- The criteria value.
returned: on success
type: str
sample: value_example
is_case_sensitive:
description:
- When enabled, the condition will be matched with case-sensitive rules.
returned: on success
type: bool
sample: true
action:
description:
- The action to take when the access criteria are met for a rule. If unspecified, defaults to `ALLOW`.
- "- **ALLOW:** Takes no action, just logs the request."
- "- **DETECT:** Takes no action, but creates an alert for the request."
- "- **BLOCK:** Blocks the request by returning specified response code or showing error page."
- "- **BYPASS:** Bypasses some or all challenges."
- "- **REDIRECT:** Redirects the request to the specified URL. These fields are required when `REDIRECT` is selected:
`redirectUrl`, `redirectResponseCode`."
- "- **SHOW_CAPTCHA:** Show a CAPTCHA Challenge page instead of the requested page."
- Regardless of action, no further rules are processed once a rule is matched.
returned: on success
type: str
sample: ALLOW
block_action:
description:
- The method used to block requests if `action` is set to `BLOCK` and the access criteria are met. If unspecified, defaults to
`SET_RESPONSE_CODE`.
returned: on success
type: str
sample: SET_RESPONSE_CODE
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE`, and the
access criteria are met. If unspecified, defaults to `403`. The list of available response codes: `200`, `201`, `202`, `204`,
`206`, `300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`, `408`, `409`, `411`, `412`, `413`, `414`,
`415`, `416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`, `502`, `503`, `504`, `507`."
returned: on success
type: int
sample: 56
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
access criteria are met. If unspecified, defaults to 'Access to the website is blocked.'
returned: on success
type: str
sample: block_error_page_message_example
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
access criteria are met. If unspecified, defaults to 'Access rules'.
returned: on success
type: str
sample: block_error_page_code_example
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and
the access criteria are met. If unspecified, defaults to 'Access blocked by website owner. Please contact support.'
returned: on success
type: str
sample: block_error_page_description_example
bypass_challenges:
description:
- The list of challenges to bypass when `action` is set to `BYPASS`. If unspecified or empty, all challenges are bypassed.
- "- **JS_CHALLENGE:** Bypasses JavaScript Challenge."
- "- **DEVICE_FINGERPRINT_CHALLENGE:** Bypasses Device Fingerprint Challenge."
- "- **HUMAN_INTERACTION_CHALLENGE:** Bypasses Human Interaction Challenge."
- "- **CAPTCHA:** Bypasses CAPTCHA Challenge."
returned: on success
type: list
sample: []
redirect_url:
description:
- The target to which the request should be redirected, represented as a URI reference. Required when `action` is `REDIRECT`.
returned: on success
type: str
sample: redirect_url_example
redirect_response_code:
description:
- The response status code to return when `action` is set to `REDIRECT`.
- "- **MOVED_PERMANENTLY:** Used for designating the permanent movement of a page (numerical code - 301)."
- "- **FOUND:** Used for designating the temporary movement of a page (numerical code - 302)."
returned: on success
type: str
sample: MOVED_PERMANENTLY
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `SHOW_CAPTCHA` and the request is challenged.
returned: on success
type: str
sample: captcha_title_example
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `SHOW_CAPTCHA` and the request is
challenged.
returned: on success
type: str
sample: captcha_header_example
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `SHOW_CAPTCHA` and the request is
challenged.
returned: on success
type: str
sample: captcha_footer_example
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `SHOW_CAPTCHA` and the request is
challenged.
returned: on success
type: str
sample: captcha_submit_label_example
response_header_manipulation:
description:
- An object that represents an action to apply to an HTTP response headers if all rule criteria will be matched regardless of
`action` value.
returned: on success
type: complex
contains:
action:
description:
- ""
returned: on success
type: str
sample: EXTEND_HTTP_RESPONSE_HEADER
header:
description:
- A header field name that conforms to RFC 7230.
- "Example: `example_header_name`"
returned: on success
type: str
sample: header_example
value:
description:
- A header field value that conforms to RFC 7230.
- "Example: `example_value`"
returned: on success
type: str
sample: value_example
address_rate_limiting:
description:
- The IP address rate limiting settings used to limit the number of requests from an address.
returned: on success
type: complex
contains:
is_enabled:
description:
- Enables or disables the address rate limiting Web Application Firewall feature.
returned: on success
type: bool
sample: true
allowed_rate_per_address:
description:
- The number of allowed requests per second from one IP address. If unspecified, defaults to `1`.
returned: on success
type: int
sample: 56
max_delayed_count_per_address:
description:
- The maximum number of requests allowed to be queued before subsequent requests are dropped. If unspecified, defaults to `10`.
returned: on success
type: int
sample: 56
block_response_code:
description:
- "The response status code returned when a request is blocked. If unspecified, defaults to `503`. The list of available
response codes: `400`, `401`, `403`, `404`, `405`, `408`, `409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `494`,
`495`, `496`, `497`, `499`, `500`, `501`, `502`, `503`, `504`, `507`."
returned: on success
type: int
sample: 56
captchas:
description:
- A list of CAPTCHA challenge settings. These are used to challenge requests with a CAPTCHA to block bots.
returned: on success
type: complex
contains:
url:
description:
- The unique URL path at which to show the CAPTCHA challenge.
returned: on success
type: str
sample: url_example
session_expiration_in_seconds:
description:
- The amount of time before the CAPTCHA expires, in seconds. If unspecified, defaults to `300`.
returned: on success
type: int
sample: 56
title:
description:
- The title used when displaying a CAPTCHA challenge. If unspecified, defaults to `Are you human?`
returned: on success
type: str
sample: title_example
header_text:
description:
- The text to show in the header when showing a CAPTCHA challenge. If unspecified, defaults to 'We have detected an increased
number of attempts to access this website. To help us keep this site secure, please let us know that you are not a robot by
entering the text from the image below.'
returned: on success
type: str
sample: header_text_example
footer_text:
description:
- The text to show in the footer when showing a CAPTCHA challenge. If unspecified, defaults to 'Enter the letters and numbers as
they are shown in the image above.'
returned: on success
type: str
sample: footer_text_example
failure_message:
description:
- The text to show when incorrect CAPTCHA text is entered. If unspecified, defaults to `The CAPTCHA was incorrect. Try again.`
returned: on success
type: str
sample: failure_message_example
submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button. If unspecified, defaults to `Yes, I am human`.
returned: on success
type: str
sample: submit_label_example
device_fingerprint_challenge:
description:
- The device fingerprint challenge settings. Used to detect unique devices based on the device fingerprint information collected in
order to block bots.
returned: on success
type: complex
contains:
is_enabled:
description:
- Enables or disables the device fingerprint challenge Web Application Firewall feature.
returned: on success
type: bool
sample: true
action:
description:
- The action to take on requests from detected bots. If unspecified, defaults to `DETECT`.
returned: on success
type: str
sample: DETECT
failure_threshold:
description:
- The number of failed requests allowed before taking action. If unspecified, defaults to `10`.
returned: on success
type: int
sample: 56
action_expiration_in_seconds:
description:
- The number of seconds between challenges for the same IP address. If unspecified, defaults to `60`.
returned: on success
type: int
sample: 56
failure_threshold_expiration_in_seconds:
description:
- The number of seconds before the failure threshold resets. If unspecified, defaults to `60`.
returned: on success
type: int
sample: 56
max_address_count:
description:
- The maximum number of IP addresses permitted with the same device fingerprint. If unspecified, defaults to `20`.
returned: on success
type: int
sample: 56
max_address_count_expiration_in_seconds:
description:
- The number of seconds before the maximum addresses count resets. If unspecified, defaults to `60`.
returned: on success
type: int
sample: 56
challenge_settings:
description:
- ""
returned: on success
type: complex
contains:
block_action:
description:
- The method used to block requests that fail the challenge, if `action` is set to `BLOCK`. If unspecified, defaults to
`SHOW_ERROR_PAGE`.
returned: on success
type: str
sample: SET_RESPONSE_CODE
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE` or
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `403`. The list of available response
codes: `200`, `201`, `202`, `204`, `206`, `300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`,
`408`, `409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`,
`502`, `503`, `504`, `507`."
returned: on success
type: int
sample: 56
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and
the request is blocked. If unspecified, defaults to `Access to the website is blocked`.
returned: on success
type: str
sample: block_error_page_message_example
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `Access blocked by website owner. Please
contact support.`
returned: on success
type: str
sample: block_error_page_description_example
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`
and the request is blocked. If unspecified, defaults to `403`.
returned: on success
type: str
sample: block_error_page_code_example
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Are you human?`
returned: on success
type: str
sample: captcha_title_example
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `We have detected an increased number of
attempts to access this webapp. To help us keep this webapp secure, please let us know that you are not a robot by
entering the text from captcha below.`
returned: on success
type: str
sample: captcha_header_example
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Enter the letters and numbers as they are
shown in image above`.
returned: on success
type: str
sample: captcha_footer_example
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `BLOCK`, `blockAction` is
set to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Yes, I am human`.
returned: on success
type: str
sample: captcha_submit_label_example
good_bots:
description:
- A list of bots allowed to access the web application.
returned: on success
type: complex
contains:
key:
description:
- The unique key for the bot.
returned: on success
type: str
sample: key_example
name:
description:
- The bot name.
returned: on success
type: str
sample: name_example
is_enabled:
description:
- Enables or disables the bot.
returned: on success
type: bool
sample: true
description:
description:
- The description of the bot.
returned: on success
type: str
sample: description_example
human_interaction_challenge:
description:
- The human interaction challenge settings. Used to look for natural human interactions such as mouse movements, time on site, and page
scrolling to identify bots.
returned: on success
type: complex
contains:
is_enabled:
description:
- Enables or disables the human interaction challenge Web Application Firewall feature.
returned: on success
type: bool
sample: true
action:
description:
- The action to take against requests from detected bots. If unspecified, defaults to `DETECT`.
returned: on success
type: str
sample: DETECT
failure_threshold:
description:
- The number of failed requests before taking action. If unspecified, defaults to `10`.
returned: on success
type: int
sample: 56
action_expiration_in_seconds:
description:
- The number of seconds between challenges for the same IP address. If unspecified, defaults to `60`.
returned: on success
type: int
sample: 56
failure_threshold_expiration_in_seconds:
description:
- The number of seconds before the failure threshold resets. If unspecified, defaults to `60`.
returned: on success
type: int
sample: 56
interaction_threshold:
description:
- The number of interactions required to pass the challenge. If unspecified, defaults to `3`.
returned: on success
type: int
sample: 56
recording_period_in_seconds:
description:
- The number of seconds to record the interactions from the user. If unspecified, defaults to `15`.
returned: on success
type: int
sample: 56
set_http_header:
description:
- Adds an additional HTTP header to requests that fail the challenge before being passed to the origin. Only applicable when the
`action` is set to `DETECT`.
returned: on success
type: complex
contains:
name:
description:
- The name of the header.
returned: on success
type: str
sample: name_example
value:
description:
- The value of the header.
returned: on success
type: str
sample: value_example
challenge_settings:
description:
- ""
returned: on success
type: complex
contains:
block_action:
description:
- The method used to block requests that fail the challenge, if `action` is set to `BLOCK`. If unspecified, defaults to
`SHOW_ERROR_PAGE`.
returned: on success
type: str
sample: SET_RESPONSE_CODE
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE` or
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `403`. The list of available response
codes: `200`, `201`, `202`, `204`, `206`, `300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`,
`408`, `409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`,
`502`, `503`, `504`, `507`."
returned: on success
type: int
sample: 56
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and
the request is blocked. If unspecified, defaults to `Access to the website is blocked`.
returned: on success
type: str
sample: block_error_page_message_example
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `Access blocked by website owner. Please
contact support.`
returned: on success
type: str
sample: block_error_page_description_example
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`
and the request is blocked. If unspecified, defaults to `403`.
returned: on success
type: str
sample: block_error_page_code_example
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Are you human?`
returned: on success
type: str
sample: captcha_title_example
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `We have detected an increased number of
attempts to access this webapp. To help us keep this webapp secure, please let us know that you are not a robot by
entering the text from captcha below.`
returned: on success
type: str
sample: captcha_header_example
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Enter the letters and numbers as they are
shown in image above`.
returned: on success
type: str
sample: captcha_footer_example
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `BLOCK`, `blockAction` is
set to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Yes, I am human`.
returned: on success
type: str
sample: captcha_submit_label_example
is_nat_enabled:
description:
- When enabled, the user is identified not only by the IP address but also by a unique additional hash, which prevents blocking
visitors with shared IP addresses.
returned: on success
type: bool
sample: true
js_challenge:
description:
- The JavaScript challenge settings. Used to challenge requests with a JavaScript challenge and take the action if a browser has no
JavaScript support in order to block bots.
returned: on success
type: complex
contains:
is_enabled:
description:
- Enables or disables the JavaScript challenge Web Application Firewall feature.
returned: on success
type: bool
sample: true
action:
description:
- The action to take against requests from detected bots. If unspecified, defaults to `DETECT`.
returned: on success
type: str
sample: DETECT
failure_threshold:
description:
- The number of failed requests before taking action. If unspecified, defaults to `10`.
returned: on success
type: int
sample: 56
action_expiration_in_seconds:
description:
- The number of seconds between challenges from the same IP address. If unspecified, defaults to `60`.
returned: on success
type: int
sample: 56
set_http_header:
description:
- Adds an additional HTTP header to requests that fail the challenge before being passed to the origin. Only applicable when the
`action` is set to `DETECT`.
returned: on success
type: complex
contains:
name:
description:
- The name of the header.
returned: on success
type: str
sample: name_example
value:
description:
- The value of the header.
returned: on success
type: str
sample: value_example
challenge_settings:
description:
- ""
returned: on success
type: complex
contains:
block_action:
description:
- The method used to block requests that fail the challenge, if `action` is set to `BLOCK`. If unspecified, defaults to
`SHOW_ERROR_PAGE`.
returned: on success
type: str
sample: SET_RESPONSE_CODE
block_response_code:
description:
- "The response status code to return when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE` or
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `403`. The list of available response
codes: `200`, `201`, `202`, `204`, `206`, `300`, `301`, `302`, `303`, `304`, `307`, `400`, `401`, `403`, `404`, `405`,
`408`, `409`, `411`, `412`, `413`, `414`, `415`, `416`, `422`, `444`, `494`, `495`, `496`, `497`, `499`, `500`, `501`,
`502`, `503`, `504`, `507`."
returned: on success
type: int
sample: 56
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and
the request is blocked. If unspecified, defaults to `Access to the website is blocked`.
returned: on success
type: str
sample: block_error_page_message_example
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_ERROR_PAGE`, and the request is blocked. If unspecified, defaults to `Access blocked by website owner. Please
contact support.`
returned: on success
type: str
sample: block_error_page_description_example
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`
and the request is blocked. If unspecified, defaults to `403`.
returned: on success
type: str
sample: block_error_page_code_example
captcha_title:
description:
- The title used when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set to
`SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Are you human?`
returned: on success
type: str
sample: captcha_title_example
captcha_header:
description:
- The text to show in the header when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `We have detected an increased number of
attempts to access this webapp. To help us keep this webapp secure, please let us know that you are not a robot by
entering the text from captcha below.`
returned: on success
type: str
sample: captcha_header_example
captcha_footer:
description:
- The text to show in the footer when showing a CAPTCHA challenge when `action` is set to `BLOCK`, `blockAction` is set
to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Enter the letters and numbers as they are
shown in image above`.
returned: on success
type: str
sample: captcha_footer_example
captcha_submit_label:
description:
- The text to show on the label of the CAPTCHA challenge submit button when `action` is set to `BLOCK`, `blockAction` is
set to `SHOW_CAPTCHA`, and the request is blocked. If unspecified, defaults to `Yes, I am human`.
returned: on success
type: str
sample: captcha_submit_label_example
are_redirects_challenged:
description:
- When enabled, redirect responses from the origin will also be challenged. This will change HTTP 301/302 responses from origin
to HTTP 200 with an HTML body containing JavaScript page redirection.
returned: on success
type: bool
sample: true
criteria:
description:
- When defined, the JavaScript Challenge would be applied only for the requests that matched all the listed conditions.
returned: on success
type: complex
contains:
condition:
description:
- "The criteria the access rule and JavaScript Challenge uses to determine if action should be taken on a request.
- **URL_IS:** Matches if the concatenation of request URL path and query is identical to the contents of the `value`
field. URL must start with a `/`.
- **URL_IS_NOT:** Matches if the concatenation of request URL path and query is not identical to the contents of the
`value` field. URL must start with a `/`.
- **URL_STARTS_WITH:** Matches if the concatenation of request URL path and query starts with the contents of the
`value` field. URL must start with a `/`.
- **URL_PART_ENDS_WITH:** Matches if the concatenation of request URL path and query ends with the contents of the
`value` field.
- **URL_PART_CONTAINS:** Matches if the concatenation of request URL path and query contains the contents of the
`value` field.
- **URL_REGEX:** Matches if the concatenation of request URL path and query is described by the regular expression in
the value field. The value must be a valid regular expression recognized by the PCRE library in Nginx
(https://www.pcre.org).
- **URL_DOES_NOT_MATCH_REGEX:** Matches if the concatenation of request URL path and query is not described by the
regular expression in the `value` field. The value must be a valid regular expression recognized by the PCRE library
in Nginx (https://www.pcre.org).
- **URL_DOES_NOT_START_WITH:** Matches if the concatenation of request URL path and query does not start with the
contents of the `value` field.
- **URL_PART_DOES_NOT_CONTAIN:** Matches if the concatenation of request URL path and query does not contain the
contents of the `value` field.
- **URL_PART_DOES_NOT_END_WITH:** Matches if the concatenation of request URL path and query does not end with the
contents of the `value` field.
- **IP_IS:** Matches if the request originates from one of the IP addresses contained in the defined address list. The
`value` in this case is string with one or multiple IPs or CIDR notations separated by new line symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IS_NOT:** Matches if the request does not originate from any of the IP addresses contained in the defined
address list. The `value` in this case is string with one or multiple IPs or CIDR notations separated by new line
symbol \\\\n
*Example:* \\"1.1.1.1\\\\n1.1.1.2\\\\n1.2.2.1/30\\"
- **IP_IN_LIST:** Matches if the request originates from one of the IP addresses contained in the referenced address
list. The `value` in this case is OCID of the address list.
- **IP_NOT_IN_LIST:** Matches if the request does not originate from any IP address contained in the referenced
address list. The `value` field in this case is OCID of the address list.
- **HTTP_HEADER_CONTAINS:** The HTTP_HEADER_CONTAINS criteria is defined using a compound value separated by a colon:
a header field name and a header field value. `host:test.example.com` is an example of a criteria value where `host`
is the header field name and `test.example.com` is the header field value. A request matches when the header field
name is a case insensitive match and the header field value is a case insensitive, substring match.
*Example:* With a criteria value of `host:test.example.com`, where `host` is the name of the field and
`test.example.com` is the value of the host field, a request with the header values, `Host: www.test.example.com` will
match, where as a request with header values of `host: www.example.com` or `host: test.sub.example.com` will not
match.
- **HTTP_METHOD_IS:** Matches if the request method is identical to one of the values listed in field. The `value` in
this case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of available
methods: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **HTTP_METHOD_IS_NOT:** Matches if the request is not identical to any of the contents of the `value` field. The
`value` in this case is string with one or multiple HTTP methods separated by new line symbol \\\\n The list of
available methods: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`"
- "*Example:* \\"GET\\\\nPOST\\""
- "- **COUNTRY_IS:** Matches if the request originates from one of countries in the `value` field. The `value` in this
case is string with one or multiple countries separated by new line symbol \\\\n Country codes are in ISO 3166-1
alpha-2 format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **COUNTRY_IS_NOT:** Matches if the request does not originate from any of countries in the `value` field. The
`value` in this case is string with one or multiple countries separated by new line symbol \\\\n Country codes are
in ISO 3166-1 alpha-2 format. For a list of codes, see L(ISO's website,https://www.iso.org/obp/ui/#search/code/).
*Example:* \\"AL\\\\nDZ\\\\nAM\\"
- **USER_AGENT_IS:** Matches if the requesting user agent is identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`
- **USER_AGENT_IS_NOT:** Matches if the requesting user agent is not identical to the contents of the `value` field.
*Example:* `Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0`"
returned: on success
type: str
sample: URL_IS
value:
description:
- The criteria value.
returned: on success
type: str
sample: value_example
is_case_sensitive:
description:
- When enabled, the condition will be matched with case-sensitive rules.
returned: on success
type: bool
sample: true
is_nat_enabled:
description:
- When enabled, the user is identified not only by the IP address but also by a unique additional hash, which prevents blocking
visitors with shared IP addresses.
returned: on success
type: bool
sample: true
origin:
description:
- The key in the map of origins referencing the origin used for the Web Application Firewall. The origin must already be included in
`Origins`. Required when creating the `WafConfig` resource, but not on update.
returned: on success
type: str
sample: origin_example
caching_rules:
description:
- A list of caching rules applied to the web application.
returned: on success
type: complex
contains:
key:
description:
- The unique key for the caching rule.
returned: on success
type: str
sample: key_example
name:
description:
- The name of the caching rule.
returned: on success
type: str
sample: name_example
action:
description:
- "The action to take when the criteria of a caching rule are met.
- **CACHE:** Caches requested content when the criteria of the rule are met."
- "- **BYPASS_CACHE:** Allows requests to bypass the cache and be directed to the origin when the criteria of the rule is met."
returned: on success
type: str
sample: CACHE
caching_duration:
description:
- "The duration to cache content for the caching rule, specified in ISO 8601 extended format. Supported units: seconds, minutes,
hours, days, weeks, months. The maximum value that can be set for any unit is `99`. Mixing of multiple units is not supported.
Only applies when the `action` is set to `CACHE`.
Example: `PT1H`"
returned: on success
type: str
sample: caching_duration_example
is_client_caching_enabled:
description:
- Enables or disables client caching.
Browsers use the `Cache-Control` header value for caching content locally in the browser. This setting overrides the addition
of a `Cache-Control` header in responses.
returned: on success
type: bool
sample: true
client_caching_duration:
description:
- "The duration to cache content in the user's browser, specified in ISO 8601 extended format. Supported units: seconds,
minutes, hours, days, weeks, months. The maximum value that can be set for any unit is `99`. Mixing of multiple units is not
supported. Only applies when the `action` is set to `CACHE`.
Example: `PT1H`"
returned: on success
type: str
sample: client_caching_duration_example
criteria:
description:
- The array of the rule criteria with condition and value. The caching rule would be applied for the requests that matched any
of the listed conditions.
returned: on success
type: complex
contains:
condition:
description:
- "The condition of the caching rule criteria.
- **URL_IS:** Matches if the concatenation of request URL path and query is identical to the contents of the `value`
field."
- "- **URL_STARTS_WITH:** Matches if the concatenation of request URL path and query starts with the contents of the
`value` field."
- "- **URL_PART_ENDS_WITH:** Matches if the concatenation of request URL path and query ends with the contents of the
`value` field."
- "- **URL_PART_CONTAINS:** Matches if the concatenation of request URL path and query contains the contents of the
`value` field."
- URLs must start with a `/`. URLs can't contain restricted double slashes `//`. URLs can't contain the restricted `'`
`&` `?` symbols. Resources to cache can only be specified by a URL, any query parameters are ignored.
returned: on success
type: str
sample: URL_IS
value:
description:
- The value of the caching rule criteria.
returned: on success
type: str
sample: value_example
custom_protection_rules:
description:
- A list of the custom protection rule OCIDs and their actions.
returned: on success
type: complex
contains:
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
action:
description:
- "The action to take when the custom protection rule is triggered.
`DETECT` - Logs the request when the criteria of the custom protection rule are met. `BLOCK` - Blocks the request when the
criteria of the custom protection rule are met."
returned: on success
type: str
sample: DETECT
exclusions:
description:
- ""
returned: on success
type: complex
contains:
target:
description:
- The target of the exclusion.
returned: on success
type: str
sample: REQUEST_COOKIES
exclusions:
description:
- ""
returned: on success
type: list
sample: []
origin_groups:
description:
- The map of origin groups and their keys used to associate origins to the `wafConfig`. Origin groups allow you to apply weights to
groups of origins for load balancing purposes. Origins with higher weights will receive larger proportions of client requests.
To add additional origins to your WAAS policy, update the `origins` field of a `UpdateWaasPolicy` request.
returned: on success
type: list
sample: []
protection_rules:
description:
- A list of the protection rules and their details.
returned: on success
type: complex
contains:
key:
description:
- The unique key of the protection rule.
returned: on success
type: str
sample: key_example
mod_security_rule_ids:
description:
- The list of the ModSecurity rule IDs that apply to this protection rule. For more information about ModSecurity's open source
WAF rules, see L(Mod Security's documentation,https://www.modsecurity.org/CRS/Documentation/index.html).
returned: on success
type: list
sample: []
name:
description:
- The name of the protection rule.
returned: on success
type: str
sample: name_example
description:
description:
- The description of the protection rule.
returned: on success
type: str
sample: description_example
action:
description:
- The action to take when the traffic is detected as malicious. If unspecified, defaults to `OFF`.
returned: on success
type: str
sample: OFF
labels:
description:
- The list of labels for the protection rule.
- "**Note:** Protection rules with a `ResponseBody` label will have no effect unless `isResponseInspected` is true."
returned: on success
type: list
sample: []
exclusions:
description:
- ""
returned: on success
type: complex
contains:
target:
description:
- The target of the exclusion.
returned: on success
type: str
sample: REQUEST_COOKIES
exclusions:
description:
- ""
returned: on success
type: list
sample: []
protection_settings:
description:
- The settings to apply to protection rules.
returned: on success
type: complex
contains:
block_action:
description:
- If `action` is set to `BLOCK`, this specifies how the traffic is blocked when detected as malicious by a protection rule. If
unspecified, defaults to `SET_RESPONSE_CODE`.
returned: on success
type: str
sample: SHOW_ERROR_PAGE
block_response_code:
description:
- "The response code returned when `action` is set to `BLOCK`, `blockAction` is set to `SET_RESPONSE_CODE`, and the traffic is
detected as malicious by a protection rule. If unspecified, defaults to `403`. The list of available response codes: `400`,
`401`, `403`, `405`, `409`, `411`, `412`, `413`, `414`, `415`, `416`, `500`, `501`, `502`, `503`, `504`, `507`."
returned: on success
type: int
sample: 56
block_error_page_message:
description:
- The message to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
traffic is detected as malicious by a protection rule. If unspecified, defaults to 'Access to the website is blocked.'
returned: on success
type: str
sample: block_error_page_message_example
block_error_page_code:
description:
- The error code to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and the
traffic is detected as malicious by a protection rule. If unspecified, defaults to `403`.
returned: on success
type: str
sample: block_error_page_code_example
block_error_page_description:
description:
- The description text to show on the error page when `action` is set to `BLOCK`, `blockAction` is set to `SHOW_ERROR_PAGE`, and
the traffic is detected as malicious by a protection rule. If unspecified, defaults to `Access blocked by website owner.
Please contact support.`
returned: on success
type: str
sample: block_error_page_description_example
max_argument_count:
description:
- "The maximum number of arguments allowed to be passed to your application before an action is taken. Arguments are query
parameters or body parameters in a PUT or POST request. If unspecified, defaults to `255`. This setting only applies if a
corresponding protection rule is enabled, such as the \\"Number of Arguments Limits\\" rule (key: 960335)."
- "Example: If `maxArgumentCount` is set to `2` for the Max Number of Arguments protection rule (key: 960335), the following requests
would be blocked:
`GET /myapp/path?query=one&query=two&query=three`
`POST /myapp/path` with Body `{\\"argument1\\":\\"one\\",\\"argument2\\":\\"two\\",\\"argument3\\":\\"three\\"}`"
returned: on success
type: int
sample: 56
max_name_length_per_argument:
description:
- "The maximum length allowed for each argument name, in characters. Arguments are query parameters or body parameters in a PUT
or POST request. If unspecified, defaults to `400`. This setting only applies if a corresponding protection rule is enabled,
such as the \\"Values Limits\\" rule (key: 960208)."
returned: on success
type: int
sample: 56
max_total_name_length_of_arguments:
description:
- "The maximum length allowed for the sum of the argument name and value, in characters. Arguments are query parameters or body
parameters in a PUT or POST request. If unspecified, defaults to `64000`. This setting only applies if a corresponding
protection rule is enabled, such as the \\"Total Arguments Limits\\" rule (key: 960341)."
returned: on success
type: int
sample: 56
recommendations_period_in_days:
description:
- The length of time to analyze traffic, in days. After the analysis period, `WafRecommendations` will be populated. If
unspecified, defaults to `10`.
- Use `GET /waasPolicies/{waasPolicyId}/wafRecommendations` to view WAF recommendations.
returned: on success
type: int
sample: 56
is_response_inspected:
description:
- Inspects the response body of origin responses. Can be used to detect leakage of sensitive data. If unspecified, defaults to
`false`.
- "**Note:** Only origin responses with a Content-Type matching a value in `mediaTypes` will be inspected."
returned: on success
type: bool
sample: true
max_response_size_in_ki_b:
description:
- The maximum response size to be fully inspected, in binary kilobytes (KiB). Anything over this limit will be partially
inspected. If unspecified, defaults to `1024`.
returned: on success
type: int
sample: 56
allowed_http_methods:
description:
- "The list of allowed HTTP methods. If unspecified, defaults to `[OPTIONS, GET, HEAD, POST]`. This setting only applies if a
corresponding protection rule is enabled, such as the \\"Restrict HTTP Request Methods\\" rule (key: 911100)."
returned: on success
type: list
sample: []
media_types:
description:
- "The list of media types to allow for inspection, if `isResponseInspected` is enabled. Only responses with MIME types in this
list will be inspected. If unspecified, defaults to `[\\"text/html\\", \\"text/plain\\", \\"text/xml\\"]`."
- " Supported MIME types include:"
- " - text/html
- text/plain
- text/asp
- text/css
- text/x-script
- application/json
- text/webviewhtml
- text/x-java-source
- application/x-javascript
- application/javascript
- application/ecmascript
- text/javascript
- text/ecmascript
- text/x-script.perl
- text/x-script.phyton
- application/plain
- application/xml
- text/xml"
returned: on success
type: list
sample: []
threat_feeds:
description:
- A list of threat intelligence feeds and the actions to apply to known malicious traffic based on internet intelligence.
returned: on success
type: complex
contains:
key:
description:
- The unique key of the threat intelligence feed.
returned: on success
type: str
sample: key_example
name:
description:
- The name of the threat intelligence feed.
returned: on success
type: str
sample: name_example
action:
description:
- The action to take when traffic is flagged as malicious by data from the threat intelligence feed. If unspecified, defaults to
`OFF`.
returned: on success
type: str
sample: OFF
description:
description:
- The description of the threat intelligence feed.
returned: on success
type: str
sample: description_example
whitelists:
description:
- A list of IP addresses that bypass the Web Application Firewall.
returned: on success
type: complex
contains:
name:
description:
- The unique name of the whitelist.
returned: on success
type: str
sample: name_example
addresses:
description:
- A set of IP addresses or CIDR notations to include in the whitelist.
returned: on success
type: list
sample: []
address_lists:
description:
- A list of L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of IP address lists to include in the
whitelist.
returned: on success
type: list
sample: []
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"domain": "domain_example",
"additional_domains": [],
"cname": "cname_example",
"lifecycle_state": "CREATING",
"time_created": "2013-10-20T19:20:30+01:00",
"origins": {
"uri": "uri_example",
"http_port": 56,
"https_port": 56,
"custom_headers": [{
"name": "name_example",
"value": "value_example"
}]
},
"origin_groups": {
"origins": [{
"origin": "origin_example",
"weight": 56
}]
},
"policy_config": {
"certificate_id": "ocid1.certificate.oc1..xxxxxxEXAMPLExxxxxx",
"is_https_enabled": true,
"is_https_forced": true,
"tls_protocols": [],
"is_origin_compression_enabled": true,
"is_behind_cdn": true,
"client_address_header": "X_FORWARDED_FOR",
"is_cache_control_respected": true,
"is_response_buffering_enabled": true,
"cipher_group": "DEFAULT",
"load_balancing_method": {
"method": "IP_HASH",
"name": "name_example",
"domain": "domain_example",
"expiration_time_in_seconds": 56
},
"websocket_path_prefixes": [],
"is_sni_enabled": true,
"health_checks": {
"is_enabled": true,
"method": "GET",
"path": "path_example",
"headers": {},
"expected_response_code_group": [],
"is_response_text_check_enabled": true,
"expected_response_text": "expected_response_text_example",
"interval_in_seconds": 56,
"timeout_in_seconds": 56,
"healthy_threshold": 56,
"unhealthy_threshold": 56
}
},
"waf_config": {
"access_rules": [{
"name": "name_example",
"criteria": [{
"condition": "URL_IS",
"value": "value_example",
"is_case_sensitive": true
}],
"action": "ALLOW",
"block_action": "SET_RESPONSE_CODE",
"block_response_code": 56,
"block_error_page_message": "block_error_page_message_example",
"block_error_page_code": "block_error_page_code_example",
"block_error_page_description": "block_error_page_description_example",
"bypass_challenges": [],
"redirect_url": "redirect_url_example",
"redirect_response_code": "MOVED_PERMANENTLY",
"captcha_title": "captcha_title_example",
"captcha_header": "captcha_header_example",
"captcha_footer": "captcha_footer_example",
"captcha_submit_label": "captcha_submit_label_example",
"response_header_manipulation": [{
"action": "EXTEND_HTTP_RESPONSE_HEADER",
"header": "header_example",
"value": "value_example"
}]
}],
"address_rate_limiting": {
"is_enabled": true,
"allowed_rate_per_address": 56,
"max_delayed_count_per_address": 56,
"block_response_code": 56
},
"captchas": [{
"url": "url_example",
"session_expiration_in_seconds": 56,
"title": "title_example",
"header_text": "header_text_example",
"footer_text": "footer_text_example",
"failure_message": "failure_message_example",
"submit_label": "submit_label_example"
}],
"device_fingerprint_challenge": {
"is_enabled": true,
"action": "DETECT",
"failure_threshold": 56,
"action_expiration_in_seconds": 56,
"failure_threshold_expiration_in_seconds": 56,
"max_address_count": 56,
"max_address_count_expiration_in_seconds": 56,
"challenge_settings": {
"block_action": "SET_RESPONSE_CODE",
"block_response_code": 56,
"block_error_page_message": "block_error_page_message_example",
"block_error_page_description": "block_error_page_description_example",
"block_error_page_code": "block_error_page_code_example",
"captcha_title": "captcha_title_example",
"captcha_header": "captcha_header_example",
"captcha_footer": "captcha_footer_example",
"captcha_submit_label": "captcha_submit_label_example"
}
},
"good_bots": [{
"key": "key_example",
"name": "name_example",
"is_enabled": true,
"description": "description_example"
}],
"human_interaction_challenge": {
"is_enabled": true,
"action": "DETECT",
"failure_threshold": 56,
"action_expiration_in_seconds": 56,
"failure_threshold_expiration_in_seconds": 56,
"interaction_threshold": 56,
"recording_period_in_seconds": 56,
"set_http_header": {
"name": "name_example",
"value": "value_example"
},
"challenge_settings": {
"block_action": "SET_RESPONSE_CODE",
"block_response_code": 56,
"block_error_page_message": "block_error_page_message_example",
"block_error_page_description": "block_error_page_description_example",
"block_error_page_code": "block_error_page_code_example",
"captcha_title": "captcha_title_example",
"captcha_header": "captcha_header_example",
"captcha_footer": "captcha_footer_example",
"captcha_submit_label": "captcha_submit_label_example"
},
"is_nat_enabled": true
},
"js_challenge": {
"is_enabled": true,
"action": "DETECT",
"failure_threshold": 56,
"action_expiration_in_seconds": 56,
"set_http_header": {
"name": "name_example",
"value": "value_example"
},
"challenge_settings": {
"block_action": "SET_RESPONSE_CODE",
"block_response_code": 56,
"block_error_page_message": "block_error_page_message_example",
"block_error_page_description": "block_error_page_description_example",
"block_error_page_code": "block_error_page_code_example",
"captcha_title": "captcha_title_example",
"captcha_header": "captcha_header_example",
"captcha_footer": "captcha_footer_example",
"captcha_submit_label": "captcha_submit_label_example"
},
"are_redirects_challenged": true,
"criteria": [{
"condition": "URL_IS",
"value": "value_example",
"is_case_sensitive": true
}],
"is_nat_enabled": true
},
"origin": "origin_example",
"caching_rules": [{
"key": "key_example",
"name": "name_example",
"action": "CACHE",
"caching_duration": "caching_duration_example",
"is_client_caching_enabled": true,
"client_caching_duration": "client_caching_duration_example",
"criteria": [{
"condition": "URL_IS",
"value": "value_example"
}]
}],
"custom_protection_rules": [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"action": "DETECT",
"exclusions": [{
"target": "REQUEST_COOKIES",
"exclusions": []
}]
}],
"origin_groups": [],
"protection_rules": [{
"key": "key_example",
"mod_security_rule_ids": [],
"name": "name_example",
"description": "description_example",
"action": "OFF",
"labels": [],
"exclusions": [{
"target": "REQUEST_COOKIES",
"exclusions": []
}]
}],
"protection_settings": {
"block_action": "SHOW_ERROR_PAGE",
"block_response_code": 56,
"block_error_page_message": "block_error_page_message_example",
"block_error_page_code": "block_error_page_code_example",
"block_error_page_description": "block_error_page_description_example",
"max_argument_count": 56,
"max_name_length_per_argument": 56,
"max_total_name_length_of_arguments": 56,
"recommendations_period_in_days": 56,
"is_response_inspected": true,
"max_response_size_in_ki_b": 56,
"allowed_http_methods": [],
"media_types": []
},
"threat_feeds": [{
"key": "key_example",
"name": "name_example",
"action": "OFF",
"description": "description_example"
}],
"whitelists": [{
"name": "name_example",
"addresses": [],
"address_lists": []
}]
},
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
# Import the OCI Python SDK lazily: when it is missing, record the fact and let
# main() report a clean module failure instead of crashing at import time.
try:
    from oci.waas import WaasClient
    from oci.waas.models import CreateWaasPolicyDetails
    from oci.waas.models import UpdateWaasPolicyDetails

    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class WaasPolicyHelperGen(OCIResourceHelperBase):
    """Supported operations: create, update, get, list and delete"""

    def get_possible_entity_types(self):
        # Aliases under which a WAAS policy resource may be addressed.
        base_types = super(WaasPolicyHelperGen, self).get_possible_entity_types()
        return base_types + [
            "waaspolicy",
            "waaspolicies",
            "waaswaaspolicy",
            "waaswaaspolicies",
            "waaspolicyresource",
            "waaspoliciesresource",
            "waas",
        ]

    def get_module_resource_id_param(self):
        # Name of the module parameter that carries the resource OCID.
        return "waas_policy_id"

    def get_module_resource_id(self):
        return self.module.params.get("waas_policy_id")

    def get_get_fn(self):
        return self.client.get_waas_policy

    def get_get_model_from_summary_model(self, summary_model):
        # Fetch the full model for a summary item returned by the list call.
        response = oci_common_utils.call_with_backoff(
            self.client.get_waas_policy, waas_policy_id=summary_model.id,
        )
        return response.data

    def get_resource(self):
        waas_policy_id = self.module.params.get("waas_policy_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_waas_policy, waas_policy_id=waas_policy_id,
        )

    def get_required_kwargs_for_list(self):
        # compartment_id is mandatory for list_waas_policies.
        return {
            param: self.module.params[param] for param in ("compartment_id",)
        }

    def get_optional_kwargs_for_list(self):
        optional_list_method_params = ["display_name"]
        key_by = self.module.params.get("key_by")
        kwargs = {}
        for param in optional_list_method_params:
            if self.module.params.get(param) is None:
                continue
            # Use the param as a server-side filter only when matching by name,
            # or when key_by is unset / explicitly includes the param.
            if self._use_name_as_identifier() or not key_by or param in key_by:
                kwargs[param] = self.module.params[param]
        return kwargs

    def list_resources(self):
        kwargs = oci_common_utils.merge_dicts(
            self.get_required_kwargs_for_list(), self.get_optional_kwargs_for_list()
        )
        return oci_common_utils.list_all_resources(
            self.client.list_waas_policies, **kwargs
        )

    def get_create_model_class(self):
        return CreateWaasPolicyDetails

    def create_resource(self):
        # Create the policy and wait on its work request to complete.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.create_waas_policy,
            call_fn_args=(),
            call_fn_kwargs=dict(
                create_waas_policy_details=self.get_create_model(),
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=oci_common_utils.CREATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )

    def get_update_model_class(self):
        return UpdateWaasPolicyDetails

    def update_resource(self):
        # Update the policy and wait on its work request to complete.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.update_waas_policy,
            call_fn_args=(),
            call_fn_kwargs=dict(
                waas_policy_id=self.module.params.get("waas_policy_id"),
                update_waas_policy_details=self.get_update_model(),
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=oci_common_utils.UPDATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )

    def delete_resource(self):
        # Delete the policy and wait on its work request to complete.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.delete_waas_policy,
            call_fn_args=(),
            call_fn_kwargs=dict(
                waas_policy_id=self.module.params.get("waas_policy_id"),
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=oci_common_utils.DELETE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )
# Pull in installation-specific customizations; get_custom_class falls back to
# a no-op class when no "WaasPolicyHelperCustom" override is registered.
WaasPolicyHelperCustom = get_custom_class("WaasPolicyHelperCustom")


class ResourceHelper(WaasPolicyHelperCustom, WaasPolicyHelperGen):
    """Combines custom overrides (highest priority) with the generated base."""
    pass
def main():
    """Module entry point: build the Ansible argument spec for the WAAS policy
    resource, construct the resource helper, and dispatch the requested
    create/update/delete operation based on the supplied parameters."""
    module_args = oci_common_utils.get_common_arg_spec(
        supports_create=True, supports_wait=True
    )
    module_args.update(
        dict(
            compartment_id=dict(type="str"),
            display_name=dict(aliases=["name"], type="str"),
            domain=dict(type="str"),
            additional_domains=dict(type="list", elements="str"),
            origins=dict(type="dict"),
            origin_groups=dict(type="dict"),
            # --- policy-level HTTPS/TLS/caching configuration ---
            policy_config=dict(
                type="dict",
                options=dict(
                    certificate_id=dict(type="str"),
                    is_https_enabled=dict(type="bool"),
                    is_https_forced=dict(type="bool"),
                    tls_protocols=dict(
                        type="list",
                        elements="str",
                        choices=["TLS_V1", "TLS_V1_1", "TLS_V1_2", "TLS_V1_3"],
                    ),
                    is_origin_compression_enabled=dict(type="bool"),
                    is_behind_cdn=dict(type="bool"),
                    client_address_header=dict(
                        type="str",
                        choices=[
                            "X_FORWARDED_FOR",
                            "X_CLIENT_IP",
                            "X_REAL_IP",
                            "CLIENT_IP",
                            "TRUE_CLIENT_IP",
                        ],
                    ),
                    is_cache_control_respected=dict(type="bool"),
                    is_response_buffering_enabled=dict(type="bool"),
                    cipher_group=dict(type="str", choices=["DEFAULT"]),
                    load_balancing_method=dict(
                        type="dict",
                        options=dict(
                            method=dict(
                                type="str",
                                required=True,
                                choices=["ROUND_ROBIN", "STICKY_COOKIE", "IP_HASH"],
                            ),
                            name=dict(type="str"),
                            domain=dict(type="str"),
                            expiration_time_in_seconds=dict(type="int"),
                        ),
                    ),
                    websocket_path_prefixes=dict(type="list", elements="str"),
                    is_sni_enabled=dict(type="bool"),
                    health_checks=dict(
                        type="dict",
                        options=dict(
                            is_enabled=dict(type="bool"),
                            method=dict(type="str", choices=["GET", "HEAD", "POST"]),
                            path=dict(type="str"),
                            headers=dict(type="dict"),
                            expected_response_code_group=dict(
                                type="list",
                                elements="str",
                                choices=["2XX", "3XX", "4XX", "5XX"],
                            ),
                            is_response_text_check_enabled=dict(type="bool"),
                            expected_response_text=dict(type="str"),
                            interval_in_seconds=dict(type="int"),
                            timeout_in_seconds=dict(type="int"),
                            healthy_threshold=dict(type="int"),
                            unhealthy_threshold=dict(type="int"),
                        ),
                    ),
                ),
            ),
            # --- Web Application Firewall configuration ---
            waf_config=dict(
                type="dict",
                options=dict(
                    access_rules=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            name=dict(type="str", required=True),
                            criteria=dict(
                                type="list",
                                elements="dict",
                                required=True,
                                options=dict(
                                    condition=dict(
                                        type="str",
                                        required=True,
                                        choices=[
                                            "URL_IS",
                                            "URL_IS_NOT",
                                            "URL_STARTS_WITH",
                                            "URL_PART_ENDS_WITH",
                                            "URL_PART_CONTAINS",
                                            "URL_REGEX",
                                            "URL_DOES_NOT_MATCH_REGEX",
                                            "URL_DOES_NOT_START_WITH",
                                            "URL_PART_DOES_NOT_CONTAIN",
                                            "URL_PART_DOES_NOT_END_WITH",
                                            "IP_IS",
                                            "IP_IS_NOT",
                                            "IP_IN_LIST",
                                            "IP_NOT_IN_LIST",
                                            "HTTP_HEADER_CONTAINS",
                                            "HTTP_METHOD_IS",
                                            "HTTP_METHOD_IS_NOT",
                                            "COUNTRY_IS",
                                            "COUNTRY_IS_NOT",
                                            "USER_AGENT_IS",
                                            "USER_AGENT_IS_NOT",
                                        ],
                                    ),
                                    value=dict(type="str", required=True),
                                    is_case_sensitive=dict(type="bool"),
                                ),
                            ),
                            action=dict(
                                type="str",
                                required=True,
                                choices=[
                                    "ALLOW",
                                    "DETECT",
                                    "BLOCK",
                                    "BYPASS",
                                    "REDIRECT",
                                    "SHOW_CAPTCHA",
                                ],
                            ),
                            block_action=dict(
                                type="str",
                                choices=["SET_RESPONSE_CODE", "SHOW_ERROR_PAGE"],
                            ),
                            block_response_code=dict(type="int"),
                            block_error_page_message=dict(type="str"),
                            block_error_page_code=dict(type="str"),
                            block_error_page_description=dict(type="str"),
                            bypass_challenges=dict(
                                type="list",
                                elements="str",
                                choices=[
                                    "JS_CHALLENGE",
                                    "DEVICE_FINGERPRINT_CHALLENGE",
                                    "HUMAN_INTERACTION_CHALLENGE",
                                    "CAPTCHA",
                                ],
                                no_log=True,
                            ),
                            redirect_url=dict(type="str"),
                            redirect_response_code=dict(
                                type="str", choices=["MOVED_PERMANENTLY", "FOUND"]
                            ),
                            captcha_title=dict(type="str"),
                            captcha_header=dict(type="str"),
                            captcha_footer=dict(type="str"),
                            captcha_submit_label=dict(type="str"),
                            response_header_manipulation=dict(
                                type="list",
                                elements="dict",
                                options=dict(
                                    action=dict(
                                        type="str",
                                        required=True,
                                        choices=[
                                            "EXTEND_HTTP_RESPONSE_HEADER",
                                            "ADD_HTTP_RESPONSE_HEADER",
                                            "REMOVE_HTTP_RESPONSE_HEADER",
                                        ],
                                    ),
                                    header=dict(type="str", required=True),
                                    value=dict(type="str"),
                                ),
                            ),
                        ),
                    ),
                    address_rate_limiting=dict(
                        type="dict",
                        options=dict(
                            is_enabled=dict(type="bool", required=True),
                            allowed_rate_per_address=dict(type="int"),
                            max_delayed_count_per_address=dict(type="int"),
                            block_response_code=dict(type="int"),
                        ),
                    ),
                    captchas=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            url=dict(type="str", required=True),
                            session_expiration_in_seconds=dict(
                                type="int", required=True
                            ),
                            title=dict(type="str", required=True),
                            header_text=dict(type="str"),
                            footer_text=dict(type="str"),
                            failure_message=dict(type="str", required=True),
                            submit_label=dict(type="str", required=True),
                        ),
                    ),
                    device_fingerprint_challenge=dict(
                        type="dict",
                        options=dict(
                            is_enabled=dict(type="bool", required=True),
                            action=dict(type="str", choices=["DETECT", "BLOCK"]),
                            failure_threshold=dict(type="int"),
                            action_expiration_in_seconds=dict(type="int"),
                            failure_threshold_expiration_in_seconds=dict(type="int"),
                            max_address_count=dict(type="int"),
                            max_address_count_expiration_in_seconds=dict(type="int"),
                            challenge_settings=dict(
                                type="dict",
                                options=dict(
                                    block_action=dict(
                                        type="str",
                                        choices=[
                                            "SET_RESPONSE_CODE",
                                            "SHOW_ERROR_PAGE",
                                            "SHOW_CAPTCHA",
                                        ],
                                    ),
                                    block_response_code=dict(type="int"),
                                    block_error_page_message=dict(type="str"),
                                    block_error_page_description=dict(type="str"),
                                    block_error_page_code=dict(type="str"),
                                    captcha_title=dict(type="str"),
                                    captcha_header=dict(type="str"),
                                    captcha_footer=dict(type="str"),
                                    captcha_submit_label=dict(type="str"),
                                ),
                            ),
                        ),
                    ),
                    human_interaction_challenge=dict(
                        type="dict",
                        options=dict(
                            is_enabled=dict(type="bool", required=True),
                            action=dict(type="str", choices=["DETECT", "BLOCK"]),
                            failure_threshold=dict(type="int"),
                            action_expiration_in_seconds=dict(type="int"),
                            failure_threshold_expiration_in_seconds=dict(type="int"),
                            interaction_threshold=dict(type="int"),
                            recording_period_in_seconds=dict(type="int"),
                            set_http_header=dict(
                                type="dict",
                                options=dict(
                                    name=dict(type="str", required=True),
                                    value=dict(type="str", required=True),
                                ),
                            ),
                            challenge_settings=dict(
                                type="dict",
                                options=dict(
                                    block_action=dict(
                                        type="str",
                                        choices=[
                                            "SET_RESPONSE_CODE",
                                            "SHOW_ERROR_PAGE",
                                            "SHOW_CAPTCHA",
                                        ],
                                    ),
                                    block_response_code=dict(type="int"),
                                    block_error_page_message=dict(type="str"),
                                    block_error_page_description=dict(type="str"),
                                    block_error_page_code=dict(type="str"),
                                    captcha_title=dict(type="str"),
                                    captcha_header=dict(type="str"),
                                    captcha_footer=dict(type="str"),
                                    captcha_submit_label=dict(type="str"),
                                ),
                            ),
                            is_nat_enabled=dict(type="bool"),
                        ),
                    ),
                    js_challenge=dict(
                        type="dict",
                        options=dict(
                            is_enabled=dict(type="bool", required=True),
                            action=dict(type="str", choices=["DETECT", "BLOCK"]),
                            failure_threshold=dict(type="int"),
                            action_expiration_in_seconds=dict(type="int"),
                            set_http_header=dict(
                                type="dict",
                                options=dict(
                                    name=dict(type="str", required=True),
                                    value=dict(type="str", required=True),
                                ),
                            ),
                            challenge_settings=dict(
                                type="dict",
                                options=dict(
                                    block_action=dict(
                                        type="str",
                                        choices=[
                                            "SET_RESPONSE_CODE",
                                            "SHOW_ERROR_PAGE",
                                            "SHOW_CAPTCHA",
                                        ],
                                    ),
                                    block_response_code=dict(type="int"),
                                    block_error_page_message=dict(type="str"),
                                    block_error_page_description=dict(type="str"),
                                    block_error_page_code=dict(type="str"),
                                    captcha_title=dict(type="str"),
                                    captcha_header=dict(type="str"),
                                    captcha_footer=dict(type="str"),
                                    captcha_submit_label=dict(type="str"),
                                ),
                            ),
                            are_redirects_challenged=dict(type="bool"),
                            criteria=dict(
                                type="list",
                                elements="dict",
                                options=dict(
                                    condition=dict(
                                        type="str",
                                        required=True,
                                        choices=[
                                            "URL_IS",
                                            "URL_IS_NOT",
                                            "URL_STARTS_WITH",
                                            "URL_PART_ENDS_WITH",
                                            "URL_PART_CONTAINS",
                                            "URL_REGEX",
                                            "URL_DOES_NOT_MATCH_REGEX",
                                            "URL_DOES_NOT_START_WITH",
                                            "URL_PART_DOES_NOT_CONTAIN",
                                            "URL_PART_DOES_NOT_END_WITH",
                                            "IP_IS",
                                            "IP_IS_NOT",
                                            "IP_IN_LIST",
                                            "IP_NOT_IN_LIST",
                                            "HTTP_HEADER_CONTAINS",
                                            "HTTP_METHOD_IS",
                                            "HTTP_METHOD_IS_NOT",
                                            "COUNTRY_IS",
                                            "COUNTRY_IS_NOT",
                                            "USER_AGENT_IS",
                                            "USER_AGENT_IS_NOT",
                                        ],
                                    ),
                                    value=dict(type="str", required=True),
                                    is_case_sensitive=dict(type="bool"),
                                ),
                            ),
                            is_nat_enabled=dict(type="bool"),
                        ),
                    ),
                    origin=dict(type="str"),
                    caching_rules=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            key=dict(type="str", no_log=True),
                            name=dict(type="str", required=True),
                            action=dict(
                                type="str",
                                required=True,
                                choices=["CACHE", "BYPASS_CACHE"],
                            ),
                            caching_duration=dict(type="str"),
                            is_client_caching_enabled=dict(type="bool"),
                            client_caching_duration=dict(type="str"),
                            criteria=dict(
                                type="list",
                                elements="dict",
                                required=True,
                                options=dict(
                                    condition=dict(
                                        type="str",
                                        required=True,
                                        choices=[
                                            "URL_IS",
                                            "URL_STARTS_WITH",
                                            "URL_PART_ENDS_WITH",
                                            "URL_PART_CONTAINS",
                                        ],
                                    ),
                                    value=dict(type="str", required=True),
                                ),
                            ),
                        ),
                    ),
                    custom_protection_rules=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            id=dict(type="str"),
                            action=dict(type="str", choices=["DETECT", "BLOCK"]),
                            exclusions=dict(
                                type="list",
                                elements="dict",
                                options=dict(
                                    target=dict(
                                        type="str",
                                        choices=[
                                            "REQUEST_COOKIES",
                                            "REQUEST_COOKIE_NAMES",
                                            "ARGS",
                                            "ARGS_NAMES",
                                        ],
                                    ),
                                    exclusions=dict(type="list", elements="str"),
                                ),
                            ),
                        ),
                    ),
                    origin_groups=dict(type="list", elements="str"),
                    protection_settings=dict(
                        type="dict",
                        options=dict(
                            block_action=dict(
                                type="str",
                                choices=["SHOW_ERROR_PAGE", "SET_RESPONSE_CODE"],
                            ),
                            block_response_code=dict(type="int"),
                            block_error_page_message=dict(type="str"),
                            block_error_page_code=dict(type="str"),
                            block_error_page_description=dict(type="str"),
                            max_argument_count=dict(type="int"),
                            max_name_length_per_argument=dict(type="int"),
                            max_total_name_length_of_arguments=dict(type="int"),
                            recommendations_period_in_days=dict(type="int"),
                            is_response_inspected=dict(type="bool"),
                            max_response_size_in_ki_b=dict(type="int"),
                            allowed_http_methods=dict(
                                type="list",
                                elements="str",
                                choices=[
                                    "OPTIONS",
                                    "GET",
                                    "HEAD",
                                    "POST",
                                    "PUT",
                                    "DELETE",
                                    "TRACE",
                                    "CONNECT",
                                    "PATCH",
                                    "PROPFIND",
                                ],
                            ),
                            media_types=dict(type="list", elements="str"),
                        ),
                    ),
                    whitelists=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            name=dict(type="str", required=True),
                            addresses=dict(type="list", elements="str"),
                            address_lists=dict(type="list", elements="str"),
                        ),
                    ),
                    good_bots=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            key=dict(type="str", required=True, no_log=True),
                            name=dict(type="str"),
                            is_enabled=dict(type="bool", required=True),
                            description=dict(type="str"),
                        ),
                    ),
                    protection_rules=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            key=dict(type="str", no_log=True),
                            mod_security_rule_ids=dict(type="list", elements="str"),
                            name=dict(type="str"),
                            description=dict(type="str"),
                            action=dict(type="str", choices=["OFF", "DETECT", "BLOCK"]),
                            labels=dict(type="list", elements="str"),
                            exclusions=dict(
                                type="list",
                                elements="dict",
                                options=dict(
                                    target=dict(
                                        type="str",
                                        choices=[
                                            "REQUEST_COOKIES",
                                            "REQUEST_COOKIE_NAMES",
                                            "ARGS",
                                            "ARGS_NAMES",
                                        ],
                                    ),
                                    exclusions=dict(type="list", elements="str"),
                                ),
                            ),
                        ),
                    ),
                    threat_feeds=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            key=dict(type="str", no_log=True),
                            name=dict(type="str"),
                            action=dict(type="str", choices=["OFF", "DETECT", "BLOCK"]),
                            description=dict(type="str"),
                        ),
                    ),
                ),
            ),
            freeform_tags=dict(type="dict"),
            defined_tags=dict(type="dict"),
            waas_policy_id=dict(aliases=["id"], type="str"),
            state=dict(type="str", default="present", choices=["present", "absent"]),
        )
    )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    # Fail cleanly when the OCI SDK could not be imported at module load time.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    resource_helper = ResourceHelper(
        module=module,
        resource_type="waas_policy",
        service_client_class=WaasClient,
        namespace="waas",
    )

    result = dict(changed=False)

    # Dispatch exactly one operation; order matters (name-based lookups first).
    if resource_helper.is_delete_using_name():
        result = resource_helper.delete_using_name()
    elif resource_helper.is_delete():
        result = resource_helper.delete()
    elif resource_helper.is_update_using_name():
        result = resource_helper.update_using_name()
    elif resource_helper.is_update():
        result = resource_helper.update()
    elif resource_helper.is_create():
        result = resource_helper.create()

    module.exit_json(**result)
# Entry point when Ansible executes the module directly.
if __name__ == "__main__":
    main()
| 57.911771 | 160 | 0.469402 | 25,558 | 280,930 | 5.004969 | 0.0412 | 0.016636 | 0.027776 | 0.034311 | 0.942869 | 0.926702 | 0.909645 | 0.890984 | 0.871299 | 0.860081 | 0 | 0.017723 | 0.473591 | 280,930 | 4,850 | 161 | 57.923711 | 0.847259 | 0.00157 | 0 | 0.851104 | 0 | 0.116088 | 0.893599 | 0.075853 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003155 | false | 0.007571 | 0.001682 | 0.001893 | 0.008202 | 0.003365 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
76ff152cbebc0d0c31d2a34b12dd3714e4cb38d1 | 183 | py | Python | ACM-Solution/MAIN113.py | wasi0013/Python-CodeBase | 4a7a36395162f68f84ded9085fa34cc7c9b19233 | [
"MIT"
] | 2 | 2016-04-26T15:40:40.000Z | 2018-07-18T10:16:42.000Z | ACM-Solution/MAIN113.py | wasi0013/Python-CodeBase | 4a7a36395162f68f84ded9085fa34cc7c9b19233 | [
"MIT"
] | 1 | 2016-04-26T15:44:15.000Z | 2016-04-29T14:44:40.000Z | ACM-Solution/MAIN113.py | wasi0013/Python-CodeBase | 4a7a36395162f68f84ded9085fa34cc7c9b19233 | [
"MIT"
] | 1 | 2018-10-02T16:12:19.000Z | 2018-10-02T16:12:19.000Z | import sys
# Precompute the answer table: f(1) = 3, f(2) = 9, and
# f(n) = f(n-2) + 2 * f(n-1) for 3 <= n <= 30.
ans = [0] * 32
ans[1] = 3
ans[2] = 9
for n in range(3, 31):
    ans[n] = ans[n - 2] + 2 * ans[n - 1]

# First input line is the query count; each following line is an index to answer.
for _ in range(int(input())):
    print(ans[int(input())])
| 36.6 | 69 | 0.568306 | 59 | 183 | 1.745763 | 0.271186 | 0.543689 | 0.786408 | 1.009709 | 0.281553 | 0.281553 | 0.281553 | 0.281553 | 0.281553 | 0.281553 | 0 | 0.230769 | 0.076503 | 183 | 4 | 70 | 45.75 | 0.378698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.25 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0a07679d769fe0aa81982a5148fed66d96258a0a | 164 | py | Python | src/solvers/__init__.py | NoSyu/VHUCM | 3fab78f1b0cced8d9b2a2d5b6f3d6f1021ce9a93 | [
"MIT"
] | 8 | 2020-02-04T11:15:07.000Z | 2021-05-31T09:47:32.000Z | src/solvers/__init__.py | NoSyu/VHUCM | 3fab78f1b0cced8d9b2a2d5b6f3d6f1021ce9a93 | [
"MIT"
] | null | null | null | src/solvers/__init__.py | NoSyu/VHUCM | 3fab78f1b0cced8d9b2a2d5b6f3d6f1021ce9a93 | [
"MIT"
] | null | null | null | from .solver import *
from .hred_solver import *
from .speakaddr_solver import *
from .vhred_solver import *
from .vhcr_solver import *
from .vhucm_solver import *
| 23.428571 | 31 | 0.780488 | 23 | 164 | 5.347826 | 0.347826 | 0.585366 | 0.650407 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146341 | 164 | 6 | 32 | 27.333333 | 0.878571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0a1f8058c82d36842a0241ee22c179151ec0dc5c | 213 | py | Python | causal_world/loggers/__init__.py | michaelfeil/CausalWorld | ff866159ef0ee9c407893ae204e93eb98dd68be2 | [
"MIT"
] | 2 | 2021-09-22T08:20:12.000Z | 2021-11-16T14:20:45.000Z | causal_world/loggers/__init__.py | michaelfeil/CausalWorld | ff866159ef0ee9c407893ae204e93eb98dd68be2 | [
"MIT"
] | null | null | null | causal_world/loggers/__init__.py | michaelfeil/CausalWorld | ff866159ef0ee9c407893ae204e93eb98dd68be2 | [
"MIT"
] | null | null | null | from causal_world.loggers.tracker import Tracker
from causal_world.loggers.data_recorder import DataRecorder
from causal_world.loggers.episode import Episode
from causal_world.loggers.data_loader import DataLoader | 53.25 | 59 | 0.892019 | 30 | 213 | 6.133333 | 0.4 | 0.217391 | 0.326087 | 0.478261 | 0.282609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070423 | 213 | 4 | 60 | 53.25 | 0.929293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6a7c9467b2384413f9f061f33eda470f7b393e81 | 11,029 | py | Python | development/tests/unit/lib/exclusions/validators_test.py | jchrisfarris/antiope-scorecards | 82a1e228f4bd23f756c1dec8c0582fcde98de564 | [
"Apache-2.0"
] | 1 | 2020-09-23T21:40:16.000Z | 2020-09-23T21:40:16.000Z | development/tests/unit/lib/exclusions/validators_test.py | jchrisfarris/antiope-scorecards | 82a1e228f4bd23f756c1dec8c0582fcde98de564 | [
"Apache-2.0"
] | null | null | null | development/tests/unit/lib/exclusions/validators_test.py | jchrisfarris/antiope-scorecards | 82a1e228f4bd23f756c1dec8c0582fcde98de564 | [
"Apache-2.0"
] | 3 | 2020-07-11T19:18:12.000Z | 2021-08-14T17:43:06.000Z | from unittest.mock import patch, Mock
from datetime import datetime, timedelta
from lib.dynamodb import requirements_table, ncr_table, scans_table
from lib.exclusions import validators
from tests.unit.api.test_setup_resources import sample_records
# Exclusion-type fixture shared by every test class below: the 'exception'
# exclusion type from the sample records.
EXCEPTION = sample_records.EXCLUSION_TYPES['exception']
class TestAccountId:
    """Validation of the accountId field on a new exclusion."""

    def test_account_id(self):
        # A 12-digit account id is accepted.
        ok, msg = validators.account_id({}, {'accountId': '123123123123'}, EXCEPTION, False)
        assert ok is True
        assert msg is None

    def test_account_id_numeric(self):
        # Non-numeric account ids are rejected.
        ok, msg = validators.account_id({}, {'accountId': 'abcabcabcabc'}, EXCEPTION, False)
        assert ok is False
        assert msg

    def test_account_id_length(self):
        # Account ids shorter than 12 digits are rejected.
        ok, msg = validators.account_id({}, {'accountId': '123123'}, EXCEPTION, False)
        assert ok is False
        assert msg

    def test_account_id_wildcard_admin(self):
        # Admins may use the '*' wildcard.
        ok, msg = validators.account_id({}, {'accountId': '*'}, EXCEPTION, True)
        assert ok is True
        assert msg is None

    def test_account_id_wildcard_user(self):
        # Non-admin users may not use the '*' wildcard.
        ok, msg = validators.account_id({}, {'accountId': '*'}, EXCEPTION, False)
        assert ok is False
        assert msg
class TestExpirationDate:
    """Validation of the expirationDate field against the type's max duration."""

    def test_invalid_expiration_date(self):
        # Unparsable date strings are rejected.
        ok, msg = validators.expiration_date(
            {'type': 'exception'}, {'expirationDate': 'invalid-datetime'}, EXCEPTION, True)
        assert ok is False
        assert msg

    def test_large_expiration_date(self):
        # Dates beyond the configured maximum duration are rejected.
        max_days = sample_records.EXCLUSION_TYPES['exception']['maxDurationInDays']
        too_late = (datetime.now() + timedelta(days=max_days + 1)).strftime('%Y/%m/%d')
        ok, msg = validators.expiration_date(
            {'type': 'exception'}, {'expirationDate': too_late}, EXCEPTION, False)
        assert ok is False
        assert msg

    def test_valid_expiration_date(self):
        # Dates within the configured maximum duration are accepted.
        max_days = sample_records.EXCLUSION_TYPES['exception']['maxDurationInDays']
        in_range = (datetime.now() + timedelta(days=max_days - 1)).strftime('%Y/%m/%d')
        ok, msg = validators.expiration_date(
            {'type': 'exception'}, {'expirationDate': in_range}, EXCEPTION, False)
        assert ok is True
        assert msg is None

    def test_negative_expiration_date(self):
        # Dates in the past are rejected.
        yesterday = (datetime.now() + timedelta(days=-1)).strftime('%Y/%m/%d')
        ok, msg = validators.expiration_date(
            {'type': 'exception'}, {'expirationDate': yesterday}, EXCEPTION, False)
        assert ok is False
        assert msg
class TestResourceId:
    """Validation of the resourceId field against the latest complete scan's NCRs."""

    @patch.object(ncr_table, 'get_item', Mock(return_value={'Item': {'ncr': 'resource'}}))
    @patch.object(scans_table, 'get_latest_complete_scan', Mock(return_value='latest-scan'))
    def test_resource_id(self):
        # The referenced NCR exists, so the resource id is accepted.
        payload = {
            'accountId': '123123123123',
            'requirementId': 'My-Requirement',
            'resourceId': 'resource',
        }
        ok, msg = validators.resource_id({'type': 'exception'}, payload, EXCEPTION, False)
        assert ok is True
        assert msg is None

    @patch.object(ncr_table, 'get_item', Mock(return_value={'Item': {}}))
    @patch.object(scans_table, 'get_latest_complete_scan', Mock(return_value='latest-scan'))
    def test_no_resource(self):
        # The NCR lookup returns no item, so the resource id is rejected.
        payload = {
            'accountId': '123123123123',
            'requirementId': 'My-Requirement',
            'resourceId': 'resource',
        }
        ok, msg = validators.resource_id({'type': 'exception'}, payload, EXCEPTION, False)
        assert ok is False
        assert msg

    @patch.object(ncr_table, 'get_item', Mock(return_value={'Item': {'ncr': 'resource'}}))
    @patch.object(scans_table, 'get_latest_complete_scan', Mock(return_value='latest-scan'))
    def test_wildcard_user(self):
        # Non-admin users may not use the '*' wildcard.
        payload = {
            'accountId': '123123123123',
            'requirementId': 'My-Requirement',
            'resourceId': '*',
        }
        ok, msg = validators.resource_id({'type': 'exception'}, payload, EXCEPTION, False)
        assert ok is False
        assert msg

    @patch.object(ncr_table, 'get_item', Mock(return_value={'Item': {'ncr': 'resource'}}))
    @patch.object(scans_table, 'get_latest_complete_scan', Mock(return_value='latest-scan'))
    def test_wildcard_admin(self):
        # Admins may use the '*' wildcard.
        payload = {
            'accountId': '123123123123',
            'requirementId': 'My-Requirement',
            'resourceId': '*',
        }
        ok, msg = validators.resource_id({'type': 'exception'}, payload, EXCEPTION, True)
        assert ok is True
        assert msg is None
class TestRequirementId:
    """Validation of the requirementId field against the requirements table."""

    @patch.object(requirements_table, 'get_item', Mock(return_value={'Item': {'ncr': 'resource'}}))
    def test_resource_id(self):
        # The requirement exists, so the id is accepted.
        payload = {
            'accountId': '123123123123',
            'requirementId': 'My-Requirement',
            'resourceId': 'resource',
        }
        ok, msg = validators.requirement_id({'type': 'exception'}, payload, EXCEPTION, False)
        assert ok is True
        assert msg is None

    @patch.object(requirements_table, 'get_item', Mock(return_value={'Item': {}}))
    def test_missing_resource(self):
        # The requirement lookup returns no item, so the id is rejected.
        payload = {
            'accountId': '123123123123',
            'requirementId': 'My-Requirement',
            'resourceId': 'resource',
        }
        ok, msg = validators.requirement_id({'type': 'exception'}, payload, EXCEPTION, False)
        assert ok is False
        assert msg

    @patch.object(requirements_table, 'get_item', Mock(return_value={'Item': {'ncr': 'resource'}}))
    def test_wildcard_user(self):
        # Non-admin users may not use the '*' wildcard.
        payload = {
            'accountId': '123123123123',
            'requirementId': '*',
            'resourceId': 'resource',
        }
        ok, msg = validators.requirement_id({'type': 'exception'}, payload, EXCEPTION, False)
        assert ok is False
        assert msg

    @patch.object(requirements_table, 'get_item', Mock(return_value={'Item': {'ncr': 'resource'}}))
    def test_wildcard_admin(self):
        # NOTE(review): despite the name, this passes is_admin=False and expects
        # rejection, unlike TestResourceId.test_wildcard_admin which passes True
        # and expects acceptance. Confirm whether requirement_id is meant to
        # reject '*' even for admins, or whether this test should use True.
        payload = {
            'accountId': '123123123123',
            'requirementId': '*',
            'resourceId': 'resource',
        }
        ok, msg = validators.requirement_id({'type': 'exception'}, payload, EXCEPTION, False)
        assert ok is False
        assert msg
class TestFormFields:
    """Validation of top-level formFields against the exclusion type's schema."""

    def test_form_fields(self):
        # 'reason' is a recognized form field for the 'exception' type.
        change = {'formFields': {'reason': 'because'}}
        ok, msg = validators.form_fields({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is True
        assert msg is None

    def test_form_fields_invalid(self):
        # Unknown form-field names are rejected.
        change = {'formFields': {'invalid-form-field': 'because'}}
        ok, msg = validators.form_fields({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is False
        assert msg
class TestUpdateRequested:
    """Validation of the updateRequested sub-document (form fields + expiration)."""

    @staticmethod
    def _expiry(days_from_now):
        # Build a '%Y/%m/%d' date string relative to today, matching the
        # format the validators parse.
        return (datetime.now() + timedelta(days=days_from_now)).strftime('%Y/%m/%d')

    def test_update_requested_form_fields(self):
        change = {'updateRequested': {'formFields': {'reason': 'because'}}}
        ok, msg = validators.update_requested({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is True
        assert msg is None

    def test_update_requested_form_fields_invalid(self):
        change = {'updateRequested': {'formFields': {'invalid-field': 'because'}}}
        ok, msg = validators.update_requested({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is False
        assert msg

    def test_update_requested_datetime(self):
        # Requested expiration within the max duration is accepted.
        max_days = sample_records.EXCLUSION_TYPES['exception']['maxDurationInDays']
        existing = {
            'type': 'exception',
            'accountId': '123123123123',
            'expirationDate': self._expiry(max_days - 1),
        }
        change = {'updateRequested': {'expirationDate': self._expiry(max_days - 3)}}
        ok, msg = validators.update_requested(existing, change, EXCEPTION, False)
        assert ok is True
        assert msg is None

    def test_update_requested_datetime_invalid(self):
        # Requested expiration beyond the max duration is rejected.
        max_days = sample_records.EXCLUSION_TYPES['exception']['maxDurationInDays']
        existing = {
            'type': 'exception',
            'accountId': '123123123123',
            'expirationDate': self._expiry(max_days - 1),
        }
        change = {'updateRequested': {'expirationDate': self._expiry(max_days + 1)}}
        ok, msg = validators.update_requested(existing, change, EXCEPTION, False)
        assert ok is False
        assert msg

    def test_update_requested_datetime_not_parsable(self):
        # Unparsable requested expiration dates are rejected.
        max_days = sample_records.EXCLUSION_TYPES['exception']['maxDurationInDays']
        existing = {
            'type': 'exception',
            'accountId': '123123123123',
            'expirationDate': self._expiry(max_days - 3),
        }
        change = {'updateRequested': {'expirationDate': 'bad-datetime'}}
        ok, msg = validators.update_requested(existing, change, EXCEPTION, False)
        assert ok is False
        assert msg

    def test_update_requested_extra_keys(self):
        # Keys outside the allowed updateRequested schema are rejected.
        max_days = sample_records.EXCLUSION_TYPES['exception']['maxDurationInDays']
        existing = {
            'type': 'exception',
            'accountId': '123123123123',
            'expirationDate': self._expiry(max_days - 3),
        }
        change = {'updateRequested': {'extraKey': 'extraValue'}}
        ok, msg = validators.update_requested(existing, change, EXCEPTION, False)
        assert ok is False
        assert msg
class TestAdminComments:
    """Validation of the adminComments field."""

    def test_admin_comments(self):
        # Plain strings are accepted.
        change = {'adminComments': 'administrative messaging'}
        ok, msg = validators.admin_comments({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is True
        assert msg is None

    def test_admin_comments_not_string(self):
        # Non-string values are rejected.
        change = {'adminComments': {'something': 'weird'}}
        ok, msg = validators.admin_comments({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is False
        assert msg
class TestHidesResources:
    """Validation of the hidesResources flag."""

    def test_hides_resources(self):
        # Boolean values are accepted.
        change = {'hidesResources': True}
        ok, msg = validators.hides_resources({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is True
        assert msg is None

    def test_hides_resources_not_string(self):
        # Non-boolean values are rejected.
        change = {'hidesResources': {'something': 'weird'}}
        ok, msg = validators.hides_resources({'type': 'exception'}, change, EXCEPTION, False)
        assert ok is False
        assert msg
| 38.295139 | 155 | 0.613202 | 1,074 | 11,029 | 6.121043 | 0.105214 | 0.073623 | 0.10146 | 0.102829 | 0.900669 | 0.889261 | 0.874201 | 0.848494 | 0.823091 | 0.747034 | 0 | 0.020874 | 0.257231 | 11,029 | 287 | 156 | 38.428571 | 0.781616 | 0 | 0 | 0.721311 | 0 | 0 | 0.181975 | 0.008704 | 0 | 0 | 0 | 0 | 0.237705 | 1 | 0.118852 | false | 0 | 0.020492 | 0 | 0.172131 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6ae7dbbec3625d1a6ad692088cf8d3a4f504963a | 43 | py | Python | apps/gestion/views/__init__.py | robertowest/lubre-abogado | 5ed12e2e4131f0deaced9912073304ecec13f155 | [
"Unlicense"
] | null | null | null | apps/gestion/views/__init__.py | robertowest/lubre-abogado | 5ed12e2e4131f0deaced9912073304ecec13f155 | [
"Unlicense"
] | null | null | null | apps/gestion/views/__init__.py | robertowest/lubre-abogado | 5ed12e2e4131f0deaced9912073304ecec13f155 | [
"Unlicense"
] | null | null | null | from . import home
from . import clientecta | 21.5 | 24 | 0.790698 | 6 | 43 | 5.666667 | 0.666667 | 0.588235 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.162791 | 43 | 2 | 24 | 21.5 | 0.944444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7c5071ba4f2be757c4bf86b0b895e6d24c4e737a | 738 | bzl | Python | objc/deps.bzl | kalbasit/rules_proto_grpc | 7e0a97adc8801df1cd74ee435d74bbd857c98a36 | [
"Apache-2.0"
] | 1 | 2021-08-11T23:14:07.000Z | 2021-08-11T23:14:07.000Z | objc/deps.bzl | kalbasit/rules_proto_grpc | 7e0a97adc8801df1cd74ee435d74bbd857c98a36 | [
"Apache-2.0"
] | null | null | null | objc/deps.bzl | kalbasit/rules_proto_grpc | 7e0a97adc8801df1cd74ee435d74bbd857c98a36 | [
"Apache-2.0"
] | null | null | null | load(":repositories.bzl", "objc_repos")
# NOTE: THE RULES IN THIS FILE ARE KEPT FOR BACKWARDS COMPATIBILITY ONLY.
# Please use the rules in repositories.bzl
def objc_proto_compile(**kwargs):
print("Import of rules in deps.bzl is deprecated, please use repositories.bzl")
objc_repos(**kwargs)
def objc_proto_library(**kwargs):
print("Import of rules in deps.bzl is deprecated, please use repositories.bzl")
objc_repos(**kwargs)
def objc_grpc_compile(**kwargs):
print("Import of rules in deps.bzl is deprecated, please use repositories.bzl")
objc_repos(**kwargs)
def objc_grpc_library(**kwargs):
    """Deprecated shim kept for backwards compatibility; forwards to objc_repos."""
    deprecation_msg = "Import of rules in deps.bzl is deprecated, please use repositories.bzl"
    print(deprecation_msg)
    objc_repos(**kwargs)
| 35.142857 | 83 | 0.735772 | 108 | 738 | 4.907407 | 0.287037 | 0.169811 | 0.179245 | 0.226415 | 0.741509 | 0.741509 | 0.741509 | 0.741509 | 0.741509 | 0.741509 | 0 | 0 | 0.158537 | 738 | 20 | 84 | 36.9 | 0.853462 | 0.159892 | 0 | 0.615385 | 0 | 0 | 0.497569 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.307692 | true | 0 | 0.307692 | 0 | 0.615385 | 0.307692 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 10 |
7cf317e171b382a15c5e4216907e78915b8b5739 | 3,343 | py | Python | utils/reid_metric.py | NIRVANALAN/magnifiernet_reid | 2d2dfa331fe55d4d6e83be0b8f03f06a79adb3d1 | [
"MIT"
] | 6 | 2020-05-25T15:36:06.000Z | 2021-03-12T07:53:11.000Z | utils/reid_metric.py | NIRVANALAN/magnifiernet_reid | 2d2dfa331fe55d4d6e83be0b8f03f06a79adb3d1 | [
"MIT"
] | null | null | null | utils/reid_metric.py | NIRVANALAN/magnifiernet_reid | 2d2dfa331fe55d4d6e83be0b8f03f06a79adb3d1 | [
"MIT"
] | 4 | 2020-10-04T01:00:12.000Z | 2021-05-02T06:25:09.000Z | # encoding: utf-8
"""
@author: liaoxingyu
@contact: sherlockliao01@gmail.com
"""
import numpy as np
import torch
from ignite.metrics import Metric
from data.datasets.eval_reid import eval_func
from .re_ranking import re_ranking
class R1_mAP(Metric):
    """Ignite metric computing CMC ranks and mAP for person re-identification.

    Accumulates (feature, pid, camid) triples over update() calls; compute()
    treats the first `num_query` accumulated samples as the query set and the
    remainder as the gallery, builds a squared-euclidean distance matrix and
    delegates scoring to eval_func.
    """
    def __init__(self, num_query, max_rank=50, feat_norm='yes'):
        super(R1_mAP, self).__init__()
        self.num_query = num_query   # leading samples treated as queries
        self.max_rank = max_rank     # kept for API compatibility; not used here
        self.feat_norm = feat_norm   # 'yes' -> L2-normalize features in compute()

    def reset(self):
        # Cleared at the start of each evaluation run by ignite.
        self.feats = []
        self.pids = []
        self.camids = []

    def update(self, output):
        feat, pid, camid = output
        self.feats.append(feat)
        self.pids.extend(np.asarray(pid))
        self.camids.extend(np.asarray(camid))

    def compute(self):
        """Return (cmc, mAP) over the accumulated query/gallery split."""
        feats = torch.cat(self.feats, dim=0)
        if self.feat_norm == 'yes':
            print("The test feature is normalized")
            feats = torch.nn.functional.normalize(feats, dim=1, p=2)
        # query
        qf = feats[:self.num_query]
        q_pids = np.asarray(self.pids[:self.num_query])
        q_camids = np.asarray(self.camids[:self.num_query])
        # gallery
        gf = feats[self.num_query:]
        g_pids = np.asarray(self.pids[self.num_query:])
        g_camids = np.asarray(self.camids[self.num_query:])
        m, n = qf.shape[0], gf.shape[0]
        # Squared euclidean distance: |q|^2 + |g|^2 - 2*q.g
        distmat = torch.pow(qf, 2).sum(dim=1, keepdim=True).expand(m, n) + \
                  torch.pow(gf, 2).sum(dim=1, keepdim=True).expand(n, m).t()
        # BUG FIX: addmm_(beta, alpha, mat1, mat2) positional form is
        # deprecated and removed in modern torch; use keyword beta/alpha.
        distmat.addmm_(qf, gf.t(), beta=1, alpha=-2)
        distmat = distmat.cpu().numpy()
        print('I am saving dist map!!!!')
        np.save('distmap.npy', distmat)
        cmc, mAP = eval_func(distmat, q_pids, g_pids, q_camids, g_camids)
        return cmc, mAP
class R1_mAP_reranking(Metric):
    """Ignite metric computing CMC/mAP with k-reciprocal re-ranking.

    Same accumulation protocol as R1_mAP, but compute() builds the final
    distance matrix with re_ranking() instead of a plain euclidean distance.
    (Dead commented-out euclidean-distance code removed.)
    """
    def __init__(self, num_query, max_rank=50, feat_norm='yes'):
        super(R1_mAP_reranking, self).__init__()
        self.num_query = num_query   # leading samples treated as queries
        self.max_rank = max_rank     # kept for API compatibility; not used here
        self.feat_norm = feat_norm   # 'yes' -> L2-normalize features in compute()

    def reset(self):
        # Cleared at the start of each evaluation run by ignite.
        self.feats = []
        self.pids = []
        self.camids = []

    def update(self, output):
        feat, pid, camid = output
        self.feats.append(feat)
        self.pids.extend(np.asarray(pid))
        self.camids.extend(np.asarray(camid))

    def compute(self):
        """Return (cmc, mAP) using the re-ranked distance matrix."""
        feats = torch.cat(self.feats, dim=0)
        if self.feat_norm == 'yes':
            print("The test feature is normalized")
            feats = torch.nn.functional.normalize(feats, dim=1, p=2)
        # query
        qf = feats[:self.num_query]
        q_pids = np.asarray(self.pids[:self.num_query])
        q_camids = np.asarray(self.camids[:self.num_query])
        # gallery
        gf = feats[self.num_query:]
        g_pids = np.asarray(self.pids[self.num_query:])
        g_camids = np.asarray(self.camids[self.num_query:])
        print("Enter reranking")
        distmat = re_ranking(qf, gf, k1=20, k2=6, lambda_value=0.3)
        cmc, mAP = eval_func(distmat, q_pids, g_pids, q_camids, g_camids)
        return cmc, mAP
| 33.43 | 78 | 0.594376 | 479 | 3,343 | 3.981211 | 0.215031 | 0.075511 | 0.100682 | 0.033561 | 0.837965 | 0.837965 | 0.837965 | 0.837965 | 0.837965 | 0.837965 | 0 | 0.016227 | 0.262638 | 3,343 | 99 | 79 | 33.767677 | 0.757404 | 0.100509 | 0 | 0.742857 | 0 | 0 | 0.040803 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.114286 | false | 0 | 0.071429 | 0 | 0.242857 | 0.057143 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7cfc43d60006e4a1142b31b0e6caaddf3aae711e | 78,500 | py | Python | src/config/schema-transformer/test/test_service_policy.py | UbuntuEvangelist/contrail-controller | 4e8a992230f8f8e91e4f753e19b5442d9e1b446d | [
"Apache-2.0"
] | null | null | null | src/config/schema-transformer/test/test_service_policy.py | UbuntuEvangelist/contrail-controller | 4e8a992230f8f8e91e4f753e19b5442d9e1b446d | [
"Apache-2.0"
] | null | null | null | src/config/schema-transformer/test/test_service_policy.py | UbuntuEvangelist/contrail-controller | 4e8a992230f8f8e91e4f753e19b5442d9e1b446d | [
"Apache-2.0"
] | 18 | 2017-01-12T09:28:44.000Z | 2019-04-18T20:47:42.000Z | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import copy
import gevent
from testtools.matchers import Contains, Not
try:
import to_bgp
except ImportError:
from schema_transformer import to_bgp
try:
import config_db
except ImportError:
from schema_transformer import config_db
from vnc_api.vnc_api import (VirtualNetwork, SequenceType, VirtualNetworkType,
VirtualNetworkPolicyType, NoIdError, NetworkIpam, VirtualMachine,
VnSubnetsType, IpamSubnetType, SubnetType, FloatingIpPool,
FloatingIp, VirtualMachineInterfacePropertiesType, PortType,
VirtualMachineInterface, InterfaceMirrorType, MirrorActionType,
ServiceChainInfo, RoutingPolicy, RoutingPolicyServiceInstanceType,
RouteListType, RouteAggregate,RouteTargetList, ServiceInterfaceTag,
PolicyBasedForwardingRuleType)
from test_case import STTestCase, retries
from test_policy import VerifyPolicy
sys.path.append("../common/tests")
from test_utils import FakeIfmapClient, CassandraCFs
import test_common
class VerifyServicePolicy(VerifyPolicy):
def __init__(self, vnc_lib):
self._vnc_lib = vnc_lib
@retries(5)
def wait_to_get_sc(self, left_vn=None, right_vn=None, si_name=None,
check_create=False):
for sc in to_bgp.ServiceChain.values():
if (left_vn in (None, sc.left_vn) and
right_vn in (None, sc.right_vn) and
si_name in (sc.service_list[0], None)):
if check_create and not sc.created:
raise Exception('Service chain not created')
return sc.name
raise Exception('Service chain not found')
@retries(5)
def wait_to_get_link(self, ident_name, link_fq_name):
self.assertThat(str(FakeIfmapClient._graph[ident_name]['links']),
Contains(link_fq_name))
@retries(5)
def wait_to_remove_link(self, ident_name, link_fq_name):
self.assertThat(str(FakeIfmapClient._graph[ident_name]['links']),
Not(Contains(link_fq_name)))
@retries(5)
def check_service_chain_prefix_match(self, fq_name, prefix):
ri = self._vnc_lib.routing_instance_read(fq_name)
sci = ri.get_service_chain_information()
if sci is None:
print "retrying ... ", test_common.lineno()
raise Exception('Service chain info not found for %s' % fq_name)
self.assertEqual(sci.prefix[0], prefix)
@retries(5)
def check_service_chain_info(self, fq_name, expected):
ri = self._vnc_lib.routing_instance_read(fq_name)
sci = ri.get_service_chain_information()
if sci is None:
raise Exception('Service chain info not found for %s' % fq_name)
self.assertEqual(sci, expected)
@retries(5)
def check_v6_service_chain_info(self, fq_name, expected):
ri = self._vnc_lib.routing_instance_read(fq_name)
sci = ri.get_ipv6_service_chain_information()
if sci is None:
raise Exception('Ipv6 service chain info not found for %s' % fq_name)
self.assertEqual(sci, expected)
@retries(5)
def check_service_chain_is_deleted(self, sc_uuid):
for sc in to_bgp.ServiceChain.values():
if sc_uuid == sc.name:
raise Exception('Service chain %s not deleted' % sc_uuid)
@retries(5)
def check_analyzer_ip(self, vmi_fq_name):
vmi = self._vnc_lib.virtual_machine_interface_read(vmi_fq_name)
vmi_props = vmi.get_virtual_machine_interface_properties()
ip = vmi_props.get_interface_mirror().get_mirror_to().get_analyzer_ip_address()
self.assertTrue(ip != None)
@retries(5)
def check_analyzer_no_ip(self, vmi_fq_name):
vmi = self._vnc_lib.virtual_machine_interface_read(vmi_fq_name)
vmi_props = vmi.get_virtual_machine_interface_properties()
ip = None
try:
ip = vmi_props.get_interface_mirror().get_mirror_to().get_analyzer_ip_address()
except AttributeError as e:
pass
self.assertTrue(ip == None)
@retries(5)
def check_service_chain_pbf_rules(self, vn1, vn2, sc_ri_name, service_name, sc_ip):
mac1 = '02:00:00:00:00:01'
mac2 = '02:00:00:00:00:02'
expected_pbf = PolicyBasedForwardingRuleType(
vlan_tag=1, direction='both', service_chain_address=sc_ip)
for interface_type in ('left', 'right'):
if interface_type == 'left':
expected_pbf.src_mac = mac1
expected_pbf.dst_mac = mac2
vmi_fq_name = ['default-domain', 'default-project',
'default-domain__default-project__%s__1__left__1' %
service_name]
service_ri_fq_name = self.get_ri_name(vn1, sc_ri_name)
else:
expected_pbf.src_mac = mac2
expected_pbf.dst_mac = mac1
vmi_fq_name = ['default-domain', 'default-project',
'default-domain__default-project__%s__1__right__2' %
service_name]
service_ri_fq_name = self.get_ri_name(vni2, sc_ri_name)
vmi = self._vnc_lib.virtual_machine_interface_read(vmi_fq_name)
ri_refs = vmi.get_routing_instance_refs()
for ri_ref in ri_refs:
sc_name = ri_ref['to']
if sc_name == service_ri_fq_name:
pbf_rule = ri_ref['attr']
self.assertEqual(pbf_rule, expected_pbf)
return
raise Exception('Service chain pbf rules not found for %s' % service_ri_fq_name)
@retries(5)
def check_service_chain_ip(self, sc_name):
_SC_IP_CF = 'service_chain_ip_address_table'
cf = CassandraCFs.get_cf('to_bgp_keyspace', _SC_IP_CF)
ip = cf.get(sc_name)['ip_address']
@retries(5)
def check_acl_match_nets(self, fq_name, vn1_fq_name, vn2_fq_name):
acl = self._vnc_lib.access_control_list_read(fq_name)
for rule in acl.access_control_list_entries.acl_rule:
if (rule.match_condition.src_address.virtual_network == vn1_fq_name and
rule.match_condition.dst_address.virtual_network == vn2_fq_name):
return
raise Exception('nets %s/%s not found in ACL rules for %s' %
(vn1_fq_name, vn2_fq_name, fq_name))
@retries(5)
def check_acl_not_match_nets(self, fq_name, vn1_fq_name, vn2_fq_name):
acl = None
try:
acl = self._vnc_lib.access_control_list_read(fq_name)
except NoIdError:
return
found = False
for rule in acl.access_control_list_entries.acl_rule:
if (rule.match_condition.src_address.virtual_network == vn1_fq_name and
rule.match_condition.dst_address.virtual_network == vn2_fq_name):
found = True
if found == True:
raise Exception('nets %s/%s found in ACL rules for %s' %
(vn1_fq_name, vn2_fq_name, fq_name))
return
@retries(5)
def check_acl_match_mirror_to_ip(self, fq_name):
acl = self._vnc_lib.access_control_list_read(fq_name)
for rule in acl.access_control_list_entries.acl_rule:
if (rule.action_list.mirror_to.analyzer_ip_address is not None):
return
raise Exception('mirror to ip not found in ACL rules for %s' % (fq_name))
@retries(5)
def check_acl_not_match_mirror_to_ip(self, fq_name):
acl = None
try:
acl = self._vnc_lib.access_control_list_read(fq_name)
except NoIdError:
return
for rule in acl.access_control_list_entries.acl_rule:
if (rule.action_list.mirror_to.analyzer_ip_address is not None):
raise Exception('mirror to ip %s found in ACL rules for %s' % (fq_name))
return
@retries(10)
def check_vrf_assign_table(self, vmi_fq_name, floating_ip, is_present = True):
vmi = self._vnc_lib.virtual_machine_interface_read(vmi_fq_name)
if is_present:
self.assertEqual(vmi.get_vrf_assign_table().vrf_assign_rule[1].match_condition.src_address.subnet.ip_prefix, floating_ip)
else:
try:
self.assertEqual(vmi.get_vrf_assign_table().vrf_assign_rule[1].match_condition.src_address.subnet.ip_prefix, floating_ip)
raise Exception('floating is still present: ' + floating_ip)
except:
pass
@retries(5)
def check_st_vm_is_deleted(self, name):
vm_obj = config_db.VirtualMachineST.get(name)
if vm_obj is not None:
raise Exception('vm %s still exists' % name)
return
@retries(5)
def check_default_ri_rtgt_imported(self, fq_name, service_ri_fq_name):
ri = self._vnc_lib.routing_instance_read(fq_name)
service_ri = self._vnc_lib.routing_instance_read(service_ri_fq_name)
to_fq_names = [rt_ref['to'] for rt_ref in service_ri.get_route_target_refs()]
for rt_ref in ri.get_route_target_refs() or []:
if rt_ref['to'] in to_fq_names:
return
raise Exception('%s not imported to service_ri:%s ' % (rt_ref['to'],
service_ri_fq_name))
@retries(5)
def check_default_ri_rtgt_not_imported(self, fq_name, service_ri_fq_name):
ri = self._vnc_lib.routing_instance_read(fq_name)
service_ri = self._vnc_lib.routing_instance_read(service_ri_fq_name)
to_fq_names = [rt_ref['to'] for rt_ref in service_ri.get_route_target_refs()]
for rt_ref in ri.get_route_target_refs() or []:
if rt_ref['to'] in to_fq_names:
raise Exception('%s imported to service_ri:%s ' % (rt_ref['to'],
service_ri_fq_name))
@retries(5)
def delete_vn(self, fq_name):
try:
self._vnc_lib.virtual_network_delete(fq_name=fq_name)
print 'vn deleted'
except RefsExistError:
print "retrying ... ", test_common.lineno()
raise Exception('virtual network %s still exists' % str(fq_name))
@retries(5)
def check_acl_action_assign_rules(self, fq_name, vn1_fq_name, vn2_fq_name, sc_ri_fq_name):
acl = self._vnc_lib.access_control_list_read(fq_name)
for rule in acl.access_control_list_entries.acl_rule:
if (rule.match_condition.src_address.virtual_network == vn1_fq_name and
rule.match_condition.dst_address.virtual_network == vn2_fq_name):
if rule.action_list.assign_routing_instance == sc_ri_fq_name:
return
raise Exception('vrf assign for nets %s/%s not matched in ACL rules for %s; sc: %s' %
(vn1_fq_name, vn2_fq_name, fq_name, sc_ri_fq_name))
@retries(5)
def check_all_vmis_are_deleted(self):
vmi_list = self._vnc_lib.virtual_machine_interfaces_list()
if vmi_list['virtual-machine-interfaces']:
raise Exception('virtual machine interfaces still exist' + str(vmi_list))
print 'all virtual machine interfaces deleted'
class TestServicePolicy(STTestCase, VerifyServicePolicy):
    def service_policy_test_with_version(self, version=None):
        """End-to-end service-policy flow for one service-chain version.

        Creates two VNs joined by a policy with one service instance, then
        verifies service RIs, ACL assign rules, v4/v6 chain info, the
        multi-policy-service-chains toggle, routing-policy and
        route-aggregate attachment, and finally tears everything down.
        version is forwarded to create_network_policy (None or 2); it only
        changes the expected allocated service-chain addresses below.
        """
        # create vn1
        vn1_name = self.id() + 'vn1'
        vn1_obj = self.create_virtual_network(vn1_name, ['10.0.0.0/24', '1000::/16'])
        # create vn2
        vn2_name = self.id() + 'vn2'
        vn2_obj = self.create_virtual_network(vn2_name, ['20.0.0.0/24', '2000::/16'])
        service_name = self.id() + 's1'
        np = self.create_network_policy(vn1_obj, vn2_obj, [service_name], version=version)
        seq = SequenceType(1, 1)
        vnp = VirtualNetworkPolicyType(seq)
        vn1_obj.set_network_policy(np, vnp)
        vn2_obj.set_network_policy(np, vnp)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        # Wait for the schema transformer to build the service chain and its
        # per-VN service routing instances.
        sc = self.wait_to_get_sc()
        sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                                  self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_name),
                                  self.get_ri_name(vn2_obj))
        # ACL rules in both VNs, both directions, must assign traffic to the
        # local service RI.
        self.check_acl_action_assign_rules(vn1_obj.get_fq_name(), vn1_obj.get_fq_name_str(),
                vn2_obj.get_fq_name_str(), ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self.check_acl_action_assign_rules(vn1_obj.get_fq_name(), vn2_obj.get_fq_name_str(),
                vn1_obj.get_fq_name_str(), ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self.check_acl_action_assign_rules(vn2_obj.get_fq_name(), vn2_obj.get_fq_name_str(),
                vn1_obj.get_fq_name_str(), ':'.join(self.get_ri_name(vn2_obj, sc_ri_name)))
        self.check_acl_action_assign_rules(vn2_obj.get_fq_name(), vn1_obj.get_fq_name_str(),
                vn2_obj.get_fq_name_str(), ':'.join(self.get_ri_name(vn2_obj, sc_ri_name)))
        si_name = 'default-domain:default-project:' + service_name
        # v2 service chains allocate different addresses than v1.
        if version == 2:
            v4_service_chain_address = '10.0.0.251'
            v6_service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffb'
        else:
            v4_service_chain_address = '10.0.0.252'
            v6_service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffc'
        sci = ServiceChainInfo(prefix = ['10.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn1_obj)),
                               service_chain_address = v4_service_chain_address,
                               service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci.prefix = ['1000::/16']
        sci.service_chain_address = v6_service_chain_address
        self.check_v6_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci = ServiceChainInfo(prefix = ['20.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn2_obj)),
                               service_chain_address = v4_service_chain_address,
                               service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_name), sci)
        sci.prefix = ['2000::/16']
        sci.service_chain_address = v6_service_chain_address
        self.check_v6_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_name), sci)
        # With multi-policy service chains enabled on both VNs, the default
        # RIs import the service RI route targets directly instead of holding
        # RI refs, and the two default RIs get connected to each other.
        vn1_obj.set_multi_policy_service_chains_enabled(True)
        vn2_obj.set_multi_policy_service_chains_enabled(True)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        self.check_ri_ref_not_present(self.get_ri_name(vn1_obj),
                                      self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_default_ri_rtgt_imported(self.get_ri_name(vn1_obj),
                                            self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_ri_ref_not_present(self.get_ri_name(vn2_obj),
                                      self.get_ri_name(vn2_obj, sc_ri_name))
        self.check_default_ri_rtgt_imported(self.get_ri_name(vn2_obj),
                                            self.get_ri_name(vn2_obj, sc_ri_name))
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                                  self.get_ri_name(vn2_obj))
        # Disabling the flag on one side reverts both VNs to RI-ref wiring.
        vn1_obj.set_multi_policy_service_chains_enabled(False)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                                  self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_default_ri_rtgt_not_imported(self.get_ri_name(vn1_obj),
                                                self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_ri_ref_present(self.get_ri_name(vn2_obj),
                                  self.get_ri_name(vn2_obj, sc_ri_name))
        self.check_default_ri_rtgt_not_imported(self.get_ri_name(vn2_obj),
                                                self.get_ri_name(vn2_obj, sc_ri_name))
        self.check_ri_ref_not_present(self.get_ri_name(vn1_obj),
                                      self.get_ri_name(vn2_obj))
        # Attach a routing policy to the service instance and verify the
        # ifmap link to the left service RI appears and disappears.
        rp_name = self.id() + 'rp1'
        rp = RoutingPolicy(rp_name)
        si_obj = self._vnc_lib.service_instance_read(fq_name_str=si_name)
        si_rp = RoutingPolicyServiceInstanceType(left_sequence='1.0')
        rp.add_service_instance(si_obj, si_rp)
        self._vnc_lib.routing_policy_create(rp)
        self.wait_to_get_object(config_db.RoutingPolicyST,
                                rp.get_fq_name_str())
        ident_name = self.get_obj_imid(rp)
        self.wait_to_get_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        rp.del_service_instance(si_obj)
        self._vnc_lib.routing_policy_update(rp)
        self.wait_to_remove_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self._vnc_lib.routing_policy_delete(id=rp.uuid)
        # Same round-trip for a route aggregate on the left interface; its
        # nexthop must resolve to the allocated service-chain address.
        rlist = RouteListType(route=['100.0.0.0/24'])
        ra = RouteAggregate('ra1', aggregate_route_entries=rlist)
        sit = ServiceInterfaceTag(interface_type='left')
        ra.add_service_instance(si_obj, sit)
        self._vnc_lib.route_aggregate_create(ra)
        self.wait_to_get_object(config_db.RouteAggregateST,
                                ra.get_fq_name_str())
        ra = self._vnc_lib.route_aggregate_read(id=ra.uuid)
        self.assertEqual(ra.get_aggregate_route_nexthop(), v4_service_chain_address)
        ident_name = self.get_obj_imid(ra)
        self.wait_to_get_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        ra.del_service_instance(si_obj)
        self._vnc_lib.route_aggregate_update(ra)
        self.wait_to_remove_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self._vnc_lib.route_aggregate_delete(id=ra.uuid)
        # Teardown: detach the policy, then delete policy and both VNs.
        vn1_obj.del_network_policy(np)
        vn2_obj.del_network_policy(np)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
        self.delete_network_policy(np)
        self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
        self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
        self.check_vn_is_deleted(uuid=vn1_obj.uuid)
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
    # end service_policy_test_with_version
def test_service_policy(self):
self.service_policy_test_with_version()
self.service_policy_test_with_version(2)
# end test_service_policy
    def test_service_policy_with_any(self):
        """Service policy where one endpoint is the wildcard 'any' network.

        np1 joins vn1<->vn2 with a service; np2 ('any'<->vn2) and np3
        (vn3<->vn2) reuse np1's action list, so attaching them must create
        the corresponding service chains with per-pair chain addresses.
        """
        # create vn1
        vn1_name = self.id() + 'vn1'
        vn1_obj = self.create_virtual_network(vn1_name, ['10.0.0.0/24'])
        # create vn2
        vn2_name = self.id() + 'vn2'
        vn2_obj = self.create_virtual_network(vn2_name, ['20.0.0.0/24'])
        # create vn3
        vn3_name = self.id() + 'vn3'
        vn3_obj = self.create_virtual_network(vn3_name, ['30.0.0.0/24'])
        service_name = self.id() + 's1'
        np1 = self.create_network_policy(vn1_obj, vn2_obj, [service_name])
        seq = SequenceType(1, 1)
        vnp = VirtualNetworkPolicyType(seq)
        vn1_obj.set_network_policy(np1, vnp)
        # np2 matches 'any' as source; copy np1's action list (including the
        # apply_service action) onto it.
        np2 = self.create_network_policy('any', vn2_obj)
        np2.get_network_policy_entries().policy_rule[0].set_action_list(
            copy.deepcopy(np1.get_network_policy_entries().policy_rule[0].action_list))
        np2.set_network_policy_entries(np2.get_network_policy_entries())
        self._vnc_lib.network_policy_update(np2)
        vn2_obj.set_network_policy(np2, vnp)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        # The 'any' rule on vn2 plus np1 on vn1 yields a vn1<->vn2 chain.
        sc = self.wait_to_get_sc(vn1_obj.get_fq_name_str(),
                                 vn2_obj.get_fq_name_str())
        sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                                  self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_name),
                                  self.get_ri_name(vn2_obj))
        si_name = 'default-domain:default-project:' + service_name
        sci = ServiceChainInfo(prefix = ['10.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn1_obj)),
                               service_chain_address = '10.0.0.252',
                               service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci = ServiceChainInfo(prefix = ['20.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn2_obj)),
                               service_chain_address = '10.0.0.252',
                               service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_name), sci)
        # A third policy vn3<->vn2 with the same action list must produce a
        # second chain with an address from vn3's subnet.
        np3 = self.create_network_policy(vn3_obj, vn2_obj)
        np3.get_network_policy_entries().policy_rule[0].set_action_list(
            copy.deepcopy(np1.get_network_policy_entries().policy_rule[0].action_list))
        np3.set_network_policy_entries(np3.get_network_policy_entries())
        self._vnc_lib.network_policy_update(np3)
        vn3_obj.set_network_policy(np3, vnp)
        self._vnc_lib.virtual_network_update(vn3_obj)
        sc = self.wait_to_get_sc(vn3_obj.get_fq_name_str(),
                                 vn2_obj.get_fq_name_str())
        sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
        self.check_ri_ref_present(self.get_ri_name(vn3_obj),
                                  self.get_ri_name(vn3_obj, sc_ri_name))
        self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_name),
                                  self.get_ri_name(vn2_obj))
        si_name = 'default-domain:default-project:' + service_name
        sci = ServiceChainInfo(prefix = ['30.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn3_obj)),
                               service_chain_address = '30.0.0.252',
                               service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci = ServiceChainInfo(prefix = ['20.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn2_obj)),
                               service_chain_address = '30.0.0.252',
                               service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn3_obj, sc_ri_name), sci)
        # Teardown: detach all policies, delete them and the three VNs.
        vn1_obj.del_network_policy(np1)
        vn2_obj.del_network_policy(np2)
        vn3_obj.del_network_policy(np3)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        self._vnc_lib.virtual_network_update(vn3_obj)
        self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
        self.delete_network_policy(np1)
        self._vnc_lib.network_policy_delete(id=np2.uuid)
        self._vnc_lib.network_policy_delete(id=np3.uuid)
        self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
        self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
        self._vnc_lib.virtual_network_delete(fq_name=vn3_obj.get_fq_name())
        self.check_vn_is_deleted(uuid=vn1_obj.uuid)
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn3_obj))
    # end test_service_policy_with_any
    def test_service_policy_no_vm(self):
        """Apply-service pointing at a service instance with no VM.

        The service RI must NOT be created when the referenced service
        instance has no backing virtual machine.
        """
        # create vn1
        vn1_name = self.id() + 'vn1'
        vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
        # create vn2
        vn2_name = self.id() + 'vn2'
        vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
        service_name = self.id() + 's1'
        np = self.create_network_policy(vn1_obj, vn2_obj)
        seq = SequenceType(1, 1)
        vnp = VirtualNetworkPolicyType(seq)
        vn1_obj.set_network_policy(np, vnp)
        vn2_obj.set_network_policy(np, vnp)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        # Point the rule at a service instance that was never instantiated.
        np.network_policy_entries.policy_rule[0].action_list.apply_service = ["default-domain:default-project:"+service_name]
        np.set_network_policy_entries(np.network_policy_entries)
        self._vnc_lib.network_policy_update(np)
        sc = self.wait_to_get_sc()
        sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
        # No VM behind the SI -> no service RI should exist.
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn1_obj, sc_ri_name))
        vn1_obj.del_network_policy(np)
        vn2_obj.del_network_policy(np)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
        # Clear apply_service before deleting so teardown helpers succeed.
        np.network_policy_entries.policy_rule[0].action_list.apply_service = []
        np.set_network_policy_entries(np.network_policy_entries)
        self._vnc_lib.network_policy_update(np)
        self.delete_network_policy(np)
        self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
        self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
        self.check_vn_is_deleted(uuid=vn1_obj.uuid)
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
    # end test_service_policy_no_vm
    def test_service_policy_delete_vns_deletes_scs(self):
        """Deleting the VNs (policy left in place) must delete the chain."""
        # Test to check deleting VNs without deleting the
        # policy associated, deletes the service chain.
        # create vn1
        vn1_name = self.id() + 'vn1'
        vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
        # create vn2
        vn2_name = self.id() + 'vn2'
        vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
        service_name = self.id() + 's1'
        np = self.create_network_policy(vn1_obj, vn2_obj)
        seq = SequenceType(1, 1)
        vnp = VirtualNetworkPolicyType(seq)
        vn1_obj.set_network_policy(np, vnp)
        vn2_obj.set_network_policy(np, vnp)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        np.network_policy_entries.policy_rule[0].action_list.apply_service = ["default-domain:default-project:"+service_name]
        np.set_network_policy_entries(np.network_policy_entries)
        self._vnc_lib.network_policy_update(np)
        sc = self.wait_to_get_sc()
        sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
        # Delete only the VNs; the policy object is left attached.
        self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
        self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
        self.check_service_chain_is_deleted(sc_uuid=sc)
        self.check_vn_is_deleted(uuid=vn1_obj.uuid)
        self.check_vn_is_deleted(uuid=vn2_obj.uuid)
    # end test_service_policy_delete_vns_deletes_scs
    def test_multi_service_in_policy(self):
        """Three chained in-network services between two VNs.

        Verifies the first/last service RIs are wired to the VN default RIs
        and that the chain info of the last service carries the expected
        chain addresses and source routing instances.
        """
        # create vn1
        vn1_name = self.id() + 'vn1'
        vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
        # create vn2
        vn2_name = self.id() + 'vn2'
        vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
        service_names = [self.id() + 's1', self.id() + 's2', self.id() + 's3']
        np = self.create_network_policy(vn1_obj, vn2_obj, service_names, auto_policy=False, service_mode='in-network')
        seq = SequenceType(1, 1)
        vnp = VirtualNetworkPolicyType(seq)
        vn1_obj.set_network_policy(np, vnp)
        vn2_obj.set_network_policy(np, vnp)
        vn1_uuid = self._vnc_lib.virtual_network_update(vn1_obj)
        vn2_uuid = self._vnc_lib.virtual_network_update(vn2_obj)
        # Wait for both VN identities to show up in the fake ifmap graph.
        for obj in [vn1_obj, vn2_obj]:
            ident_name = self.get_obj_imid(obj)
            gevent.sleep(2)
            ifmap_ident = self.assertThat(FakeIfmapClient._graph, Contains(ident_name))
        sc = self.wait_to_get_sc()
        sc_ri_names = ['service-'+sc+'-default-domain_default-project_' + s for s in service_names]
        # First service RI hangs off vn1, last service RI connects to vn2.
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                                  self.get_ri_name(vn1_obj, sc_ri_names[0]))
        self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_names[2]),
                                  self.get_ri_name(vn2_obj))
        si_name = 'default-domain:default-project:%s.test_multi_service_in_policys3' % self._class_str()
        sci = ServiceChainInfo(prefix = ['10.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn1_obj)),
                               service_chain_address = '20.0.0.250',
                               service_instance = si_name,
                               source_routing_instance = ':'.join(self.get_ri_name(vn2_obj)),
                              )
        self.check_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_names[2]), sci)
        sci = ServiceChainInfo(prefix = ['20.0.0.0/24'],
                               routing_instance = ':'.join(self.get_ri_name(vn2_obj)),
                               service_chain_address = '10.0.0.250',
                               service_instance = si_name,
                               source_routing_instance = ':'.join(self.get_ri_name(vn1_obj)),
                              )
        self.check_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_names[2]), sci)
        # Teardown: detach the policy and verify all service RIs disappear.
        vn1_obj.del_network_policy(np)
        vn2_obj.del_network_policy(np)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn1_obj, sc_ri_names[0]))
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj, sc_ri_names[0]))
        self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
        self.delete_network_policy(np)
        self.delete_vn(fq_name=vn1_obj.get_fq_name())
        self.delete_vn(fq_name=vn2_obj.get_fq_name())
        self.check_vn_is_deleted(uuid=vn1_obj.uuid)
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
    # end test_multi_service_in_policy
    def test_multi_service_policy(self):
        """Three transparent services in one policy, then shrink to two.

        Checks per-service RIs, allocated chain IPs and PBF rules, then
        removes the last service from apply_service and verifies a new
        chain is built whose third service no longer has PBF rules.
        """
        # create vn1
        vn1_name = self.id() + 'vn1'
        vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
        # create vn2
        vn2_name = self.id() + 'vn2'
        vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
        service_names = [self.id() + 's1', self.id() + 's2', self.id() + 's3']
        np = self.create_network_policy(vn1_obj, vn2_obj, service_names)
        seq = SequenceType(1, 1)
        vnp = VirtualNetworkPolicyType(seq)
        vn1_obj.set_network_policy(np, vnp)
        vn2_obj.set_network_policy(np, vnp)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self._vnc_lib.virtual_network_update(vn2_obj)
        sc = self.wait_to_get_sc()
        sc_ri_names = ['service-'+sc+'-default-domain_default-project_' + s for s in service_names]
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                                  self.get_ri_name(vn1_obj, sc_ri_names[0]))
        self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_names[-1]),
                                  self.get_ri_name(vn2_obj))
        self.check_service_chain_prefix_match(fq_name=self.get_ri_name(vn2_obj, sc_ri_names[0]),
                                              prefix='10.0.0.0/24')
        # Every hop of the chain must have an IP allocated in Cassandra.
        self.check_service_chain_ip(sc_ri_names[0])
        self.check_service_chain_ip(sc_ri_names[1])
        self.check_service_chain_ip(sc_ri_names[2])
        vmi_fq_names = [['default-domain', 'default-project',
                         'default-domain__default-project__%s__1__%s' %
                         (service_name, if_type)]
                        for service_name in service_names for if_type in ('left__1', 'right__2')]
        # Chain addresses are allocated downward from .252 per service hop.
        self.check_service_chain_pbf_rules(vn1_obj, vn2_obj, sc_ri_names[0], service_names[0], '10.0.0.252')
        self.check_service_chain_pbf_rules(vn1_obj, vn2_obj, sc_ri_names[1], service_names[1], '10.0.0.251')
        self.check_service_chain_pbf_rules(vn1_obj, vn2_obj, sc_ri_names[2], service_names[2], '10.0.0.250')
        # Drop the last service from the rule; a new chain should replace the
        # old one without PBF rules for the removed third service.
        old_service_list = np.network_policy_entries.policy_rule[0].action_list.apply_service
        np.network_policy_entries.policy_rule[0].action_list.apply_service = \
            old_service_list[:-1]
        np.set_network_policy_entries(np.network_policy_entries)
        self._vnc_lib.network_policy_update(np)
        sc_old = sc
        # Poll until the new (shorter) chain exists and the third service's
        # PBF rules are gone.
        for i in range(0, 5):
            sc = self.wait_to_get_sc()
            if sc_old == sc:
                gevent.sleep(1)
                continue
            sc_ri_names = ['service-'+sc+'-default-domain_default-project_' + s for s in service_names]
            try:
                self.check_service_chain_pbf_rules(vn1_obj, vn2_obj, sc_ri_names[2], service_names[2], '10.0.0.250')
                gevent.sleep(1)
            except Exception:
                break
        self.check_service_chain_pbf_rules(vn1_obj, vn2_obj, sc_ri_names[0], service_names[0], '10.0.0.252')
        self.check_service_chain_pbf_rules(vn1_obj, vn2_obj, sc_ri_names[1], service_names[1], '10.0.0.251')
        # Teardown: detach the policy from both VNs and delete everything.
        vn2_obj.del_network_policy(np)
        self._vnc_lib.virtual_network_update(vn2_obj)
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn1_obj, sc_ri_names[0]))
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn1_obj, sc_ri_names[1]))
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn1_obj, sc_ri_names[2]))
        vn1_obj.del_network_policy(np)
        self._vnc_lib.virtual_network_update(vn1_obj)
        self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
        # Restore apply_service so delete_network_policy cleans up the SIs.
        np.network_policy_entries.policy_rule[0].action_list.apply_service = \
            old_service_list
        self.delete_network_policy(np)
        self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
        self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
        self.check_vn_is_deleted(uuid=vn1_obj.uuid)
        self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
    # end test_multi_service_policy
def test_multi_policy_service_chain(self):
    """Two service-chain policies between the same VN pair with
    multi-policy service chains enabled.

    With the multi-policy knob set, the networks' default routing
    instances must NOT hold direct refs to the chain routing
    instances; the chain route targets are imported into the default
    RIs instead.  Also verifies the per-chain v4/v6 service chain
    addresses and that routing policies / route aggregates attached
    to each service instance link to the matching chain RI.
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, ['10.0.0.0/24', '1000::/16'])
    # create vn2
    vn2_name = self.id() + 'vn2'
    vn2_obj = self.create_virtual_network(vn2_name, ['20.0.0.0/24', '2000::/16'])
    policies = []
    # create two policies, distinguished by source port == i
    for i in range(1, 3):
        service_name = self.id() + 's%s' % i
        np = self.create_network_policy(vn1_obj, vn2_obj, [service_name])
        npe = np.network_policy_entries
        npe.policy_rule[0].src_ports[0].start_port = i
        npe.policy_rule[0].src_ports[0].end_port = i
        np.set_network_policy_entries(npe)
        self._vnc_lib.network_policy_update(np)
        seq = SequenceType(1, i)
        vnp = VirtualNetworkPolicyType(seq)
        vn1_obj.add_network_policy(np, vnp)
        vn2_obj.add_network_policy(np, vnp)
        policies.append(np)
    vn1_obj.set_multi_policy_service_chains_enabled(True)
    vn2_obj.set_multi_policy_service_chains_enabled(True)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    for i in range(1, 3):
        service_name = self.id() + 's%s' % i
        si_name = 'default-domain:default-project:' + service_name
        sc = self.wait_to_get_sc(si_name=si_name)
        sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
        # multi-policy mode: no direct refs from the default RIs to
        # the chain RIs, but the chain route targets are imported
        self.check_ri_ref_not_present(self.get_ri_name(vn1_obj),
            self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_default_ri_rtgt_imported(self.get_ri_name(vn1_obj),
            self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_ri_ref_not_present(self.get_ri_name(vn2_obj),
            self.get_ri_name(vn2_obj, sc_ri_name))
        self.check_default_ri_rtgt_imported(self.get_ri_name(vn2_obj),
            self.get_ri_name(vn2_obj, sc_ri_name))
        # ...while the two default RIs are connected to each other
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
            self.get_ri_name(vn2_obj))
        # v4 chain addresses are allocated downwards from .252 (253-i)
        sci = ServiceChainInfo(prefix = ['10.0.0.0/24'],
            routing_instance = ':'.join(self.get_ri_name(vn1_obj)),
            service_chain_address = '10.0.0.%s' % (253-i),
            service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci.prefix = ['1000::/16']
        if i == 1:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffc'
        else:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffb'
        self.check_v6_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci = ServiceChainInfo(prefix = ['20.0.0.0/24'],
            routing_instance = ':'.join(self.get_ri_name(vn2_obj)),
            service_chain_address = '10.0.0.%s' % (253-i),
            service_instance = si_name)
        self.check_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_name), sci)
        sci.prefix = ['2000::/16']
        if i == 1:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffc'
        else:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffb'
        self.check_v6_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_name), sci)
        # a routing policy attached to the SI must link to the chain RI,
        # and the link must go away when the SI is detached
        rp = RoutingPolicy('rp1')
        si_obj = self._vnc_lib.service_instance_read(fq_name_str=si_name)
        si_rp = RoutingPolicyServiceInstanceType(left_sequence='1.0')
        rp.add_service_instance(si_obj, si_rp)
        self._vnc_lib.routing_policy_create(rp)
        self.wait_to_get_object(config_db.RoutingPolicyST,
            rp.get_fq_name_str())
        ident_name = self.get_obj_imid(rp)
        self.wait_to_get_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        rp.del_service_instance(si_obj)
        self._vnc_lib.routing_policy_update(rp)
        self.wait_to_remove_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self._vnc_lib.routing_policy_delete(id=rp.uuid)
        # a route aggregate attached to the SI's left interface: its
        # resolved nexthop must be this chain's v4 address
        rlist = RouteListType(route=['100.0.0.0/24'])
        ra = RouteAggregate('ra1', aggregate_route_entries=rlist)
        sit = ServiceInterfaceTag(interface_type='left')
        ra.add_service_instance(si_obj, sit)
        self._vnc_lib.route_aggregate_create(ra)
        self.wait_to_get_object(config_db.RouteAggregateST,
            ra.get_fq_name_str())
        ra = self._vnc_lib.route_aggregate_read(id=ra.uuid)
        self.assertEqual(ra.get_aggregate_route_nexthop(), '10.0.0.%s' % (253-i))
        ident_name = self.get_obj_imid(ra)
        self.wait_to_get_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        ra.del_service_instance(si_obj)
        self._vnc_lib.route_aggregate_update(ra)
        self.wait_to_remove_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self._vnc_lib.route_aggregate_delete(id=ra.uuid)
    # cleanup: detach and delete both policies, then the networks
    for np in policies:
        vn1_obj.del_network_policy(np)
        vn2_obj.del_network_policy(np)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
    for np in policies:
        self.delete_network_policy(np)
    self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
    self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
    self.check_vn_is_deleted(uuid=vn1_obj.uuid)
    self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
# end test_multi_policy_service_chain
def _test_multi_rule_service_chain(self):
    """Service chains from a policy whose rules mix VN and CIDR
    endpoints (currently disabled: the leading underscore keeps the
    test runner from collecting it).

    Each of the two rules applies its own service, producing two
    chains; the chain prefix lists must include the CIDRs named in
    the rules, and routing policies / route aggregates attached to
    the service instances must link to the chain RIs.
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, ['1.1.1.0/24',
                                                     '1.1.2.0/24',
                                                     '1000::/16'])
    # create vn2
    vn2_name = self.id() + 'vn2'
    vn2_obj = self.create_virtual_network(vn2_name, ['20.0.0.0/24', '2000::/16'])
    rules = []
    rule1 = {"protocol": "icmp",
             "direction": "<>",
             "src-port": 'any',
             "src": [{"type": "cidr", "value": "1.1.1.0/24"},
                     {"type": "vn", "value": vn1_obj}],
             "dst": [{"type": "vn", "value": vn2_obj}],
             "dst-port": 'any',
             "action": "pass",
             "service_list": [self.id() + 's1'],
             "service_kwargs": {},
             "auto_policy": False
            }
    # NOTE(review): rule1's "dst" is a list while rule2's is a bare
    # dict -- confirm create_network_policy_with_multiple_rules
    # accepts both forms before re-enabling this test
    rule2 = {"protocol": "icmp",
             "direction": "<>",
             "src-port": 'any',
             "src": [{"type": "vn", "value": vn1_obj},
                     {"type": "cidr", "value": "1.1.2.0/24"}],
             "dst": {"type": "vn", "value": vn2_obj},
             "dst-port": 'any',
             "action": "pass",
             "service_list": [self.id() + 's2'],
             "service_kwargs": {},
             "auto_policy": False
            }
    rules.append(rule1)
    rules.append(rule2)
    np = self.create_network_policy_with_multiple_rules(rules)
    seq = SequenceType(1, 1)
    vnp = VirtualNetworkPolicyType(seq)
    vn1_obj.add_network_policy(np, vnp)
    vn2_obj.add_network_policy(np, vnp)
    vn1_obj.set_multi_policy_service_chains_enabled(True)
    vn2_obj.set_multi_policy_service_chains_enabled(True)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    for i in range(1, 3):
        service_name = self.id() + 's%s' % i
        si_name = 'default-domain:default-project:' + service_name
        sc = self.wait_to_get_sc(si_name=si_name)
        sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
        # multi-policy mode: no direct refs from the default RIs to
        # the chain RIs; route targets are imported instead
        self.check_ri_ref_not_present(self.get_ri_name(vn1_obj),
            self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_default_ri_rtgt_imported(self.get_ri_name(vn1_obj),
            self.get_ri_name(vn1_obj, sc_ri_name))
        self.check_ri_ref_not_present(self.get_ri_name(vn2_obj),
            self.get_ri_name(vn2_obj, sc_ri_name))
        self.check_default_ri_rtgt_imported(self.get_ri_name(vn2_obj),
            self.get_ri_name(vn2_obj, sc_ri_name))
        self.check_ri_ref_present(self.get_ri_name(vn1_obj),
            self.get_ri_name(vn2_obj))
        # both vn1 CIDRs named in the rules appear as chain prefixes
        sci = ServiceChainInfo(prefix=['1.1.1.0/24', '1.1.2.0/24'],
            routing_instance = ':'.join(self.get_ri_name(vn1_obj)),
            service_instance = si_name)
        service_chain_address = '1.1.1.%s' % (253-i)
        sci.service_chain_address = service_chain_address
        self.check_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci.prefix = ['1000::/16']
        if i == 1:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffc'
        else:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffb'
        self.check_v6_service_chain_info(self.get_ri_name(vn2_obj, sc_ri_name), sci)
        sci = ServiceChainInfo(prefix = ['20.0.0.0/24'],
            routing_instance = ':'.join(self.get_ri_name(vn2_obj)),
            service_chain_address=service_chain_address,
            service_instance=si_name)
        self.check_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_name), sci)
        sci.prefix = ['2000::/16']
        if i == 1:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffc'
        else:
            sci.service_chain_address = '1000:ffff:ffff:ffff:ffff:ffff:ffff:fffb'
        self.check_v6_service_chain_info(self.get_ri_name(vn1_obj, sc_ri_name), sci)
        # a routing policy attached to the SI links to the chain RI
        rp = RoutingPolicy('rp1')
        si_obj = self._vnc_lib.service_instance_read(fq_name_str=si_name)
        si_rp = RoutingPolicyServiceInstanceType(left_sequence='1.0')
        rp.add_service_instance(si_obj, si_rp)
        self._vnc_lib.routing_policy_create(rp)
        self.wait_to_get_object(config_db.RoutingPolicyST,
            rp.get_fq_name_str())
        ident_name = self.get_obj_imid(rp)
        self.wait_to_get_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        rp.del_service_instance(si_obj)
        self._vnc_lib.routing_policy_update(rp)
        self.wait_to_remove_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self._vnc_lib.routing_policy_delete(id=rp.uuid)
        # a route aggregate: its nexthop must equal the chain address
        rlist = RouteListType(route=['100.0.0.0/24'])
        ra = RouteAggregate('ra1', aggregate_route_entries=rlist)
        sit = ServiceInterfaceTag(interface_type='left')
        ra.add_service_instance(si_obj, sit)
        self._vnc_lib.route_aggregate_create(ra)
        self.wait_to_get_object(config_db.RouteAggregateST,
            ra.get_fq_name_str())
        ra = self._vnc_lib.route_aggregate_read(id=ra.uuid)
        self.assertEqual(ra.get_aggregate_route_nexthop(), service_chain_address)
        ident_name = self.get_obj_imid(ra)
        self.wait_to_get_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        ra.del_service_instance(si_obj)
        self._vnc_lib.route_aggregate_update(ra)
        self.wait_to_remove_link(ident_name, ':'.join(self.get_ri_name(vn1_obj, sc_ri_name)))
        self._vnc_lib.route_aggregate_delete(id=ra.uuid)
    # cleanup
    vn1_obj.del_network_policy(np)
    vn2_obj.del_network_policy(np)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
    self.delete_network_policy(np)
    self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
    self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
    self.check_vn_is_deleted(uuid=vn1_obj.uuid)
    self.check_vn_is_deleted(uuid=vn2_obj.uuid)
    self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
# end test_multi_rule_service_chain
def test_st_restart_service_chain_delete(self):
    """Delete a service chain's entire configuration while the schema
    transformer is down; on restart the transformer must garbage-collect
    the now-stale chain routing instances.

    Fix: the original code reassigned ``vn1_name`` when building the
    second network, shadowing the first name; it now uses ``vn2_name``
    (the generated network names are unchanged).
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
    # create vn2
    vn2_name = self.id() + 'vn2'
    vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
    service_name = self.id() + 's1'
    np = self.create_network_policy(vn1_obj, vn2_obj, [service_name])
    seq = SequenceType(1, 1)
    vnp = VirtualNetworkPolicyType(seq)
    # drop pending local modifications before attaching the policy
    vn1_obj.clear_pending_updates()
    vn2_obj.clear_pending_updates()
    vn1_obj.set_network_policy(np, vnp)
    vn2_obj.set_network_policy(np, vnp)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    # attach a routing policy and a route aggregate to the service
    # instance so the restart cleanup has more state to reconcile
    si_name = 'default-domain:default-project:' + service_name
    rp_name = self.id() + 'rp1'
    rp = RoutingPolicy(rp_name)
    si_obj = self._vnc_lib.service_instance_read(fq_name_str=si_name)
    si_rp = RoutingPolicyServiceInstanceType(left_sequence='1.0')
    rp.add_service_instance(si_obj, si_rp)
    self._vnc_lib.routing_policy_create(rp)
    self.wait_to_get_object(config_db.RoutingPolicyST,
                            rp.get_fq_name_str())
    rlist = RouteListType(route=['100.0.0.0/24'])
    ra = RouteAggregate('ra1', aggregate_route_entries=rlist)
    sit = ServiceInterfaceTag(interface_type='left')
    ra.add_service_instance(si_obj, sit)
    self._vnc_lib.route_aggregate_create(ra)
    self.wait_to_get_object(config_db.RouteAggregateST,
                            ra.get_fq_name_str())
    sc = self.wait_to_get_sc()
    sc_ri_name = ('service-' + sc + '-default-domain_default-project_'
                  + service_name)
    self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                              self.get_ri_name(vn1_obj, sc_ri_name))
    self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_name),
                              self.get_ri_name(vn2_obj))
    # stop st
    test_common.kill_schema_transformer(self._st_greenlet)
    # delete all the chain configuration while the transformer is down
    vn1_obj.del_network_policy(np)
    vn2_obj.del_network_policy(np)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    ra.del_service_instance(si_obj)
    self._vnc_lib.route_aggregate_update(ra)
    rp.del_service_instance(si_obj)
    self._vnc_lib.routing_policy_update(rp)
    self.delete_network_policy(np)
    self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
    self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
    self.check_vn_is_deleted(uuid=vn1_obj.uuid)
    self.check_all_vmis_are_deleted()
    # start st on a free port
    self._st_greenlet = gevent.spawn(test_common.launch_schema_transformer,
                                     self.id(), self._api_server_ip,
                                     self._api_server_port)
    # check if all ri's are deleted
    self.check_ri_is_deleted(fq_name=self.get_ri_name(vn1_obj))
    self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
    self.check_ri_is_deleted(fq_name=self.get_ri_name(vn1_obj, sc_ri_name))
    self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj, sc_ri_name))
# end test_st_restart_service_chain_delete
# test service chain configuration while st is restarted
def test_st_restart_service_chain(self):
    """An existing service chain must survive a schema-transformer
    restart: the chain RIs and their refs are present both before the
    transformer is killed and after it is relaunched.
    """
    left_vn = self.create_virtual_network(self.id() + 'vn1', '10.0.0.0/24')
    right_vn = self.create_virtual_network(self.id() + 'vn2', '20.0.0.0/24')
    service_name = self.id() + 's1'
    policy = self.create_network_policy(left_vn, right_vn, [service_name])
    attachment = VirtualNetworkPolicyType(SequenceType(1, 1))
    left_vn.set_network_policy(policy, attachment)
    right_vn.set_network_policy(policy, attachment)
    self._vnc_lib.virtual_network_update(left_vn)
    self._vnc_lib.virtual_network_update(right_vn)

    def assert_chain_wired():
        # waits for the chain, checks both RI refs, returns the chain
        # RI name so the final cleanup can verify its deletion
        chain = self.wait_to_get_sc()
        ri_name = ('service-' + chain + '-default-domain_default-project_'
                   + service_name)
        self.check_ri_ref_present(self.get_ri_name(left_vn),
                                  self.get_ri_name(left_vn, ri_name))
        self.check_ri_ref_present(self.get_ri_name(right_vn, ri_name),
                                  self.get_ri_name(right_vn))
        return ri_name

    sc_ri_name = assert_chain_wired()
    # stop st and wait for sometime
    test_common.kill_schema_transformer(self._st_greenlet)
    gevent.sleep(5)
    # start st on a free port
    self._st_greenlet = gevent.spawn(test_common.launch_schema_transformer,
                                     self.id(), self._api_server_ip,
                                     self._api_server_port)
    # the chain must come back identical after the restart
    sc_ri_name = assert_chain_wired()
    # cleanup
    left_vn.del_network_policy(policy)
    right_vn.del_network_policy(policy)
    self._vnc_lib.virtual_network_update(left_vn)
    self._vnc_lib.virtual_network_update(right_vn)
    self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(left_vn))
    self.delete_network_policy(policy)
    self._vnc_lib.virtual_network_delete(fq_name=left_vn.get_fq_name())
    self._vnc_lib.virtual_network_delete(fq_name=right_vn.get_fq_name())
    self.check_vn_is_deleted(uuid=left_vn.uuid)
    # all routing instances, including the chain RIs, must be gone
    self.check_ri_is_deleted(fq_name=self.get_ri_name(left_vn))
    self.check_ri_is_deleted(fq_name=self.get_ri_name(right_vn))
    self.check_ri_is_deleted(fq_name=self.get_ri_name(left_vn, sc_ri_name))
    self.check_ri_is_deleted(fq_name=self.get_ri_name(right_vn, sc_ri_name))
# end test_st_restart_service_chain
def test_analyzer(self):
    """Mirror-only (analyzer) policy between two networks: both VNs
    must get RI refs from the analyzer service VN's RI plus ACL
    entries mirroring matching traffic to the analyzer IP, and both
    must be removed when the policy is detached.

    Fixes: dropped dead bindings (``virtual_network_update`` /
    ``assertThat`` results were assigned to never-used locals) and a
    duplicated commented-out cleanup line.
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
    # create vn2
    vn2_name = self.id() + 'vn2'
    vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
    service_name = self.id() + 's1'
    np = self.create_network_policy(
        vn1_obj, vn2_obj, mirror_service=service_name, auto_policy=False,
        service_mode='transparent', service_type='analyzer')
    seq = SequenceType(1, 1)
    vnp = VirtualNetworkPolicyType(seq)
    vn1_obj.set_network_policy(np, vnp)
    vn2_obj.set_network_policy(np, vnp)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    # wait for the transformer to publish both VN identities to ifmap
    for obj in [vn1_obj, vn2_obj]:
        ident_name = self.get_obj_imid(obj)
        gevent.sleep(2)
        self.assertThat(FakeIfmapClient._graph, Contains(ident_name))
    svc_ri_fq_name = 'default-domain:default-project:svc-vn-left:svc-vn-left'.split(':')
    self.check_ri_ref_present(svc_ri_fq_name, self.get_ri_name(vn1_obj))
    self.check_ri_ref_present(svc_ri_fq_name, self.get_ri_name(vn2_obj))
    self.check_acl_match_mirror_to_ip(self.get_ri_name(vn1_obj))
    self.check_acl_match_nets(self.get_ri_name(vn1_obj),
                              ':'.join(vn1_obj.get_fq_name()),
                              ':'.join(vn2_obj.get_fq_name()))
    self.check_acl_match_nets(self.get_ri_name(vn2_obj),
                              ':'.join(vn2_obj.get_fq_name()),
                              ':'.join(vn1_obj.get_fq_name()))
    # detach the policy; the mirror ACL entries must disappear
    vn1_obj.del_network_policy(np)
    vn2_obj.del_network_policy(np)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    self.check_acl_not_match_mirror_to_ip(self.get_ri_name(vn1_obj))
    self.check_acl_not_match_nets(self.get_ri_name(vn1_obj),
                                  ':'.join(vn1_obj.get_fq_name()),
                                  ':'.join(vn2_obj.get_fq_name()))
    self.check_acl_not_match_nets(self.get_ri_name(vn2_obj),
                                  ':'.join(vn2_obj.get_fq_name()),
                                  ':'.join(vn1_obj.get_fq_name()))
    self.delete_network_policy(np)
    self._vnc_lib.virtual_network_delete(id=vn1_obj.uuid)
    self._vnc_lib.virtual_network_delete(id=vn2_obj.uuid)
    self.check_ri_is_deleted(fq_name=vn1_obj.fq_name+[vn1_obj.name])
    self.check_ri_is_deleted(fq_name=vn2_obj.fq_name+[vn2_obj.name])
# end test_analyzer
def test_service_and_analyzer_policy(self):
    """Policy applying both an in-chain service and a mirror
    (analyzer) service: the chain RIs must be wired with the right
    prefix and the mirror ACL entries installed in both networks.
    """
    vn1_obj = self.create_virtual_network(self.id() + 'vn1', '10.0.0.0/24')
    vn2_obj = self.create_virtual_network(self.id() + 'vn2', '20.0.0.0/24')
    service_name = self.id() + 's1'
    analyzer_service_name = self.id() + '_analyzer'
    np = self.create_network_policy(vn1_obj, vn2_obj, [service_name],
                                    analyzer_service_name)
    vnp = VirtualNetworkPolicyType(SequenceType(1, 1))
    vn1_obj.set_network_policy(np, vnp)
    vn2_obj.set_network_policy(np, vnp)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    # in-chain part: chain RIs linked and carrying vn1's prefix
    sc = self.wait_to_get_sc()
    sc_ri_name = ('service-' + sc +
                  '-default-domain_default-project_' + service_name)
    self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                              self.get_ri_name(vn1_obj, sc_ri_name))
    self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_name),
                              self.get_ri_name(vn2_obj))
    self.check_service_chain_prefix_match(
        fq_name=self.get_ri_name(vn2_obj, sc_ri_name),
        prefix='10.0.0.0/24')
    # mirror part: both VNs reference the analyzer VN's RI and carry
    # mirror ACL entries
    svc_ri_fq_name = ['default-domain', 'default-project',
                      'svc-vn-left', 'svc-vn-left']
    self.check_ri_ref_present(svc_ri_fq_name, self.get_ri_name(vn1_obj))
    self.check_ri_ref_present(svc_ri_fq_name, self.get_ri_name(vn2_obj))
    self.check_acl_match_mirror_to_ip(self.get_ri_name(vn1_obj))
    vn1_fq = ':'.join(vn1_obj.get_fq_name())
    vn2_fq = ':'.join(vn2_obj.get_fq_name())
    self.check_acl_match_nets(self.get_ri_name(vn1_obj), vn1_fq, vn2_fq)
    self.check_acl_match_nets(self.get_ri_name(vn2_obj), vn2_fq, vn1_fq)
    # cleanup
    vn1_obj.del_network_policy(np)
    vn2_obj.del_network_policy(np)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(vn1_obj))
    self.delete_network_policy(np)
    self._vnc_lib.virtual_network_delete(fq_name=vn1_obj.get_fq_name())
    self._vnc_lib.virtual_network_delete(fq_name=vn2_obj.get_fq_name())
    self.check_vn_is_deleted(uuid=vn1_obj.uuid)
    self.check_ri_is_deleted(fq_name=self.get_ri_name(vn2_obj))
# end test_service_and_analyzer_policy
def test_fip(self):
    """Floating IP attached to a service instance's left VMI: the
    VMI's vrf-assign table must gain a rule matching the floating IP
    address and drop it again when the FIP is deleted.

    Fixes: removed dead bindings (``vn3_uuid``, ``fip_uuid``,
    ``ifmap_ident``) and the pointless single-element
    ``for obj in [fip_obj]`` loop.
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
    # create vn2
    vn2_name = self.id() + 'vn2'
    vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
    service_name = self.id() + 's1'
    np = self.create_network_policy(vn1_obj, vn2_obj, [service_name],
                                    service_mode='in-network',
                                    auto_policy=True)
    sc = self.wait_to_get_sc()
    sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
    self.check_ri_ref_present(self.get_ri_name(vn1_obj),
                              self.get_ri_name(vn1_obj, sc_ri_name))
    self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_name),
                              self.get_ri_name(vn2_obj))
    # left interface of the service VM created for the chain
    vmi_fq_name = 'default-domain:default-project:default-domain__default-project__%s.test_fips1__1__left__1' % self._class_str()
    vmi = self._vnc_lib.virtual_machine_interface_read(vmi_fq_name.split(':'))
    # create an external network with a floating-ip pool
    vn3_name = 'vn-public'
    vn3_obj = VirtualNetwork(vn3_name)
    vn3_obj.set_router_external(True)
    ipam3_obj = NetworkIpam('ipam3')
    self._vnc_lib.network_ipam_create(ipam3_obj)
    vn3_obj.add_network_ipam(ipam3_obj, VnSubnetsType(
        [IpamSubnetType(SubnetType("192.168.7.0", 24))]))
    self._vnc_lib.virtual_network_create(vn3_obj)
    fip_pool_name = 'vn_public_fip_pool'
    fip_pool = FloatingIpPool(fip_pool_name, vn3_obj)
    self._vnc_lib.floating_ip_pool_create(fip_pool)
    fip_obj = FloatingIp("fip1", fip_pool)
    default_project = self._vnc_lib.project_read(
        fq_name=[u'default-domain', u'default-project'])
    fip_obj.set_project(default_project)
    self._vnc_lib.floating_ip_create(fip_obj)
    # associate the floating IP with the service VMI
    fip_obj.set_virtual_machine_interface(vmi)
    self._vnc_lib.floating_ip_update(fip_obj)
    fip_obj = self._vnc_lib.floating_ip_read(fip_obj.get_fq_name())
    ident_name = self.get_obj_imid(fip_obj)
    self.assertThat(FakeIfmapClient._graph, Contains(ident_name))
    self.wait_to_get_link(ident_name, vmi_fq_name)
    fip = fip_obj.get_floating_ip_address()
    self.check_vrf_assign_table(vmi.get_fq_name(), fip, True)
    # delete the FIP; the vrf-assign rule must be withdrawn
    fip_fq_name = fip_obj.get_fq_name()
    self._vnc_lib.floating_ip_delete(fip_fq_name)
    self.wait_to_remove_link(self.get_obj_imid(vmi), fip_fq_name)
    self.check_vrf_assign_table(vmi.get_fq_name(), fip, False)
    # cleanup
    self.delete_network_policy(np)
    gevent.sleep(1)
    self._vnc_lib.floating_ip_pool_delete(id=fip_pool.uuid)
    self._vnc_lib.virtual_network_delete(id=vn1_obj.uuid)
    self._vnc_lib.virtual_network_delete(id=vn2_obj.uuid)
    self._vnc_lib.virtual_network_delete(id=vn3_obj.uuid)
    self.check_ri_is_deleted(vn1_obj.fq_name+[vn1_obj.name])
    self.check_ri_is_deleted(vn2_obj.fq_name+[vn2_obj.name])
    self.check_ri_is_deleted(vn3_obj.fq_name+[vn3_obj.name])
# end test_fip
def test_pnf_service(self):
    """Service chain through a physical device (PNF): the left VN's
    default routing instance must carry the has_pnf flag while the
    chain exists and clear it once the policy is detached.
    """
    left_vn = self.create_virtual_network(self.id() + 'vn1', '10.0.0.0/24')
    right_vn = self.create_virtual_network(self.id() + 'vn2', '20.0.0.0/24')
    service_name = self.id() + 's1'
    np = self.create_network_policy(
        left_vn, right_vn, [service_name],
        service_virtualization_type='physical-device')
    policy_type = VirtualNetworkPolicyType(SequenceType(1, 1))
    left_vn.set_network_policy(np, policy_type)
    right_vn.set_network_policy(np, policy_type)
    self._vnc_lib.virtual_network_update(left_vn)
    self._vnc_lib.virtual_network_update(right_vn)
    sc = self.wait_to_get_sc()
    sc_ri_name = ('service-%s-default-domain_default-project_%s'
                  % (sc, service_name))
    self.check_ri_ref_present(self.get_ri_name(left_vn),
                              self.get_ri_name(left_vn, sc_ri_name))
    self.check_ri_ref_present(self.get_ri_name(right_vn, sc_ri_name),
                              self.get_ri_name(right_vn))
    self.check_service_chain_prefix_match(
        fq_name=self.get_ri_name(right_vn, sc_ri_name),
        prefix='10.0.0.0/24')
    # a PNF chain marks the source VN's default routing instance
    ri = self._vnc_lib.routing_instance_read(
        fq_name=self.get_ri_name(left_vn))
    self.assertEqual(ri.get_routing_instance_has_pnf(), True)
    # detach the policy; the flag must be cleared again
    left_vn.del_network_policy(np)
    right_vn.del_network_policy(np)
    self._vnc_lib.virtual_network_update(left_vn)
    self._vnc_lib.virtual_network_update(right_vn)
    self.check_ri_refs_are_deleted(fq_name=self.get_ri_name(left_vn))
    ri = self._vnc_lib.routing_instance_read(
        fq_name=self.get_ri_name(left_vn))
    self.assertEqual(ri.get_routing_instance_has_pnf(), False)
    # cleanup
    self.delete_network_policy(np)
    self._vnc_lib.virtual_network_delete(fq_name=left_vn.get_fq_name())
    self._vnc_lib.virtual_network_delete(fq_name=right_vn.get_fq_name())
    self.check_vn_is_deleted(uuid=left_vn.uuid)
    self.check_ri_is_deleted(fq_name=self.get_ri_name(right_vn))
# end test_pnf_service
def test_interface_mirror(self):
    """Interface-level mirroring: setting an analyzer name in a VMI's
    interface-mirror properties must make the analyzer IP resolvable,
    and clearing the name must remove it again.

    Fix: removed the dead ``ifmap_ident`` binding — ``assertThat``
    performs the check itself and returns None.
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
    service_name = self.id() + 's1'
    si_fq_name_str = self._create_service(
        [('left', vn1_obj)], service_name, False,
        service_mode='transparent', service_type='analyzer')
    ident_name = self.get_obj_imid(vn1_obj)
    # give the transformer time to publish the VN identity to ifmap
    gevent.sleep(2)
    self.assertThat(FakeIfmapClient._graph, Contains(ident_name))
    # create virtual machine interface with interface mirror property
    vmi_name = self.id() + 'vmi1'
    vmi_fq_name = ['default-domain', 'default-project', vmi_name]
    vmi = VirtualMachineInterface(vmi_name, parent_type='project',
                                  fq_name=vmi_fq_name)
    vmi.add_virtual_network(vn1_obj)
    props = VirtualMachineInterfacePropertiesType()
    mirror_type = InterfaceMirrorType()
    mirror_act_type = MirrorActionType()
    mirror_act_type.analyzer_name = 'default-domain:default-project:%s.test_interface_mirrors1' % self._class_str()
    mirror_type.mirror_to = mirror_act_type
    props.interface_mirror = mirror_type
    vmi.set_virtual_machine_interface_properties(props)
    self._vnc_lib.virtual_machine_interface_create(vmi)
    self.check_analyzer_ip(vmi_fq_name)
    # clear the analyzer name; the resolved mirror IP must go away
    props = VirtualMachineInterfacePropertiesType()
    mirror_type = InterfaceMirrorType()
    mirror_act_type = MirrorActionType()
    mirror_act_type.analyzer_name = None
    mirror_type.mirror_to = mirror_act_type
    props.interface_mirror = mirror_type
    vmi.set_virtual_machine_interface_properties(props)
    self._vnc_lib.virtual_machine_interface_update(vmi)
    self.check_analyzer_no_ip(vmi_fq_name)
    # cleanup
    self._vnc_lib.virtual_machine_interface_delete(id=vmi.uuid)
    self._vnc_lib.virtual_network_delete(id=vn1_obj.uuid)
    self.check_vn_is_deleted(uuid=vn1_obj.uuid)
    self.delete_service(si_fq_name_str)
#end test_interface_mirror
def test_transit_vn(self):
    """allow_transit on a VN in a service chain re-exports that VN's
    route target(s) from the chain routing instance.

    Toggles the flag on vn1, then on both VNs, and exercises the
    user-configured route target lists: targets from
    route_target_list are exported from the chain RI when transit is
    on, while targets from export_route_target_list are not.
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
    # create vn2
    vn2_name = self.id() + 'vn2'
    vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')
    service_name = self.id() + 's1'
    np = self.create_network_policy(vn1_obj, vn2_obj, [service_name])
    seq = SequenceType(1, 1)
    vnp = VirtualNetworkPolicyType(seq)
    vn1_obj.set_network_policy(np, vnp)
    vn2_obj.set_network_policy(np, vnp)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    sc = self.wait_to_get_sc()
    sc_ri_name = 'service-'+sc+'-default-domain_default-project_' + service_name
    #basic checks
    self.check_ri_ref_present(self.get_ri_name(vn1_obj),
        self.get_ri_name(vn1_obj, sc_ri_name))
    self.check_ri_ref_present(self.get_ri_name(vn2_obj, sc_ri_name),
        self.get_ri_name(vn2_obj))
    self.check_service_chain_prefix_match(
        fq_name=self.get_ri_name(vn2_obj, sc_ri_name), prefix='10.0.0.0/24')
    vn1_st = config_db.VirtualNetworkST.get(vn1_obj.get_fq_name_str())
    rt_vn1 = vn1_st.get_route_target()
    #vn1 rt is in not sc ri
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, False)
    #set transit and check vn1 rt is in sc ri
    vn_props = VirtualNetworkType()
    vn_props.allow_transit = True
    vn1_obj.set_virtual_network_properties(vn_props)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj, sc_ri_name),
        rt_vn1, True, 'export')
    #unset transit and check vn1 rt is not in sc ri
    vn_props.allow_transit = False
    vn1_obj.set_virtual_network_properties(vn_props)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, False)
    #set transit on both vn1, vn2 and check vn1 & vn2 rt's are in sc ri
    vn_props.allow_transit = True
    vn1_obj.set_virtual_network_properties(vn_props)
    vn2_obj.set_virtual_network_properties(vn_props)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    vn2_st = config_db.VirtualNetworkST.get(vn2_obj.get_fq_name_str())
    rt_vn2 = vn2_st.get_route_target()
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn2_obj,sc_ri_name),
        rt_vn2, True, 'export')
    #unset transit on both vn1, vn2 and check vn1 & vn2 rt's are not in sc ri
    vn_props.allow_transit = False
    vn1_obj.set_virtual_network_properties(vn_props)
    vn2_obj.set_virtual_network_properties(vn_props)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, False)
    self.check_rt_in_ri(self.get_ri_name(vn2_obj,sc_ri_name),
        rt_vn2, False)
    #test external rt
    # route_target_list entries are exported from the chain RI with
    # transit on; export_route_target_list entries are not
    rtgt_list = RouteTargetList(route_target=['target:1:1'])
    vn1_obj.set_route_target_list(rtgt_list)
    rtgt_list = RouteTargetList(route_target=['target:2:1'])
    vn1_obj.set_export_route_target_list(rtgt_list)
    vn_props.allow_transit = True
    vn1_obj.set_virtual_network_properties(vn_props)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:1:1', True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:2:1', False)
    #modify external rt
    rtgt_list = RouteTargetList(route_target=['target:1:2'])
    vn1_obj.set_route_target_list(rtgt_list)
    rtgt_list = RouteTargetList(route_target=['target:2:2'])
    vn1_obj.set_export_route_target_list(rtgt_list)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:1:2', True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:2:2', False)
    #have more than one external rt
    rtgt_list = RouteTargetList(route_target=['target:1:1', 'target:1:2'])
    vn1_obj.set_route_target_list(rtgt_list)
    rtgt_list = RouteTargetList(route_target=['target:2:1', 'target:2:2'])
    vn1_obj.set_export_route_target_list(rtgt_list)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:1:1', True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:1:2', True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:2:1', False)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:2:2', False)
    #unset external rt
    vn1_obj.set_route_target_list(RouteTargetList())
    vn1_obj.set_export_route_target_list(RouteTargetList())
    self._vnc_lib.virtual_network_update(vn1_obj)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        rt_vn1, True, 'export')
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:1:1', False)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:1:2', False)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:2:1', False)
    self.check_rt_in_ri(self.get_ri_name(vn1_obj,sc_ri_name),
        'target:2:2', False)
    # cleanup
    vn1_obj.del_network_policy(np)
    vn2_obj.del_network_policy(np)
    self._vnc_lib.virtual_network_delete(id=vn1_obj.uuid)
    self._vnc_lib.virtual_network_delete(id=vn2_obj.uuid)
    self.check_ri_is_deleted(vn1_obj.fq_name+[vn1_obj.name])
    self.check_ri_is_deleted(vn2_obj.fq_name+[vn2_obj.name])
    self.delete_network_policy(np)
# end test_transit_vn
def test_misc(self):
    """Exercise ServiceChain introspect/equality plus VM/VMI lifecycle.

    Steps:
      * create a service chain between two fresh networks and wait for it
      * build the chain's sandesh introspect and verify every field
      * check ServiceChain equality semantics: find_or_create returns the
        same reference, and a deepcopy compares equal until a field is
        mutated
      * create a VirtualMachine with a child VMI, then delete everything
        and verify cleanup
    """
    # create vn1
    vn1_name = self.id() + 'vn1'
    vn1_obj = self.create_virtual_network(vn1_name, '10.0.0.0/24')
    # create vn2
    vn2_name = self.id() + 'vn2'
    vn2_obj = self.create_virtual_network(vn2_name, '20.0.0.0/24')

    service_name = self.id() + 's1'
    np = self.create_network_policy(vn1_obj, vn2_obj, [service_name])
    seq = SequenceType(1, 1)
    vnp = VirtualNetworkPolicyType(seq)

    # attach the policy to both ends and wait for the chain to appear
    vn1_obj.set_network_policy(np, vnp)
    vn2_obj.set_network_policy(np, vnp)
    self._vnc_lib.virtual_network_update(vn1_obj)
    self._vnc_lib.virtual_network_update(vn2_obj)
    self.wait_to_get_sc(check_create=True)

    sp_list1 = [PortType(start_port=5000, end_port=8000),
                PortType(start_port=2000, end_port=3000)]
    dp_list1 = [PortType(start_port=1000, end_port=1500),
                PortType(start_port=500, end_port=800)]
    service_names1 = [self.id() + 's1', self.id() + 's2', self.id() + 's3']

    # identical second set, used to show find_or_create dedupes chains
    sp_list2 = [PortType(start_port=5000, end_port=8000),
                PortType(start_port=2000, end_port=3000)]
    dp_list2 = [PortType(start_port=1000, end_port=1500),
                PortType(start_port=500, end_port=800)]
    service_names2 = [self.id() + 's1', self.id() + 's2', self.id() + 's3']

    sc11 = config_db.ServiceChain.find_or_create(
        "vn1", "vn2", "<>", sp_list1, dp_list1, "icmp", service_names1)

    # build service chain introspect and check if it has got right values
    sandesh_sc = sc11.build_introspect()
    self.assertEqual(sandesh_sc.left_virtual_network, sc11.left_vn)
    self.assertEqual(sandesh_sc.right_virtual_network, sc11.right_vn)
    self.assertEqual(sandesh_sc.protocol, sc11.protocol)
    # introspect renders port ranges as "start-end" joined with commas
    port_list = []
    for sp in sp_list1:
        port_list.append("%s-%s" % (sp.start_port, sp.end_port))
    self.assertEqual(sandesh_sc.src_ports, ','.join(port_list))
    port_list = []
    for dp in dp_list1:
        port_list.append("%s-%s" % (dp.start_port, dp.end_port))
    self.assertEqual(sandesh_sc.dst_ports, ','.join(port_list))
    self.assertEqual(sandesh_sc.direction, sc11.direction)
    self.assertEqual(sandesh_sc.service_list, service_names1)

    sc22 = config_db.ServiceChain.find_or_create(
        "vn1", "vn2", "<>", sp_list1, dp_list1, "icmp", service_names2)

    sc33 = copy.deepcopy(sc11)

    # check for SC equality, sc11 && sc22 are references to the same chain
    self.assertEqual(sc11, sc22)

    # sc33 is a distinct object (deepcopy) but value-equal to sc11
    self.assertEqual(sc11, sc33)

    # mutate each field of the copy in turn; equality must break
    sc33.protocol = "tcp"
    self.assertTrue(sc11 != sc33)
    sc33.service_list = []
    self.assertTrue(sc11 != sc33)
    sc33.direction = "<"
    self.assertTrue(sc11 != sc33)
    sc33.dp_list = []
    self.assertTrue(sc11 != sc33)
    sc33.sp_list = []
    self.assertTrue(sc11 != sc33)
    sc33.name = "dummy"
    self.assertTrue(sc11 != sc33)

    sc11.delete()
    vn1_obj.del_network_policy(np)
    vn2_obj.del_network_policy(np)

    # create virtual machine and create VMI and set VM as parent of VMI
    # perform delete operations
    vm_name = self.id() + 'vm1'
    vm = VirtualMachine(vm_name)
    self._vnc_lib.virtual_machine_create(vm)

    # create virtual machine interface
    vmi_name = self.id() + 'vmi1'
    vmi = VirtualMachineInterface(vmi_name, parent_type='virtual-machine',
                                  fq_name=[vm_name, vmi_name])
    vmi.add_virtual_network(vn1_obj)
    self._vnc_lib.virtual_machine_interface_create(vmi)

    # tear everything down and confirm the schema-transformer VM is gone
    self._vnc_lib.virtual_machine_interface_delete(id=vmi.uuid)
    self._vnc_lib.virtual_network_delete(id=vn1_obj.uuid)
    self._vnc_lib.virtual_network_delete(id=vn2_obj.uuid)
    self._vnc_lib.virtual_machine_delete(id=vm.uuid)
    self.check_st_vm_is_deleted(vm_name)
    self.delete_network_policy(np)
# end test_misc
# end class TestServicePolicy
| 47.118848 | 137 | 0.641274 | 11,111 | 78,500 | 4.097381 | 0.03519 | 0.043492 | 0.042306 | 0.061108 | 0.873413 | 0.844045 | 0.818631 | 0.800224 | 0.781136 | 0.769934 | 0 | 0.031856 | 0.257401 | 78,500 | 1,665 | 138 | 47.147147 | 0.749117 | 0.027936 | 0 | 0.712991 | 0 | 0.000755 | 0.062138 | 0.022213 | 0 | 0 | 0 | 0 | 0.02568 | 0 | null | null | 0.003021 | 0.020393 | null | null | 0.003021 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6b1c1bd8eb02e04d3609748b5d7893ee22a32449 | 29,842 | py | Python | tradenity/resources/payment_token.py | tradenity/python-sdk | d13fbe23f4d6ff22554c6d8d2deaf209371adaf1 | [
"Apache-2.0"
] | 1 | 2020-03-19T04:09:17.000Z | 2020-03-19T04:09:17.000Z | tradenity/resources/payment_token.py | tradenity/python-sdk | d13fbe23f4d6ff22554c6d8d2deaf209371adaf1 | [
"Apache-2.0"
] | null | null | null | tradenity/resources/payment_token.py | tradenity/python-sdk | d13fbe23f4d6ff22554c6d8d2deaf209371adaf1 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Tradenity API
Tradenity eCommerce Rest API
Contact: support@tradenity.com
"""
from __future__ import absolute_import
import re
import pprint
# python 2 and python 3 compatibility library
import six
from tradenity.api_client import ApiClient
class PaymentToken(object):
swagger_types = {
'id': 'str',
'meta': 'InstanceMeta',
'customer': 'Customer',
'reusable': 'bool',
'status': 'str',
'token': 'str'
}
attribute_map = {
'id': 'id',
'meta': '__meta',
'customer': 'customer',
'reusable': 'reusable',
'status': 'status',
'token': 'token'
}
api_client = None
def __init__(self, id=None, meta=None, customer=None, reusable=None, status=None, token=None):
    """PaymentToken - a model defined in Swagger.

    :param id: resource id (may also be derived later from ``meta.href``)
    :param meta: instance metadata (InstanceMeta)
    :param customer: owning customer (Customer)
    :param reusable: whether the token may be used more than once (bool)
    :param status: one of "new", "used", "expired" — validated by the
        ``status`` setter
    :param token: the raw token string
    """
    self._id = id
    self._meta = None
    self._customer = None
    self._reusable = None
    self._status = None
    self._token = None
    self.discriminator = None
    # meta/reusable are only assigned when provided so the backing
    # fields stay None by default; the others go through their setters
    # unconditionally (status gets validated there).
    if meta is not None:
        self.meta = meta
    self.customer = customer
    if reusable is not None:
        self.reusable = reusable
    self.status = status
    self.token = token
@property
def id(self):
    """Gets the id of this PaymentToken.

    Prefers an explicitly assigned id; otherwise lazily derives it from
    the last path segment of ``meta.href`` and caches the result.
    Returns None when neither source is available.
    """
    if self._id:
        return self._id
    elif self.meta is None:
        return None
    else:
        self._id = self.meta.href.split("/")[-1]
        return self._id
@id.setter
def id(self, new_id):
    """Sets the id of this PaymentToken (overrides any meta-derived id)."""
    self._id = new_id
@property
def meta(self):
    """Gets the meta of this PaymentToken.

    :return: The meta of this PaymentToken.
    :rtype: InstanceMeta
    """
    return self._meta
@meta.setter
def meta(self, meta):
    """Sets the meta of this PaymentToken.

    :param meta: The meta of this PaymentToken.
    :type: InstanceMeta
    """
    self._meta = meta
@property
def customer(self):
    """Gets the customer of this PaymentToken.

    :return: The customer of this PaymentToken.
    :rtype: Customer
    """
    return self._customer
@customer.setter
def customer(self, customer):
    """Sets the customer of this PaymentToken.

    :param customer: The customer of this PaymentToken.
    :type: Customer
    """
    self._customer = customer
@property
def reusable(self):
    """Gets the reusable flag of this PaymentToken.

    :return: The reusable flag of this PaymentToken.
    :rtype: bool
    """
    return self._reusable
@reusable.setter
def reusable(self, reusable):
    """Sets the reusable flag of this PaymentToken.

    :param reusable: The reusable flag of this PaymentToken.
    :type: bool
    """
    self._reusable = reusable
@property
def status(self):
    """Gets the status of this PaymentToken.

    :return: The status of this PaymentToken.
    :rtype: str
    """
    return self._status
@status.setter
def status(self, status):
    """Sets the status of this PaymentToken.

    :param status: The status of this PaymentToken; must be one of
        "new", "used" or "expired", or None.
    :type: str
    :raises ValueError: if a non-None status is not an allowed value.
    """
    allowed_values = ["new", "used", "expired"]
    # None is accepted (e.g. from __init__ defaults); only non-None
    # values are checked against the enum.
    if status is not None and status not in allowed_values:
        raise ValueError(
            "Invalid value for `status` ({0}), must be one of {1}"
            .format(status, allowed_values)
        )
    self._status = status
@property
def token(self):
    """Gets the token of this PaymentToken.

    :return: The token of this PaymentToken.
    :rtype: str
    """
    return self._token
@token.setter
def token(self, token):
    """Sets the token of this PaymentToken.

    :param token: The token of this PaymentToken.
    :type: str
    """
    self._token = token
def to_dict(self):
    """Return the model's properties as a plain dict.

    Nested swagger models (anything exposing ``to_dict``) are
    serialized recursively, whether they appear directly, inside a
    list, or as dict values.
    """
    def _convert(value):
        # Recursively serialize a single attribute value.
        if isinstance(value, list):
            return [item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value]
        if hasattr(value, "to_dict"):
            return value.to_dict()
        if isinstance(value, dict):
            return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items()}
        return value

    result = {}
    for attr in self.swagger_types:
        result[attr] = _convert(getattr(self, attr))
    if issubclass(PaymentToken, dict):
        # dict subclasses also carry their own mapping entries.
        for key, value in self.items():
            result[key] = value
    return result
def to_str(self):
    """Returns the pretty-printed string representation of the model."""
    return pprint.pformat(self.to_dict())
def __repr__(self):
    """For `print` and `pprint` (delegates to to_str)."""
    return self.to_str()
def __eq__(self, other):
    """Two PaymentToken objects are equal when all attributes match.

    Any non-PaymentToken comparand is unequal by definition.
    """
    return isinstance(other, PaymentToken) and self.__dict__ == other.__dict__
def __ne__(self, other):
    """Inverse of __eq__ (needed explicitly on Python 2)."""
    return not self.__eq__(other)
@classmethod
def get_api_client(cls):
    """Return the shared ApiClient, binding the singleton on first use."""
    if cls.api_client is None:
        cls.api_client = ApiClient.instance()
    return cls.api_client
@classmethod
def find_all(cls, **kwargs):
    """Return all PaymentTokens (alias for list_all_payment_tokens)."""
    return cls.list_all_payment_tokens(**kwargs)
@classmethod
def find_all_by(cls, **kwargs):
    """Return PaymentTokens matching the filter kwargs (alias for
    list_all_payment_tokens; extra kwargs become query parameters)."""
    return cls.list_all_payment_tokens(**kwargs)
@classmethod
def find_one_by(cls, **kwargs):
    """Return the first PaymentToken matching the filter kwargs.

    Implicitly returns None when nothing matches.
    """
    results = cls.list_all_payment_tokens(**kwargs)
    if len(results) > 0:
        return results[0]
@classmethod
def find_by_id(cls, id):
    """Return the PaymentToken with the given id."""
    return cls.get_payment_token_by_id(id)
def create(self):
    """Create this instance server-side, adopt the returned id, and
    return self for chaining."""
    new_instance = self.create_payment_token(self)
    self.id = new_instance.id
    return self
def update(self):
    """Push this instance's current attributes to the server (PATCH)."""
    return self.update_payment_token_by_id(self.id, self)
def delete(self):
    """Delete this instance server-side by its id."""
    return self.delete_payment_token_by_id(self.id)
@classmethod
def create_payment_token(cls, payment_token, **kwargs):
    """Create PaymentToken.

    Create a new PaymentToken.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.create_payment_token(payment_token, async=True)
    >>> result = thread.get()

    :param async bool
    :param PaymentToken payment_token: Attributes of paymentToken to create (required)
    :return: PaymentToken
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized model.
    kwargs['_return_http_data_only'] = True
    # NOTE(review): `async` is a reserved word from Python 3.7 onward;
    # this generated keyword API only runs on older interpreters.
    if kwargs.get('async'):
        return cls._create_payment_token_with_http_info(payment_token, **kwargs)
    else:
        (data) = cls._create_payment_token_with_http_info(payment_token, **kwargs)
        return data
@classmethod
def _create_payment_token_with_http_info(cls, payment_token, **kwargs):
    """Create PaymentToken (transport-level implementation).

    POSTs *payment_token* to ``/paymentTokens`` and returns the
    deserialized PaymentToken (or the request thread when async=True).

    :param async bool
    :param PaymentToken payment_token: Attributes of paymentToken to create (required)
    :return: PaymentToken
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['payment_token']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    query_params = []

    params = locals()
    # Any kwarg not recognized above is forwarded as a query parameter.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            query_params.append((key, val))
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'payment_token' is set
    if ('payment_token' not in params or
            params['payment_token'] is None):
        raise ValueError("Missing the required parameter `payment_token` when calling `create_payment_token`")

    collection_formats = {}

    path_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'payment_token' in params:
        body_params = params['payment_token']
    # HTTP header `Accept`
    header_params['Accept'] = cls.get_api_client().select_header_accept(
        ['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    # NOTE(review): `async=` as a keyword is a SyntaxError on
    # Python >= 3.7 (reserved word); kept as-is for the targeted runtime.
    return cls.get_api_client().call_api(
        '/paymentTokens', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PaymentToken',
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
@classmethod
def delete_payment_token_by_id(cls, payment_token_id, **kwargs):
    """Delete PaymentToken.

    Delete an instance of PaymentToken by its ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.delete_payment_token_by_id(payment_token_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param str payment_token_id: ID of paymentToken to delete. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the payload (None here).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._delete_payment_token_by_id_with_http_info(payment_token_id, **kwargs)
    else:
        (data) = cls._delete_payment_token_by_id_with_http_info(payment_token_id, **kwargs)
        return data
@classmethod
def _delete_payment_token_by_id_with_http_info(cls, payment_token_id, **kwargs):
"""Delete PaymentToken
Delete an instance of PaymentToken by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_payment_token_by_id_with_http_info(payment_token_id, async=True)
>>> result = thread.get()
:param async bool
:param str payment_token_id: ID of paymentToken to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['payment_token_id']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
# verify the required parameter 'payment_token_id' is set
if ('payment_token_id' not in params or
params['payment_token_id'] is None):
raise ValueError("Missing the required parameter `payment_token_id` when calling `delete_payment_token_by_id`")
collection_formats = {}
path_params = {}
if 'payment_token_id' in params:
path_params['paymentTokenId'] = params['payment_token_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/paymentTokens/{paymentTokenId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
@classmethod
def get_payment_token_by_id(cls, payment_token_id, **kwargs):
    """Find PaymentToken.

    Return single instance of PaymentToken by its ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.get_payment_token_by_id(payment_token_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param str payment_token_id: ID of paymentToken to return (required)
    :return: PaymentToken
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized model.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._get_payment_token_by_id_with_http_info(payment_token_id, **kwargs)
    else:
        (data) = cls._get_payment_token_by_id_with_http_info(payment_token_id, **kwargs)
        return data
@classmethod
def _get_payment_token_by_id_with_http_info(cls, payment_token_id, **kwargs):
"""Find PaymentToken
Return single instance of PaymentToken by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_payment_token_by_id_with_http_info(payment_token_id, async=True)
>>> result = thread.get()
:param async bool
:param str payment_token_id: ID of paymentToken to return (required)
:return: PaymentToken
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['payment_token_id']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
# verify the required parameter 'payment_token_id' is set
if ('payment_token_id' not in params or
params['payment_token_id'] is None):
raise ValueError("Missing the required parameter `payment_token_id` when calling `get_payment_token_by_id`")
collection_formats = {}
path_params = {}
if 'payment_token_id' in params:
path_params['paymentTokenId'] = params['payment_token_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/paymentTokens/{paymentTokenId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PaymentToken',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
@classmethod
def list_all_payment_tokens(cls, **kwargs):
    """List PaymentTokens.

    Return a (paged) list of PaymentTokens.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.list_all_payment_tokens(async=True)
    >>> result = thread.get()

    :param async bool
    :param int page: page number
    :param int size: page size
    :param str sort: page order
    :return: page[PaymentToken]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized page.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._list_all_payment_tokens_with_http_info(**kwargs)
    else:
        (data) = cls._list_all_payment_tokens_with_http_info(**kwargs)
        return data
@classmethod
def _list_all_payment_tokens_with_http_info(cls, **kwargs):
"""List PaymentTokens
Return a list of PaymentTokens
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_all_payment_tokens_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param int page: page number
:param int size: page size
:param str sort: page order
:return: page[PaymentToken]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'size', 'sort']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
if 'page' in params:
query_params.append(('page', params['page']))
if 'size' in params:
query_params.append(('size', params['size']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/paymentTokens', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='page[PaymentToken]',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
@classmethod
def replace_payment_token_by_id(cls, payment_token_id, payment_token, **kwargs):
    """Replace PaymentToken.

    Replace all attributes of PaymentToken (full update, PUT).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.replace_payment_token_by_id(payment_token_id, payment_token, async=True)
    >>> result = thread.get()

    :param async bool
    :param str payment_token_id: ID of paymentToken to replace (required)
    :param PaymentToken payment_token: Attributes of paymentToken to replace (required)
    :return: PaymentToken
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized model.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._replace_payment_token_by_id_with_http_info(payment_token_id, payment_token, **kwargs)
    else:
        (data) = cls._replace_payment_token_by_id_with_http_info(payment_token_id, payment_token, **kwargs)
        return data
@classmethod
def _replace_payment_token_by_id_with_http_info(cls, payment_token_id, payment_token, **kwargs):
"""Replace PaymentToken
Replace all attributes of PaymentToken
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_payment_token_by_id_with_http_info(payment_token_id, payment_token, async=True)
>>> result = thread.get()
:param async bool
:param str payment_token_id: ID of paymentToken to replace (required)
:param PaymentToken payment_token: Attributes of paymentToken to replace (required)
:return: PaymentToken
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['payment_token_id', 'payment_token']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
# verify the required parameter 'payment_token_id' is set
if ('payment_token_id' not in params or
params['payment_token_id'] is None):
raise ValueError("Missing the required parameter `payment_token_id` when calling `replace_payment_token_by_id`")
# verify the required parameter 'payment_token' is set
if ('payment_token' not in params or
params['payment_token'] is None):
raise ValueError("Missing the required parameter `payment_token` when calling `replace_payment_token_by_id`")
collection_formats = {}
path_params = {}
if 'payment_token_id' in params:
path_params['paymentTokenId'] = params['payment_token_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'payment_token' in params:
body_params = params['payment_token']
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/paymentTokens/{paymentTokenId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PaymentToken',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
@classmethod
def update_payment_token_by_id(cls, payment_token_id, payment_token, **kwargs):
    """Update PaymentToken.

    Update attributes of PaymentToken (partial update, PATCH).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.update_payment_token_by_id(payment_token_id, payment_token, async=True)
    >>> result = thread.get()

    :param async bool
    :param str payment_token_id: ID of paymentToken to update. (required)
    :param PaymentToken payment_token: Attributes of paymentToken to update. (required)
    :return: PaymentToken
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized model.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._update_payment_token_by_id_with_http_info(payment_token_id, payment_token, **kwargs)
    else:
        (data) = cls._update_payment_token_by_id_with_http_info(payment_token_id, payment_token, **kwargs)
        return data
@classmethod
def _update_payment_token_by_id_with_http_info(cls, payment_token_id, payment_token, **kwargs):
"""Update PaymentToken
Update attributes of PaymentToken
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_payment_token_by_id_with_http_info(payment_token_id, payment_token, async=True)
>>> result = thread.get()
:param async bool
:param str payment_token_id: ID of paymentToken to update. (required)
:param PaymentToken payment_token: Attributes of paymentToken to update. (required)
:return: PaymentToken
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['payment_token_id', 'payment_token']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
# verify the required parameter 'payment_token_id' is set
if ('payment_token_id' not in params or
params['payment_token_id'] is None):
raise ValueError("Missing the required parameter `payment_token_id` when calling `update_payment_token_by_id`")
# verify the required parameter 'payment_token' is set
if ('payment_token' not in params or
params['payment_token'] is None):
raise ValueError("Missing the required parameter `payment_token` when calling `update_payment_token_by_id`")
collection_formats = {}
path_params = {}
if 'payment_token_id' in params:
path_params['paymentTokenId'] = params['payment_token_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'payment_token' in params:
body_params = params['payment_token']
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/paymentTokens/{paymentTokenId}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PaymentToken',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 33.605856 | 124 | 0.614671 | 3,394 | 29,842 | 5.123748 | 0.059222 | 0.100748 | 0.048304 | 0.030362 | 0.843991 | 0.802645 | 0.794307 | 0.781886 | 0.775791 | 0.769408 | 0 | 0.000524 | 0.296193 | 29,842 | 887 | 125 | 33.643743 | 0.827453 | 0.02902 | 0 | 0.633684 | 0 | 0 | 0.147706 | 0.035548 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.010526 | null | null | 0.004211 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6b3ea979556108b422bb507cdd4d2a816525f9d9 | 56,972 | py | Python | sdk/servicebus/azure-servicebus/tests/async_tests/test_queues_async.py | malthe/azure-sdk-for-python | 0394ca66256f18fd45975b75ceea0e2527208abf | [
"MIT"
] | null | null | null | sdk/servicebus/azure-servicebus/tests/async_tests/test_queues_async.py | malthe/azure-sdk-for-python | 0394ca66256f18fd45975b75ceea0e2527208abf | [
"MIT"
] | null | null | null | sdk/servicebus/azure-servicebus/tests/async_tests/test_queues_async.py | malthe/azure-sdk-for-python | 0394ca66256f18fd45975b75ceea0e2527208abf | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import asyncio
import logging
import sys
import os
import pytest
import time
from datetime import datetime, timedelta
from azure.servicebus.aio import (
ServiceBusClient,
QueueClient,
Message,
BatchMessage,
DeferredMessage,
AutoLockRenew)
from azure.servicebus.common.message import PeekMessage
from azure.servicebus.common.constants import ReceiveSettleMode
from azure.servicebus.common.errors import (
ServiceBusError,
MessageLockExpired,
InvalidHandlerState,
MessageAlreadySettled,
AutoLockRenewTimeout,
MessageSendFailed,
MessageSettleFailed)
from devtools_testutils import AzureMgmtTestCase, CachedResourceGroupPreparer
from servicebus_preparer import CachedServiceBusNamespacePreparer, CachedServiceBusQueuePreparer, ServiceBusQueuePreparer
def get_logger(level):
    """Return the "azure" SDK logger configured at *level*.

    Attaches a stdout StreamHandler to the "azure" logger on first call,
    and mirrors a handler onto the "uamqp" logger (at INFO) so transport
    traces appear alongside SDK traces.  Loggers that already have
    handlers are left untouched, so repeated calls don't duplicate output.

    Bug fixed: the original only created ``handler`` inside the azure
    branch, so a call where "azure" already had handlers but "uamqp" did
    not raised NameError when adding the handler to "uamqp".

    :param level: logging level for the "azure" logger (e.g. logging.DEBUG)
    :return: the "azure" logger
    """
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))

    azure_logger = logging.getLogger("azure")
    if not azure_logger.handlers:
        azure_logger.setLevel(level)
        azure_logger.addHandler(handler)

    uamqp_logger = logging.getLogger("uamqp")
    if not uamqp_logger.handlers:
        uamqp_logger.setLevel(logging.INFO)
        uamqp_logger.addHandler(handler)

    return azure_logger
_logger = get_logger(logging.DEBUG)
def print_message(message):
    """Log a received message (INFO) and all of its metadata (DEBUG).

    Lock-related fields can raise TypeError on messages without a lock,
    so they are attempted inside a try/except and skipped when
    unavailable.
    """
    _logger.info("Receiving: {}".format(message))
    debug = _logger.debug
    for template, value in (
            ("Time to live: {}", message.time_to_live),
            ("Sequence number: {}", message.sequence_number),
            ("Enqueue Sequence number: {}", message.enqueue_sequence_number),
            ("Partition ID: {}", message.partition_id),
            ("Partition Key: {}", message.partition_key),
            ("User Properties: {}", message.user_properties),
            ("Annotations: {}", message.annotations),
            ("Delivery count: {}", message.header.delivery_count)):
        debug(template.format(value))
    try:
        debug("Locked until: {}".format(message.locked_until))
        debug("Lock Token: {}".format(message.lock_token))
    except TypeError:
        pass
    debug("Enqueued time: {}".format(message.enqueued_time))
class ServiceBusQueueTests(AzureMgmtTestCase):
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_queue_client_conn_str_receive_handler_peeklock(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
    """Send 10 messages, then receive and complete them in the default
    (PeekLock) mode, asserting all 10 arrive.  Also verifies that asking
    for a session receiver on a non-sessionful queue raises ValueError.
    """
    queue_client = QueueClient.from_connection_string(
        servicebus_namespace_connection_string,
        name=servicebus_queue.name,
        debug=False)
    queue_client.get_properties()

    async with queue_client.get_sender() as sender:
        for i in range(10):
            message = Message("Handler message no. {}".format(i))
            message.enqueue_sequence_number = i
            await sender.send(message)

    # A session receiver on this (non-sessionful) queue is rejected.
    with pytest.raises(ValueError):
        queue_client.get_receiver(session="test", idle_timeout=5)

    receiver = queue_client.get_receiver(idle_timeout=5)
    count = 0
    async for message in receiver:
        print_message(message)
        count += 1
        await message.complete()

    assert count == 10
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer()
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_github_issue_7079_async(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
    """Regression test for GitHub issue 7079: a ReceiveAndDelete receiver
    must keep delivering messages after reconnect().  Sends 5 messages,
    fetches a first batch, reconnects, then drains the remainder via
    iteration and asserts all 5 were received in total.
    """
    sb_client = ServiceBusClient.from_connection_string(
        servicebus_namespace_connection_string, debug=False)
    queue = sb_client.get_queue(servicebus_queue.name)
    async with queue.get_sender() as sender:
        for i in range(5):
            await sender.send(Message("Message {}".format(i)))
    messages = queue.get_receiver(mode=ReceiveSettleMode.ReceiveAndDelete, idle_timeout=5)
    batch = await messages.fetch_next()
    count = len(batch)
    # Force a reconnect mid-stream, then drain the rest by iterating.
    await messages.reconnect()
    async for message in messages:
        _logger.debug(message)
        count += 1
    assert count == 5
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@CachedServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_github_issue_6178_async(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
    """Regression test for GitHub issue #6178.

    Receives messages with a long (60s) idle timeout and sleeps 40s between
    completions, verifying the link stays healthy across long idle gaps.
    """
    # Fix: pass name_prefix='servicebustest' to CachedResourceGroupPreparer
    # for consistency with every other test in this class (a bare
    # CachedResourceGroupPreparer() would use a different cached resource
    # group than the rest of the suite).
    sb_client = ServiceBusClient.from_connection_string(
        servicebus_namespace_connection_string, debug=False)
    queue = sb_client.get_queue(servicebus_queue.name)

    for i in range(3):
        await queue.send(Message("Message {}".format(i)))

    messages = queue.get_receiver(idle_timeout=60)
    async for message in messages:
        _logger.debug(message)
        _logger.debug(message.sequence_number)
        _logger.debug(message.enqueued_time)
        _logger.debug(message.expired)
        await message.complete()
        # Long pause between messages to exercise idle connection handling.
        await asyncio.sleep(40)
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_queue_client_conn_str_receive_handler_receiveanddelete(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
    """ReceiveAndDelete settles messages on receipt.

    complete() on such a message must raise MessageAlreadySettled, and a
    second receive pass (after a 30s wait) must find the queue empty,
    proving nothing was redelivered.
    """
    queue_client = QueueClient.from_connection_string(
        servicebus_namespace_connection_string,
        name=servicebus_queue.name,
        debug=False)
    queue_client.get_properties()

    async with queue_client.get_sender() as sender:
        for i in range(10):
            message = Message("Handler message no. {}".format(i))
            message.enqueue_sequence_number = i
            await sender.send(message)

    messages = []
    receiver = queue_client.get_receiver(mode=ReceiveSettleMode.ReceiveAndDelete, idle_timeout=5)
    async for message in receiver:
        messages.append(message)
        # Already settled by the service; explicit settlement must fail.
        with pytest.raises(MessageAlreadySettled):
            await message.complete()

    assert not receiver.running
    assert len(messages) == 10

    # Wait out any would-be lock duration, then confirm no redelivery.
    time.sleep(30)
    messages = []
    receiver = queue_client.get_receiver(mode=ReceiveSettleMode.ReceiveAndDelete, idle_timeout=5)
    async for message in receiver:
        messages.append(message)
    assert len(messages) == 0
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_queue_client_conn_str_receive_handler_with_stop(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
    """Break out of receiver iteration, then resume it inside a context manager.

    After the first break the receiver stays running; entering it as a
    context manager and breaking again shuts it down (running becomes False).
    """
    queue_client = QueueClient.from_connection_string(
        servicebus_namespace_connection_string,
        name=servicebus_queue.name,
        debug=False)

    async with queue_client.get_sender() as sender:
        for i in range(10):
            message = Message("Stop message no. {}".format(i))
            await sender.send(message)

    messages = []
    receiver = queue_client.get_receiver(idle_timeout=5)
    async for message in receiver:
        messages.append(message)
        await message.complete()
        if len(messages) >= 5:
            break

    # Breaking out of iteration does not close the receiver.
    assert receiver.running
    assert len(messages) == 5

    # Resume the same receiver; the context manager closes it on exit.
    async with receiver:
        async for message in receiver:
            messages.append(message)
            await message.complete()
            # len is already >= 5, so exactly one more message is consumed.
            if len(messages) >= 5:
                break

    assert not receiver.running
    assert len(messages) == 6
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_iter_messages_simple(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Iterate a PeekLock receiver; double-settlement and post-settle
    renew_lock must raise MessageAlreadySettled, and iterating a closed
    receiver must raise InvalidHandlerState."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(10):
                message = Message("Iter message no. {}".format(i))
                await sender.send(message)

        count = 0
        async for message in receiver:
            print_message(message)
            await message.complete()
            # Settling twice, or renewing after settlement, is an error.
            with pytest.raises(MessageAlreadySettled):
                await message.complete()
            with pytest.raises(MessageAlreadySettled):
                await message.renew_lock()
            count += 1

    # The receiver has exited its context manager and is closed.
    with pytest.raises(InvalidHandlerState):
        await receiver.__anext__()
    assert count == 10
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_conn_str_client_iter_messages_with_abandon(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
    """Abandon each message on first delivery, complete it on redelivery.

    Abandoned messages come back with delivery_count == 1; after completing
    those, a fresh receive pass must find the queue empty.
    """
    client = ServiceBusClient.from_connection_string(servicebus_namespace_connection_string, debug=False)
    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(10):
                message = Message("Abandoned message no. {}".format(i))
                await sender.send(message)

        count = 0
        async for message in receiver:
            print_message(message)
            if not message.header.delivery_count:
                # First delivery: abandon so the service redelivers it.
                count += 1
                await message.abandon()
            else:
                # Redelivery after abandon increments the delivery count.
                assert message.header.delivery_count == 1
                await message.complete()

    assert count == 10

    # Everything was eventually completed; nothing should remain.
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        async for message in receiver:
            print_message(message)
            await message.complete()
            count += 1
    assert count == 0
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_iter_messages_with_defer(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Defer every message; deferred messages must not be redelivered to a
    normal receiver on a subsequent pass."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    deferred_messages = []
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(10):
                message = Message("Deferred message no. {}".format(i))
                await sender.send(message)

        count = 0
        async for message in receiver:
            # Record sequence numbers; deferred messages are only
            # retrievable later by sequence number.
            deferred_messages.append(message.sequence_number)
            print_message(message)
            count += 1
            await message.defer()

    assert count == 10

    # Deferred messages are invisible to ordinary receive operations.
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        async for message in receiver:
            print_message(message)
            await message.complete()
            count += 1
    assert count == 0
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_client(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Defer messages, then retrieve and settle them via the queue client
    (not a receiver).

    Client-retrieved DeferredMessage objects cannot be settled individually
    (ValueError); they are settled in bulk with settle_deferred_messages.
    Retrieving already-settled sequence numbers raises ServiceBusError.
    """
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    deferred_messages = []
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(10):
                message = Message("Deferred message no. {}".format(i))
                await sender.send(message)

        count = 0
        async for message in receiver:
            deferred_messages.append(message.sequence_number)
            print_message(message)
            count += 1
            await message.defer()

    assert count == 10

    deferred = await queue_client.receive_deferred_messages(deferred_messages, mode=ReceiveSettleMode.PeekLock)
    assert len(deferred) == 10
    for message in deferred:
        assert isinstance(message, DeferredMessage)
        # No live receiver link backs these messages, so per-message
        # settlement is rejected.
        with pytest.raises(ValueError):
            await message.complete()

    # 'foo' is not a valid settlement outcome.
    with pytest.raises(ValueError):
        await queue_client.settle_deferred_messages('foo', deferred)
    await queue_client.settle_deferred_messages('completed', deferred)

    # Already settled; a second retrieval must fail.
    with pytest.raises(ServiceBusError):
        await queue_client.receive_deferred_messages(deferred_messages)
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_receiver_complete(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Defer messages, then retrieve them through a receiver link.

    Receiver-retrieved deferred messages carry a lock token and support
    renew_lock() and complete() directly.
    """
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    deferred_messages = []
    messages = [Message("Deferred message no. {}".format(i)) for i in range(10)]
    # queue_client.send returns one (success, error) tuple per message.
    results = await queue_client.send(messages)
    assert all(result[0] for result in results)

    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        async for message in receiver:
            deferred_messages.append(message.sequence_number)
            print_message(message)
            count += 1
            await message.defer()

    assert count == 10

    async with queue_client.get_receiver(idle_timeout=5) as session:
        deferred = await session.receive_deferred_messages(deferred_messages)
        assert len(deferred) == 10
        for message in deferred:
            assert isinstance(message, DeferredMessage)
            # Unlike client-retrieved deferred messages, these are bound to
            # the receiver and are individually lockable/settleable.
            assert message.lock_token
            assert message.locked_until
            assert message._receiver
            await message.renew_lock()
            await message.complete()
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_receiver_deadletter(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Defer messages, dead-letter them via a receiver, then drain the
    dead-letter subqueue and check the recorded reason/description."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    deferred_messages = []
    messages = [Message("Deferred message no. {}".format(i)) for i in range(10)]
    results = await queue_client.send(messages)
    assert all(result[0] for result in results)

    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        async for message in receiver:
            deferred_messages.append(message.sequence_number)
            print_message(message)
            count += 1
            await message.defer()

    assert count == 10

    async with queue_client.get_receiver(idle_timeout=5) as session:
        deferred = await session.receive_deferred_messages(deferred_messages)
        assert len(deferred) == 10
        for message in deferred:
            assert isinstance(message, DeferredMessage)
            await message.dead_letter("something")

    # The dead-lettered messages now live in the dead-letter subqueue,
    # annotated with the reason/description passed above.
    count = 0
    async with queue_client.get_deadletter_receiver(idle_timeout=5) as receiver:
        async for message in receiver:
            count += 1
            print_message(message)
            assert message.user_properties[b'DeadLetterReason'] == b'something'
            assert message.user_properties[b'DeadLetterErrorDescription'] == b'something'
            await message.complete()
    assert count == 10
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_receiver_deletemode(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Retrieve deferred messages in ReceiveAndDelete mode.

    They arrive pre-settled (complete() raises MessageAlreadySettled), and a
    second retrieval of the same sequence numbers raises ServiceBusError.
    """
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    deferred_messages = []
    messages = [Message("Deferred message no. {}".format(i)) for i in range(10)]
    results = await queue_client.send(messages)
    assert all(result[0] for result in results)

    count = 0
    receiver = queue_client.get_receiver(idle_timeout=5)
    async for message in receiver:
        deferred_messages.append(message.sequence_number)
        print_message(message)
        count += 1
        await message.defer()

    assert count == 10

    async with queue_client.get_receiver(idle_timeout=5) as receiver:
        deferred = await receiver.receive_deferred_messages(deferred_messages, mode=ReceiveSettleMode.ReceiveAndDelete)
        assert len(deferred) == 10
        for message in deferred:
            assert isinstance(message, DeferredMessage)
            # ReceiveAndDelete settled these on retrieval.
            with pytest.raises(MessageAlreadySettled):
                await message.complete()
        # The sequence numbers no longer exist; retrieval must fail.
        with pytest.raises(ServiceBusError):
            deferred = await receiver.receive_deferred_messages(deferred_messages)
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_not_found(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Requesting deferred messages by sequence numbers that were never
    deferred must raise ServiceBusError."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    deferred_messages = []
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(3):
                message = Message("Deferred message no. {}".format(i))
                await sender.send(message)

        count = 0
        async for message in receiver:
            deferred_messages.append(message.sequence_number)
            print_message(message)
            count += 1
            await message.defer()

    assert count == 3

    # Hard-coded sequence numbers that don't match the deferred set.
    with pytest.raises(ServiceBusError):
        deferred = await queue_client.receive_deferred_messages([3, 4], mode=ReceiveSettleMode.PeekLock)

    with pytest.raises(ServiceBusError):
        deferred = await queue_client.receive_deferred_messages([5, 6, 7], mode=ReceiveSettleMode.PeekLock)
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_receive_batch_with_deadletter(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Drain the queue in fetch_next() batches, dead-lettering every
    message; a subsequent normal receive pass must find the queue empty."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(10):
                message = Message("Dead lettered message no. {}".format(i))
                await sender.send(message)

        count = 0
        # Loop batch-by-batch until fetch_next returns an empty list.
        messages = await receiver.fetch_next()
        while messages:
            for message in messages:
                print_message(message)
                count += 1
                await message.dead_letter(description="Testing")
            messages = await receiver.fetch_next()

    assert count == 10

    # Dead-lettered messages are gone from the main queue.
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        async for message in receiver:
            print_message(message)
            await message.complete()
            count += 1
    assert count == 0
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_receive_batch_with_retrieve_deadletter(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Dead-letter every message in batches, then read them all back from
    the dead-letter subqueue.

    Also checks that fetch_next() on a closed receiver raises
    InvalidHandlerState.
    """
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(10):
                message = Message("Dead lettered message no. {}".format(i))
                await sender.send(message)

        count = 0
        messages = await receiver.fetch_next()
        while messages:
            for message in messages:
                print_message(message)
                await message.dead_letter(description="Testing queue deadletter")
                count += 1
            messages = await receiver.fetch_next()

    # Receiver is closed after the `async with` block exits.
    with pytest.raises(InvalidHandlerState):
        await receiver.fetch_next()
    assert count == 10

    # All 10 messages must be retrievable from the dead-letter subqueue.
    async with queue_client.get_deadletter_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        async for message in receiver:
            print_message(message)
            await message.complete()
            count += 1
    assert count == 10
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@CachedServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_session_fail(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Session operations against a non-sessionful queue.

    Requesting a session receiver raises ValueError, while a session-bound
    sender is still allowed to send.
    """
    sb_client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    queue = sb_client.get_queue(servicebus_queue.name)

    # This queue has no sessions enabled, so a session receiver is invalid.
    with pytest.raises(ValueError):
        queue.get_receiver(session="test")

    # Sending with a session id is permitted regardless.
    async with queue.get_sender(session="test") as sender:
        await sender.send(Message("test session sender"))
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_browse_messages_client(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Peek (browse) messages via the queue client.

    Peeked messages are PeekMessage instances and cannot be settled
    (complete() raises TypeError).
    """
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_sender() as sender:
        for i in range(5):
            message = Message("Test message no. {}".format(i))
            await sender.send(message)

    messages = await queue_client.peek(5)
    assert len(messages) == 5
    assert all(isinstance(m, PeekMessage) for m in messages)
    for message in messages:
        print_message(message)
        # Browsing takes no lock, so settlement is not supported.
        with pytest.raises(TypeError):
            message.complete()
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_browse_messages_with_receiver(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Peek (browse) messages through an open receiver link; peeked
    messages are PeekMessage instances and cannot be settled."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(5):
                message = Message("Test message no. {}".format(i))
                await sender.send(message)

        messages = await receiver.peek(5)
        assert len(messages) > 0
        assert all(isinstance(m, PeekMessage) for m in messages)
        for message in messages:
            print_message(message)
            # Browsing takes no lock, so settlement is not supported.
            with pytest.raises(TypeError):
                message.complete()
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_browse_empty_messages(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Peeking an empty queue returns an empty list rather than raising."""
    sb_client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    queue = sb_client.get_queue(servicebus_queue.name)

    # Nothing has been sent to this queue; a peek must come back empty.
    async with queue.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as browse_receiver:
        peeked = await browse_receiver.peek(10)
        assert len(peeked) == 0
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_renew_message_locks(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """renew_lock() must push locked_until forward; an un-renewed message
    whose lock has lapsed must raise MessageLockExpired on complete()."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    messages = []
    locks = 3
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
        async with queue_client.get_sender() as sender:
            for i in range(locks):
                message = Message("Test message no. {}".format(i))
                await sender.send(message)

        # Keep fetching until the queue is drained into `messages`.
        messages.extend(await receiver.fetch_next())
        recv = True
        while recv:
            recv = await receiver.fetch_next()
            messages.extend(recv)

        try:
            # NOTE(review): `message` here is the last Message built in the
            # send loop above, not a received message — presumably just a
            # sanity check that a fresh message reports not-expired.
            assert not message.expired
            for m in messages:
                time.sleep(5)
                initial_expiry = m.locked_until
                await m.renew_lock()
                # Renewal must extend the lock by at least the slept time.
                assert (m.locked_until - initial_expiry) >= timedelta(seconds=5)
        finally:
            await messages[0].complete()
            await messages[1].complete()
            # Let the third message's (renewed) lock lapse, then confirm
            # settlement fails with MessageLockExpired.
            time.sleep(30)
            with pytest.raises(MessageLockExpired):
                await messages[2].complete()
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_queue_client_conn_str_receive_handler_with_autolockrenew(self, servicebus_namespace_connection_string, servicebus_queue, **kwargs):
    """AutoLockRenew keeps a message's lock alive until its registration
    timeout, after which the lock lapses and complete() raises
    MessageLockExpired with an AutoLockRenewTimeout inner exception.

    The first message is held (lock auto-renewed for up to 60s) across two
    sleeps totalling 75s; it expires and is redelivered, so the final count
    is 11 (1 held + 9 completed + 1 redelivery).
    """
    queue_client = QueueClient.from_connection_string(
        servicebus_namespace_connection_string,
        name=servicebus_queue.name,
        debug=False)

    async with queue_client.get_sender() as sender:
        for i in range(10):
            message = Message("{}".format(i))
            await sender.send(message)

    renewer = AutoLockRenew()
    messages = []
    async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
        async for message in receiver:
            if not messages:
                # First message: register for auto-renewal with a 60s cap.
                messages.append(message)
                assert not message.expired
                renewer.register(message, timeout=60)
                print("Registered lock renew thread", message.locked_until, datetime.now())
                await asyncio.sleep(50)
                print("Finished first sleep", message.locked_until)
                # Still within the 60s renewal window.
                assert not message.expired
                await asyncio.sleep(25)
                print("Finished second sleep", message.locked_until, datetime.now())
                # Past the renewal timeout; the lock has lapsed.
                assert message.expired
                try:
                    await message.complete()
                    raise AssertionError("Didn't raise MessageLockExpired")
                except MessageLockExpired as e:
                    assert isinstance(e.inner_exception, AutoLockRenewTimeout)
            else:
                if message.expired:
                    # Lock lapsed while waiting behind the first message.
                    print("Remaining messages", message.locked_until, datetime.now())
                    assert message.expired
                    with pytest.raises(MessageLockExpired):
                        await message.complete()
                else:
                    # Redeliveries (including the first message coming back)
                    # carry a non-zero delivery count.
                    assert message.header.delivery_count >= 1
                    print("Remaining messages", message.locked_until, datetime.now())
                    messages.append(message)
                    await message.complete()

    await renewer.shutdown()
    assert len(messages) == 11
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_fail_send_messages(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Sending an over-sized (512 KB body) message must fail via every send
    path: client send returns (False, MessageSendFailed), sender.send
    raises, and queued pending sends report the failure."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    try:
        queue_client = client.get_queue(servicebus_queue.name)
    except MessageSendFailed:
        pytest.skip("Open issue for uAMQP on OSX")

    # 512 KB payload — beyond the service's per-message size limit.
    too_large = "A" * 1024 * 512

    # Path 1: client-level send reports per-message (success, error) tuples.
    results = await queue_client.send(Message(too_large))
    assert len(results) == 1
    assert not results[0][0]
    assert isinstance(results[0][1], MessageSendFailed)

    # Path 2: a direct sender.send raises.
    async with queue_client.get_sender() as sender:
        with pytest.raises(MessageSendFailed):
            await sender.send(Message(too_large))

    # Path 3: queued pending messages report the failure in the results.
    async with queue_client.get_sender() as sender:
        sender.queue_message(Message(too_large))
        results = await sender.send_pending_messages()
        assert len(results) == 1
        assert not results[0][0]
        assert isinstance(results[0][1], MessageSendFailed)
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_by_servicebus_client_fail_send_batch_messages(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Over-sized BatchMessage sends must fail via all three send paths.

    Currently skipped pending an upstream uAMQP bugfix; the code below is
    unreachable until the skip is lifted.
    """
    pytest.skip("TODO: Pending bugfix in uAMQP")

    def batch_data():
        # Three ~256 KB strings — together well over the batch size limit.
        for i in range(3):
            yield str(i) * 1024 * 256

    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)

    queue_client = client.get_queue(servicebus_queue.name)
    results = await queue_client.send(BatchMessage(batch_data()))
    assert len(results) == 4
    assert not results[0][0]
    assert isinstance(results[0][1], MessageSendFailed)

    async with queue_client.get_sender() as sender:
        with pytest.raises(MessageSendFailed):
            await sender.send(BatchMessage(batch_data()))

    async with queue_client.get_sender() as sender:
        sender.queue_message(BatchMessage(batch_data()))
        results = await sender.send_pending_messages()
        assert len(results) == 4
        assert not results[0][0]
        assert isinstance(results[0][1], MessageSendFailed)
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_message_time_to_live(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """A message with a 30s TTL on a dead-lettering queue must vanish from
    the main queue after expiry and surface in the dead-letter subqueue.

    Fix: removed the unused local `message_id = uuid.uuid4()` (it was never
    assigned to the message or referenced).
    """
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    import uuid
    queue_client = client.get_queue(servicebus_queue.name)

    async with queue_client.get_sender() as sender:
        content = str(uuid.uuid4())
        message = Message(content)
        # Message expires 30 seconds after enqueue.
        message.time_to_live = timedelta(seconds=30)
        await sender.send(message)

    # Wait for the TTL to elapse so the message is dead-lettered
    # (the queue was created with dead_lettering_on_message_expiration).
    time.sleep(30)
    async with queue_client.get_receiver() as receiver:
        messages = await receiver.fetch_next(timeout=10)
    assert not messages

    # The expired message must now be in the dead-letter subqueue.
    async with queue_client.get_deadletter_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        async for message in receiver:
            print_message(message)
            await message.complete()
            count += 1
        assert count == 1
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', requires_duplicate_detection=True, dead_lettering_on_message_expiration=True)
async def test_async_queue_message_duplicate_detection(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """On a duplicate-detection queue, five sends that share one message_id
    must result in exactly one delivered message."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    import uuid
    message_id = uuid.uuid4()
    queue_client = client.get_queue(servicebus_queue.name)

    async with queue_client.get_sender() as sender:
        for i in range(5):
            message = Message(str(i))
            # Same message_id on every send — the service drops duplicates.
            message.properties.message_id = message_id
            await sender.send(message)

    async with queue_client.get_receiver(idle_timeout=5) as receiver:
        count = 0
        async for message in receiver:
            print_message(message)
            assert message.properties.message_id == message_id
            await message.complete()
            count += 1
        assert count == 1
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_message_connection_closed(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Settling a message after its receiver has closed must raise
    MessageSettleFailed."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    import uuid
    queue_client = client.get_queue(servicebus_queue.name)

    async with queue_client.get_sender() as sender:
        content = str(uuid.uuid4())
        message = Message(content)
        await sender.send(message)

    async with queue_client.get_receiver() as receiver:
        messages = await receiver.fetch_next(timeout=10)
        assert len(messages) == 1

    # The receiver (and its link) is now closed; settlement must fail.
    with pytest.raises(MessageSettleFailed):
        await messages[0].complete()
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_message_expiry(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """After a message's peek-lock lapses, complete() and renew_lock() raise
    MessageLockExpired; the message is then redelivered with an incremented
    delivery count."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    import uuid
    queue_client = client.get_queue(servicebus_queue.name)

    async with queue_client.get_sender() as sender:
        content = str(uuid.uuid4())
        message = Message(content)
        await sender.send(message)

    async with queue_client.get_receiver() as receiver:
        messages = await receiver.fetch_next(timeout=10)
        assert len(messages) == 1
        # Sit past the lock duration without renewing.
        time.sleep(30)
        assert messages[0].expired
        with pytest.raises(MessageLockExpired):
            await messages[0].complete()
        with pytest.raises(MessageLockExpired):
            await messages[0].renew_lock()

    # The unsettled message is redelivered to a fresh receiver.
    async with queue_client.get_receiver() as receiver:
        messages = await receiver.fetch_next(timeout=30)
        assert len(messages) == 1
        print_message(messages[0])
        assert messages[0].header.delivery_count > 0
        await messages[0].complete()
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_message_lock_renew(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Renew a message lock twice to hold it past the original lock
    duration, settle it, and verify the queue then drains empty."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    import uuid
    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_sender() as sender:
        content = str(uuid.uuid4())
        message = Message(content)
        await sender.send(message)
    async with queue_client.get_receiver() as receiver:
        messages = await receiver.fetch_next(timeout=10)
        assert len(messages) == 1
        # 3 x 15s = 45s total -- presumably longer than the queue's lock
        # duration (confirm); the two renew_lock() calls keep the lock alive.
        time.sleep(15)
        await messages[0].renew_lock()
        time.sleep(15)
        await messages[0].renew_lock()
        time.sleep(15)
        assert not messages[0].expired
        await messages[0].complete()
    async with queue_client.get_receiver() as receiver:
        # The completed message must not be redelivered.
        messages = await receiver.fetch_next(timeout=10)
        assert len(messages) == 0
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_message_receive_and_delete(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Receive in ReceiveAndDelete mode: the message is settled on receipt,
    so every explicit settlement operation raises MessageAlreadySettled and
    the message never reappears on the queue."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    queue_client = client.get_queue(servicebus_queue.name)
    async with queue_client.get_sender() as sender:
        message = Message("Receive and delete test")
        await sender.send(message)
    async with queue_client.get_receiver(mode=ReceiveSettleMode.ReceiveAndDelete) as receiver:
        messages = await receiver.fetch_next(timeout=10)
        assert len(messages) == 1
        received = messages[0]
        print_message(received)
        # Already settled by the broker -- every settlement path must fail.
        with pytest.raises(MessageAlreadySettled):
            await received.complete()
        with pytest.raises(MessageAlreadySettled):
            await received.abandon()
        with pytest.raises(MessageAlreadySettled):
            await received.defer()
        with pytest.raises(MessageAlreadySettled):
            await received.dead_letter()
        with pytest.raises(MessageAlreadySettled):
            await received.renew_lock()
    # Give the service time to settle, then confirm nothing comes back.
    time.sleep(30)
    async with queue_client.get_receiver() as receiver:
        messages = await receiver.fetch_next(timeout=10)
        for m in messages:
            print_message(m)
        assert len(messages) == 0
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_message_batch(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Send a BatchMessage built from a five-item generator and verify
    that exactly five messages are delivered."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    queue_client = client.get_queue(servicebus_queue.name)

    def message_content():
        # Lazily yields the five message bodies for the batch.
        for i in range(5):
            yield "Message no. {}".format(i)

    async with queue_client.get_sender() as sender:
        await sender.send(BatchMessage(message_content()))

    async with queue_client.get_receiver() as receiver:
        messages = await receiver.fetch_next(timeout=10)
        # Keep draining until a fetch comes back empty.
        while True:
            batch = await receiver.fetch_next(timeout=10)
            if not batch:
                break
            messages.extend(batch)
        assert len(messages) == 5
        for received in messages:
            print_message(received)
            await received.complete()
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_schedule_message(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Schedule one message ~2 minutes out and verify it is delivered with
    the expected body, message id and scheduled enqueue time."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    import uuid
    queue_client = client.get_queue(servicebus_queue.name)
    # Truncate microseconds so the round-tripped service timestamp
    # compares equal below.
    enqueue_time = (datetime.utcnow() + timedelta(minutes=2)).replace(microsecond=0)
    async with queue_client.get_receiver() as receiver:
        async with queue_client.get_sender() as sender:
            content = str(uuid.uuid4())
            message_id = uuid.uuid4()
            message = Message(content)
            message.properties.message_id = message_id
            message.schedule(enqueue_time)
            await sender.send(message)
        # Long timeout: delivery only happens once enqueue_time passes.
        messages = await receiver.fetch_next(timeout=120)
        if messages:
            try:
                data = str(messages[0])
                assert data == content
                assert messages[0].properties.message_id == message_id
                assert messages[0].scheduled_enqueue_time == enqueue_time
                assert messages[0].scheduled_enqueue_time == messages[0].enqueued_time.replace(microsecond=0)
                assert len(messages) == 1
            finally:
                # Always settle so reruns start from an empty queue.
                for m in messages:
                    await m.complete()
        else:
            raise Exception("Failed to receive scheduled message.")
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_schedule_multiple_messages(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Schedule two messages in one sender.schedule() call and verify both
    are delivered at the scheduled enqueue time."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    import uuid
    queue_client = client.get_queue(servicebus_queue.name)
    # Truncate microseconds so the round-tripped timestamp compares equal.
    enqueue_time = (datetime.utcnow() + timedelta(minutes=2)).replace(microsecond=0)
    messages = []
    async with queue_client.get_receiver(prefetch=20) as receiver:
        async with queue_client.get_sender() as sender:
            content = str(uuid.uuid4())
            message_id_a = uuid.uuid4()
            message_a = Message(content)
            message_a.properties.message_id = message_id_a
            message_id_b = uuid.uuid4()
            message_b = Message(content)
            message_b.properties.message_id = message_id_b
            # schedule() returns one sequence-number token per message.
            tokens = await sender.schedule(enqueue_time, message_a, message_b)
            assert len(tokens) == 2
        # First fetch waits out the schedule; second sweeps any straggler.
        recv = await receiver.fetch_next(timeout=120)
        messages.extend(recv)
        recv = await receiver.fetch_next(timeout=5)
        messages.extend(recv)
        if messages:
            try:
                data = str(messages[0])
                assert data == content
                assert messages[0].properties.message_id in (message_id_a, message_id_b)
                assert messages[0].scheduled_enqueue_time == enqueue_time
                assert messages[0].scheduled_enqueue_time == messages[0].enqueued_time.replace(microsecond=0)
                assert len(messages) == 2
            finally:
                # Always settle so reruns start from an empty queue.
                for m in messages:
                    await m.complete()
        else:
            raise Exception("Failed to receive scheduled message.")
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
async def test_async_queue_cancel_scheduled_messages(self, servicebus_namespace, servicebus_namespace_key_name, servicebus_namespace_primary_key, servicebus_queue, **kwargs):
    """Schedule two messages, cancel them by token, and verify that
    nothing is ever delivered."""
    client = ServiceBusClient(
        service_namespace=servicebus_namespace.name,
        shared_access_key_name=servicebus_namespace_key_name,
        shared_access_key_value=servicebus_namespace_primary_key,
        debug=False)
    queue_client = client.get_queue(servicebus_queue.name)
    enqueue_time = (datetime.utcnow() + timedelta(minutes=2)).replace(microsecond=0)

    async with queue_client.get_receiver() as receiver:
        async with queue_client.get_sender() as sender:
            first = Message("Test scheduled message")
            second = Message("Test scheduled message")
            tokens = await sender.schedule(enqueue_time, first, second)
            assert len(tokens) == 2
            # Cancel both sequence numbers before they are released.
            await sender.cancel_scheduled_messages(*tokens)
        # Wait past the scheduled time: cancelled messages never arrive.
        delivered = await receiver.fetch_next(timeout=120)
        assert len(delivered) == 0
| 47.555927 | 230 | 0.680703 | 5,906 | 56,972 | 6.272773 | 0.048595 | 0.087187 | 0.062839 | 0.034551 | 0.875159 | 0.862067 | 0.839096 | 0.818204 | 0.801037 | 0.79005 | 0 | 0.007929 | 0.242874 | 56,972 | 1,197 | 231 | 47.595656 | 0.850933 | 0.005231 | 0 | 0.789778 | 0 | 0 | 0.04401 | 0.000459 | 0 | 0 | 0 | 0 | 0.092575 | 1 | 0.003857 | false | 0.000964 | 0.019286 | 0 | 0.025072 | 0.02893 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
861852c4fbe5d407624d32c97c52f3a13b802609 | 15,879 | py | Python | skeleton/migrations/0001_initial.py | l99fiamingo/ioniCup | ef69ee9e651a6f3482444ffcdc43102f8d02828d | [
"MIT"
] | 1 | 2019-01-23T19:23:00.000Z | 2019-01-23T19:23:00.000Z | skeleton/migrations/0001_initial.py | l99fiamingo/ioniCup | ef69ee9e651a6f3482444ffcdc43102f8d02828d | [
"MIT"
] | null | null | null | skeleton/migrations/0001_initial.py | l99fiamingo/ioniCup | ef69ee9e651a6f3482444ffcdc43102f8d02828d | [
"MIT"
] | null | null | null | # Generated by Django 2.1.3 on 2019-01-24 03:06
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial (auto-generated) schema for the ``skeleton`` app.

    Creates the tournament domain models -- tournaments, stages, groups,
    rounds, matches, teams, scores, courts, days and times -- plus the
    ``Human`` base table with its ``Coach``/``Player`` multi-table
    inheritance children, then wires up the remaining foreign keys.

    NOTE: generated code -- do not hand-edit field definitions here;
    create a follow-up migration instead.
    """

    # First migration of the app: no prior migration state exists.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='AllStarGame',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=16)),
                ('rules', models.CharField(blank=True, max_length=999)),
                ('slug', models.SlugField(blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='Court',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=16)),
                ('importance', models.IntegerField(blank=True, default=0, validators=[django.core.validators.MinValueValidator(0)])),
            ],
        ),
        migrations.CreateModel(
            name='Day',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=16)),
            ],
        ),
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=16)),
                ('format', models.CharField(choices=[('Round-Robin', "All'italiana"), ('Elimination', 'Ad eliminazione')], default='Round-Robin', max_length=32)),
                ('number_of_teams', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
                ('importance', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
            ],
        ),
        migrations.CreateModel(
            name='Human',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=16)),
                ('last_name', models.CharField(max_length=16)),
                ('jersey_size', models.CharField(blank=True, choices=[('XXS', 'XXS'), ('XS', 'XS'), ('S', 'S'), ('M', 'M'), ('L', 'L'), ('XL', 'XL'), ('XXL', 'XXL')], max_length=4)),
                ('slug', models.SlugField(blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='Match',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('points_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('points_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('number', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                # "sb_" fields back the live scoreboard state for a match.
                ('sb_current_sixth', models.CharField(blank=True, choices=[('1', 'Primo'), ('2', 'Secondo'), ('3', 'Terzo'), ('4', 'Quarto'), ('5', 'Quinto'), ('6', 'Sesto'), ('7', 'Supplementare')], max_length=16)),
                ('sb_timer', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_partial_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_partial_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_1_sixth_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_1_sixth_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_2_sixth_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_2_sixth_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_3_sixth_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_3_sixth_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_4_sixth_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_4_sixth_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_5_sixth_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_5_sixth_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_6_sixth_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_6_sixth_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_7_sixth_A', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_7_sixth_B', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('sb_color_A', models.CharField(blank=True, choices=[('Black', 'Nero'), ('Silver', 'Argento'), ('Gray', 'Grigio'), ('White', 'Bianco'), ('Maroon', 'Amaranto'), ('Red', 'Rosso'), ('Orange', 'Arancione'), ('Purple', 'Viola'), ('Fuchsia', 'Fucsia'), ('Green', 'Verde Scuro'), ('Lime', ' Verde Lime'), ('Yellow', 'Giallo'), ('Navy', 'Blue Navy'), ('Blue', 'Blu'), ('Teal', 'Verde Acqua'), ('Azure', 'Azzurro'), ('Pink', 'Rosa')], max_length=16)),
                ('sb_color_B', models.CharField(blank=True, choices=[('Black', 'Nero'), ('Silver', 'Argento'), ('Gray', 'Grigio'), ('White', 'Bianco'), ('Maroon', 'Amaranto'), ('Red', 'Rosso'), ('Orange', 'Arancione'), ('Purple', 'Viola'), ('Fuchsia', 'Fucsia'), ('Green', 'Verde Scuro'), ('Lime', ' Verde Lime'), ('Yellow', 'Giallo'), ('Navy', 'Blue Navy'), ('Blue', 'Blu'), ('Teal', 'Verde Acqua'), ('Azure', 'Azzurro'), ('Pink', 'Rosa')], max_length=16)),
                ('court', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='matches', to='skeleton.Court')),
            ],
        ),
        migrations.CreateModel(
            name='Round',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('round', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rounds', to='skeleton.Group')),
            ],
        ),
        migrations.CreateModel(
            name='Score',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('score', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('games_played', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('wins', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('losses', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('points_made', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('points_conceded', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('goals_made', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('goals_conceded', models.IntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('group', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='scores', to='skeleton.Group')),
            ],
        ),
        migrations.CreateModel(
            name='Stage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=16)),
                ('protected', models.BooleanField(default=False)),
                # Self-referential link forming a chain of stages.
                ('precedent_stage', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='next_stage', to='skeleton.Stage')),
            ],
        ),
        migrations.CreateModel(
            name='Team',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=24)),
                ('short_name', models.CharField(blank=True, max_length=12)),
                ('city', models.CharField(blank=True, max_length=36)),
                ('slug', models.SlugField(blank=True)),
                ('color', models.CharField(blank=True, choices=[('Black', 'Nero'), ('Silver', 'Argento'), ('Gray', 'Grigio'), ('White', 'Bianco'), ('Maroon', 'Amaranto'), ('Red', 'Rosso'), ('Orange', 'Arancione'), ('Purple', 'Viola'), ('Fuchsia', 'Fucsia'), ('Green', 'Verde Scuro'), ('Lime', ' Verde Lime'), ('Yellow', 'Giallo'), ('Navy', 'Blue Navy'), ('Blue', 'Blu'), ('Teal', 'Verde Acqua'), ('Azure', 'Azzurro'), ('Pink', 'Rosa')], default='White', max_length=16)),
            ],
        ),
        migrations.CreateModel(
            name='Time',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.CharField(blank=True, max_length=16)),
                ('event', models.CharField(blank=True, max_length=32)),
                ('initial', models.BooleanField(default=False)),
                ('day', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='times', to='skeleton.Day')),
                # Self-referential link forming a chain of time slots.
                ('precedent_time', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='next_time', to='skeleton.Time')),
            ],
        ),
        migrations.CreateModel(
            name='Tournament',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.CharField(max_length=99)),
                ('title', models.CharField(blank=True, max_length=99)),
                ('slug', models.SlugField(blank=True)),
                ('active', models.BooleanField(default=False)),
            ],
        ),
        # Multi-table inheritance children of Human (note bases=(...)):
        # each row shares its primary key with a Human row via human_ptr.
        migrations.CreateModel(
            name='Coach',
            fields=[
                ('human_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='skeleton.Human')),
                ('cell_number', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
                ('email', models.EmailField(blank=True, max_length=64, null=True)),
            ],
            bases=('skeleton.human',),
        ),
        migrations.CreateModel(
            name='Player',
            fields=[
                ('human_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='skeleton.Human')),
                ('year_of_birth', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1900), django.core.validators.MaxValueValidator(2100)])),
                ('jersey_number', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(999)])),
                ('all_star_game', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='players', to='skeleton.AllStarGame')),
            ],
            bases=('skeleton.human',),
        ),
        # Foreign keys added after all tables exist (Django orders these
        # AddField operations to resolve forward references between models).
        migrations.AddField(
            model_name='team',
            name='tournament',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='teams', to='skeleton.Tournament'),
        ),
        migrations.AddField(
            model_name='stage',
            name='tournament',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stages', to='skeleton.Tournament'),
        ),
        migrations.AddField(
            model_name='score',
            name='team',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scores', to='skeleton.Team'),
        ),
        migrations.AddField(
            model_name='match',
            name='round',
            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, related_name='matches', to='skeleton.Round'),
        ),
        migrations.AddField(
            model_name='match',
            name='team_A',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='matches_A', to='skeleton.Team'),
        ),
        migrations.AddField(
            model_name='match',
            name='team_B',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='matches_B', to='skeleton.Team'),
        ),
        migrations.AddField(
            model_name='match',
            name='time',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='matches', to='skeleton.Time'),
        ),
        migrations.AddField(
            model_name='group',
            name='stage',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='skeleton.Stage'),
        ),
        migrations.AddField(
            model_name='day',
            name='tournament',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='days', to='skeleton.Tournament'),
        ),
        migrations.AddField(
            model_name='court',
            name='tournament',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='courts', to='skeleton.Tournament'),
        ),
        migrations.AddField(
            model_name='player',
            name='team',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='players', to='skeleton.Team'),
        ),
        migrations.AddField(
            model_name='coach',
            name='team',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='coaches', to='skeleton.Team'),
        ),
    ]
| 67.858974 | 470 | 0.613263 | 1,678 | 15,879 | 5.68236 | 0.117998 | 0.056633 | 0.079706 | 0.110121 | 0.845936 | 0.8258 | 0.789093 | 0.75291 | 0.741793 | 0.726481 | 0 | 0.010937 | 0.21689 | 15,879 | 233 | 471 | 68.150215 | 0.75585 | 0.002834 | 0 | 0.513274 | 1 | 0 | 0.133653 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.022124 | 0 | 0.039823 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
864e9203795b8d46a0cc59fcab7f1e47c21f36fe | 18,419 | py | Python | hdl/cpu/riscv_formal/checks/gen_macros.py | arjenroodselaar/bazel_yosys | 1283e1536659721259123050b78036e8754e49ef | [
"Apache-2.0"
] | null | null | null | hdl/cpu/riscv_formal/checks/gen_macros.py | arjenroodselaar/bazel_yosys | 1283e1536659721259123050b78036e8754e49ef | [
"Apache-2.0"
] | null | null | null | hdl/cpu/riscv_formal/checks/gen_macros.py | arjenroodselaar/bazel_yosys | 1283e1536659721259123050b78036e8754e49ef | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright (C) 2017 Clifford Wolf <clifford@symbioticeda.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Boilerplate preamble: rand-reg abstractions for the different target
# tools (formal / simulation / synthesis), plus the address-validity
# helper macros shared by all checker channels.
_header = [
    "// Generated by hdl/cpu/riscv_formal/checks/gen_macros.py",
    "",
    "`ifdef YOSYS",
    "`define rvformal_rand_reg rand reg",
    "`define rvformal_const_rand_reg const rand reg",
    "`else",
    "`ifdef SIMULATION",
    "`define rvformal_rand_reg reg",
    "`define rvformal_const_rand_reg reg",
    "`else",
    "`define rvformal_rand_reg wire",
    "`define rvformal_const_rand_reg reg",
    "`endif",
    "`endif",
    "",
    "`ifndef RISCV_FORMAL_VALIDADDR",
    "`define RISCV_FORMAL_VALIDADDR(addr) 1",
    "`endif",
    "",
    "`define rvformal_addr_valid(a) (`RISCV_FORMAL_VALIDADDR(a))",
    "`define rvformal_addr_eq(a, b) ((`rvformal_addr_valid(a) == `rvformal_addr_valid(b)) && (!`rvformal_addr_valid(a) || (a == b)))",
]
print("\n".join(_header))
csrs_xlen = list()
csrs_xlen += "fflags frm fcsr".split()
csrs_xlen += "misa".split()

csrs_64 = list()
csrs_64 += "time mcycle minstret".split()

all_csrs = csrs_xlen + csrs_64


def print_csr_macros(csr, width):
    """Emit the rvformal_csr_* macro family for one CSR.

    The original script duplicated this ~30-line print block once for
    XLEN-wide CSRs and once for 64-bit CSRs, differing only in the width
    expression; this helper is the single parameterized version.

    csr   -- CSR name (lower case), used in macro and signal names.
    width -- Verilog width expression for one retirement channel,
             e.g. "`RISCV_FORMAL_XLEN" or "64".
    """
    print("")
    print("`ifdef RISCV_FORMAL_CSR_%s" % csr.upper())
    # Wire declarations for the rmask/wmask/rdata/wdata channels.
    print("`define rvformal_csr_%s_wires \\" % csr)
    print("(* keep *) wire [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_rmask; \\" % (width, csr))
    print("(* keep *) wire [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_wmask; \\" % (width, csr))
    print("(* keep *) wire [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_rdata; \\" % (width, csr))
    print("(* keep *) wire [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_wdata;" % (width, csr))
    # Port list fragments for module outputs and inputs.
    print("`define rvformal_csr_%s_outputs , \\" % csr)
    print("output [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_rmask, \\" % (width, csr))
    print("output [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_wmask, \\" % (width, csr))
    print("output [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_rdata, \\" % (width, csr))
    print("output [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_wdata" % (width, csr))
    print("`define rvformal_csr_%s_inputs , \\" % csr)
    print("input [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_rmask, \\" % (width, csr))
    print("input [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_wmask, \\" % (width, csr))
    print("input [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_rdata, \\" % (width, csr))
    print("input [`RISCV_FORMAL_NRET * %s - 1 : 0] rvfi_csr_%s_wdata" % (width, csr))
    # Per-channel slicing of the flattened NRET-wide buses.
    print("`define rvformal_csr_%s_channel(_idx) \\" % csr)
    print("wire [%s - 1 : 0] csr_%s_rmask = rvfi_csr_%s_rmask [(_idx)*%s +: %s]; \\" % (width, csr, csr, width, width))
    print("wire [%s - 1 : 0] csr_%s_wmask = rvfi_csr_%s_wmask [(_idx)*%s +: %s]; \\" % (width, csr, csr, width, width))
    print("wire [%s - 1 : 0] csr_%s_rdata = rvfi_csr_%s_rdata [(_idx)*%s +: %s]; \\" % (width, csr, csr, width, width))
    print("wire [%s - 1 : 0] csr_%s_wdata = rvfi_csr_%s_wdata [(_idx)*%s +: %s];" % (width, csr, csr, width, width))
    # Named port connections for instantiations.
    print("`define rvformal_csr_%s_conn , \\" % csr)
    print(".rvfi_csr_%s_rmask (rvfi_csr_%s_rmask), \\" % (csr, csr))
    print(".rvfi_csr_%s_wmask (rvfi_csr_%s_wmask), \\" % (csr, csr))
    print(".rvfi_csr_%s_rdata (rvfi_csr_%s_rdata), \\" % (csr, csr))
    print(".rvfi_csr_%s_wdata (rvfi_csr_%s_wdata)" % (csr, csr))
    # When the CSR is not enabled, all hooks expand to nothing.
    print("`else")
    print("`define rvformal_csr_%s_wires" % csr)
    print("`define rvformal_csr_%s_outputs" % csr)
    print("`define rvformal_csr_%s_inputs" % csr)
    print("`define rvformal_csr_%s_channel(_idx)" % csr)
    print("`define rvformal_csr_%s_conn" % csr)
    print("`endif")


for csr in csrs_xlen:
    print_csr_macros(csr, "`RISCV_FORMAL_XLEN")

for csr in csrs_64:
    print_csr_macros(csr, "64")
# EXTAMO support is optional: either declare the rvfi_mem_extamo channel
# or define all of its hooks away to nothing.
print("\n".join([
    "",
    "`ifdef RISCV_FORMAL_EXTAMO",
    "`define rvformal_extamo_wires (* keep *) wire [`RISCV_FORMAL_NRET-1:0] rvfi_mem_extamo;",
    "`define rvformal_extamo_outputs , output [`RISCV_FORMAL_NRET-1:0] rvfi_mem_extamo",
    "`define rvformal_extamo_inputs , input [`RISCV_FORMAL_NRET-1:0] rvfi_mem_extamo",
    "`define rvformal_extamo_channel(_idx) wire mem_extamo = rvfi_mem_extamo [_idx];",
    "`define rvformal_extamo_conn , .rvfi_mem_extamo(rvfi_mem_extamo)",
    "`else",
    "`define rvformal_extamo_wires",
    "`define rvformal_extamo_outputs",
    "`define rvformal_extamo_inputs",
    "`define rvformal_extamo_channel(_idx)",
    "`define rvformal_extamo_conn",
    "`endif",
]))
# Emit the `RVFI_WIRES macro: one (* keep *) wire declaration per RVFI signal,
# continued across lines with trailing backslashes, then the extamo and
# per-CSR wire macros.
print("`define RVFI_WIRES \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET - 1 : 0] rvfi_valid; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * 64 - 1 : 0] rvfi_order; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_ILEN - 1 : 0] rvfi_instruction;\\")
print("(* keep *) wire [`RISCV_FORMAL_NRET - 1 : 0] rvfi_trap; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET - 1 : 0] rvfi_halt; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET - 1 : 0] rvfi_intr; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * 2 - 1 : 0] rvfi_mode; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * 2 - 1 : 0] rvfi_ixl; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rs1_addr; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rs2_addr; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rs1_rdata; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rs2_rdata; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rd_addr; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rd_wdata; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_pc_rdata; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_pc_wdata; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_addr; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN/8 - 1 : 0] rvfi_mem_rmask; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN/8 - 1 : 0] rvfi_mem_wmask; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_rdata; \\")
print("(* keep *) wire [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_wdata; \\")
print("`rvformal_extamo_wires \\")
for csr in all_csrs:
    # The final CSR line must not end with a line continuation
    print("`rvformal_csr_%s_wires%s" % (csr, "" if csr == all_csrs[-1] else " \\"))
print("")
# Emit the `RVFI_OUTPUTS macro: the same signal list as `RVFI_WIRES, declared
# as a comma-separated module output port list.
print("`define RVFI_OUTPUTS \\")
print("output [`RISCV_FORMAL_NRET - 1 : 0] rvfi_valid, \\")
print("output [`RISCV_FORMAL_NRET * 64 - 1 : 0] rvfi_order, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_ILEN - 1 : 0] rvfi_instruction,\\")
print("output [`RISCV_FORMAL_NRET - 1 : 0] rvfi_trap, \\")
print("output [`RISCV_FORMAL_NRET - 1 : 0] rvfi_halt, \\")
print("output [`RISCV_FORMAL_NRET - 1 : 0] rvfi_intr, \\")
print("output [`RISCV_FORMAL_NRET * 2 - 1 : 0] rvfi_mode, \\")
print("output [`RISCV_FORMAL_NRET * 2 - 1 : 0] rvfi_ixl, \\")
print("output [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rs1_addr, \\")
print("output [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rs2_addr, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rs1_rdata, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rs2_rdata, \\")
print("output [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rd_addr, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rd_wdata, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_pc_rdata, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_pc_wdata, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_addr, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN/8 - 1 : 0] rvfi_mem_rmask, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN/8 - 1 : 0] rvfi_mem_wmask, \\")
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_rdata, \\")
# Last fixed port has no trailing comma; extamo/CSR macros supply their own
print("output [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_wdata \\")
print("`rvformal_extamo_outputs \\")
for csr in all_csrs:
    # The final CSR line must not end with a line continuation
    print("`rvformal_csr_%s_outputs%s" % (csr, "" if csr == all_csrs[-1] else " \\"))
print("")
# Emit the `RVFI_INPUTS macro: the same signal list as `RVFI_OUTPUTS but
# declared as module input ports (used by checker modules).
print("`define RVFI_INPUTS \\")
print("input [`RISCV_FORMAL_NRET - 1 : 0] rvfi_valid, \\")
print("input [`RISCV_FORMAL_NRET * 64 - 1 : 0] rvfi_order, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_ILEN - 1 : 0] rvfi_instruction,\\")
print("input [`RISCV_FORMAL_NRET - 1 : 0] rvfi_trap, \\")
print("input [`RISCV_FORMAL_NRET - 1 : 0] rvfi_halt, \\")
print("input [`RISCV_FORMAL_NRET - 1 : 0] rvfi_intr, \\")
print("input [`RISCV_FORMAL_NRET * 2 - 1 : 0] rvfi_mode, \\")
print("input [`RISCV_FORMAL_NRET * 2 - 1 : 0] rvfi_ixl, \\")
print("input [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rs1_addr, \\")
print("input [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rs2_addr, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rs1_rdata, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rs2_rdata, \\")
print("input [`RISCV_FORMAL_NRET * 5 - 1 : 0] rvfi_rd_addr, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_rd_wdata, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_pc_rdata, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_pc_wdata, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_addr, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN/8 - 1 : 0] rvfi_mem_rmask, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN/8 - 1 : 0] rvfi_mem_wmask, \\")
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_rdata, \\")
# Last fixed port has no trailing comma; extamo/CSR macros supply their own
print("input [`RISCV_FORMAL_NRET * `RISCV_FORMAL_XLEN - 1 : 0] rvfi_mem_wdata \\")
print("`rvformal_extamo_inputs \\")
for csr in all_csrs:
    # The final CSR line must not end with a line continuation
    print("`rvformal_csr_%s_inputs%s" % (csr, "" if csr == all_csrs[-1] else " \\"))
print("")
# Emit the `RVFI_CHANNEL(_name, _idx) macro: a generate block that slices one
# retirement channel `_idx` out of each packed RVFI bus into local wires.
print("`define RVFI_CHANNEL(_name, _idx) \\")
print("generate if(1) begin:_name \\")
print("wire [ 1 - 1 : 0] valid = rvfi_valid [(_idx)*( 1 ) +: 1 ]; \\")
print("wire [ 64 - 1 : 0] order = rvfi_order [(_idx)*( 64 ) +: 64 ]; \\")
print("wire [`RISCV_FORMAL_ILEN - 1 : 0] instruction = rvfi_instruction[(_idx)*(`RISCV_FORMAL_ILEN ) +: `RISCV_FORMAL_ILEN ]; \\")
print("wire [ 1 - 1 : 0] trap = rvfi_trap [(_idx)*( 1 ) +: 1 ]; \\")
print("wire [ 1 - 1 : 0] halt = rvfi_halt [(_idx)*( 1 ) +: 1 ]; \\")
print("wire [ 1 - 1 : 0] intr = rvfi_intr [(_idx)*( 1 ) +: 1 ]; \\")
print("wire [ 2 - 1 : 0] mode = rvfi_mode [(_idx)*( 2 ) +: 2 ]; \\")
print("wire [ 2 - 1 : 0] ixl = rvfi_ixl [(_idx)*( 2 ) +: 2 ]; \\")
print("wire [ 5 - 1 : 0] rs1_addr = rvfi_rs1_addr [(_idx)*( 5 ) +: 5 ]; \\")
print("wire [ 5 - 1 : 0] rs2_addr = rvfi_rs2_addr [(_idx)*( 5 ) +: 5 ]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] rs1_rdata = rvfi_rs1_rdata [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] rs2_rdata = rvfi_rs2_rdata [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("wire [ 5 - 1 : 0] rd_addr = rvfi_rd_addr [(_idx)*( 5 ) +: 5 ]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] rd_wdata = rvfi_rd_wdata [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] pc_rdata = rvfi_pc_rdata [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] pc_wdata = rvfi_pc_wdata [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] mem_addr = rvfi_mem_addr [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("wire [`RISCV_FORMAL_XLEN/8 - 1 : 0] mem_rmask = rvfi_mem_rmask [(_idx)*(`RISCV_FORMAL_XLEN/8) +: `RISCV_FORMAL_XLEN/8]; \\")
print("wire [`RISCV_FORMAL_XLEN/8 - 1 : 0] mem_wmask = rvfi_mem_wmask [(_idx)*(`RISCV_FORMAL_XLEN/8) +: `RISCV_FORMAL_XLEN/8]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] mem_rdata = rvfi_mem_rdata [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("wire [`RISCV_FORMAL_XLEN - 1 : 0] mem_wdata = rvfi_mem_wdata [(_idx)*(`RISCV_FORMAL_XLEN ) +: `RISCV_FORMAL_XLEN ]; \\")
print("`rvformal_extamo_channel(_idx) \\")
for csr in all_csrs:
    # Every CSR line keeps its continuation here because "end endgenerate"
    # always follows
    print("`rvformal_csr_%s_channel(_idx) \\" % csr)
print("end endgenerate")
print("")
# Emit the `RVFI_CONN macro: named port connections wiring each rvfi_* signal
# straight through at a module instantiation site.
print("`define RVFI_CONN \\")
print(".rvfi_valid (rvfi_valid ), \\")
print(".rvfi_order (rvfi_order ), \\")
print(".rvfi_instruction (rvfi_instruction), \\")
print(".rvfi_trap (rvfi_trap ), \\")
print(".rvfi_halt (rvfi_halt ), \\")
print(".rvfi_intr (rvfi_intr ), \\")
print(".rvfi_mode (rvfi_mode ), \\")
print(".rvfi_ixl (rvfi_ixl ), \\")
print(".rvfi_rs1_addr (rvfi_rs1_addr ), \\")
print(".rvfi_rs2_addr (rvfi_rs2_addr ), \\")
print(".rvfi_rs1_rdata (rvfi_rs1_rdata ), \\")
print(".rvfi_rs2_rdata (rvfi_rs2_rdata ), \\")
print(".rvfi_rd_addr (rvfi_rd_addr ), \\")
print(".rvfi_rd_wdata (rvfi_rd_wdata ), \\")
print(".rvfi_pc_rdata (rvfi_pc_rdata ), \\")
print(".rvfi_pc_wdata (rvfi_pc_wdata ), \\")
print(".rvfi_mem_addr (rvfi_mem_addr ), \\")
print(".rvfi_mem_rmask (rvfi_mem_rmask ), \\")
print(".rvfi_mem_wmask (rvfi_mem_wmask ), \\")
print(".rvfi_mem_rdata (rvfi_mem_rdata ), \\")
# Last fixed connection has no trailing comma; extamo/CSR macros supply theirs
print(".rvfi_mem_wdata (rvfi_mem_wdata ) \\")
print("`rvformal_extamo_conn \\")
for csr in all_csrs:
    # The final CSR line must not end with a line continuation
    print("`rvformal_csr_%s_conn%s" % (csr, "" if csr == all_csrs[-1] else " \\"))
| 65.084806 | 149 | 0.595418 | 2,493 | 18,419 | 4.013638 | 0.06418 | 0.205577 | 0.134919 | 0.076754 | 0.829602 | 0.737757 | 0.721867 | 0.714172 | 0.669298 | 0.605637 | 0 | 0.028382 | 0.236766 | 18,419 | 282 | 150 | 65.315603 | 0.683383 | 0.042293 | 0 | 0.242798 | 0 | 0.082305 | 0.796142 | 0.097362 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.946502 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
864e98cd7c8f05034c1ae580ae2e31928caab303 | 16,799 | py | Python | fiftyone/utils/data/ingestors.py | Vs0923/Voxel51 | d644805922ebfbc729f1211f572d77be7d625887 | [
"Apache-2.0"
] | null | null | null | fiftyone/utils/data/ingestors.py | Vs0923/Voxel51 | d644805922ebfbc729f1211f572d77be7d625887 | [
"Apache-2.0"
] | null | null | null | fiftyone/utils/data/ingestors.py | Vs0923/Voxel51 | d644805922ebfbc729f1211f572d77be7d625887 | [
"Apache-2.0"
] | null | null | null | """
Dataset ingestors.
| Copyright 2017-2020, Voxel51, Inc.
| `voxel51.com <https://voxel51.com/>`_
|
"""
import logging
import eta.core.image as etai
import eta.core.utils as etau
import fiftyone as fo
import fiftyone.core.utils as fou
from .importers import (
UnlabeledImageDatasetImporter,
LabeledImageDatasetImporter,
UnlabeledVideoDatasetImporter,
LabeledVideoDatasetImporter,
)
logger = logging.getLogger(__name__)
class ImageIngestor(object):
    """Mixin for :class:`fiftyone.utils.data.importers.DatasetImporter`
    instances that ingest images into the provided ``dataset_dir`` during
    import.

    Args:
        dataset_dir: the directory where input images will be ingested into
        image_format (None): the image format to use when writing in-memory
            images to disk. By default, ``fiftyone.config.default_image_ext``
            is used
    """

    def __init__(self, dataset_dir, image_format=None):
        if image_format is None:
            image_format = fo.config.default_image_ext

        self.dataset_dir = dataset_dir
        self.image_format = image_format
        self._filename_maker = None

    def _ingest_image(self, sample_parser):
        """Ingests the parser's current image into ``dataset_dir`` and returns
        the path to the ingested image.
        """
        if sample_parser.has_image_path:
            try:
                return self._ingest_image_from_path(sample_parser)
            except Exception:
                # Allow for SampleParsers that declare `has_image_path == True`
                # but cannot generate paths at runtime, e.g., because they
                # support inputs of the form `image_or_path` and an image, not
                # a path, was provided.
                #
                # Fix: catch `Exception` rather than using a bare `except` so
                # that `KeyboardInterrupt`/`SystemExit` still propagate
                pass

        return self._ingest_in_memory_image(sample_parser)

    def _ingest_image_from_path(self, sample_parser):
        # Copy the source image into `dataset_dir`; the filename maker keeps
        # the original filename unless a name conflict requires uniquifying
        image_path = sample_parser.get_image_path()
        output_image_path = self._filename_maker.get_output_path(image_path)
        etau.copy_file(image_path, output_image_path)
        return output_image_path

    def _ingest_in_memory_image(self, sample_parser):
        # Decode the image in-memory and write it to a generated output path
        # in `dataset_dir` using the configured image format
        img = sample_parser.get_image()
        image_path = self._filename_maker.get_output_path()
        etai.write(img, image_path)
        return image_path

    def _setup(self):
        # Must be called before ingesting; initializes the maker that
        # generates unique output paths within `dataset_dir`
        self._filename_maker = fou.UniqueFilenameMaker(
            output_dir=self.dataset_dir, default_ext=self.image_format
        )
class UnlabeledImageDatasetIngestor(
    UnlabeledImageDatasetImporter, ImageIngestor
):
    """Dataset importer that ingests unlabeled images into the provided
    ``dataset_dir`` during import.

    The source images are parsed from the provided ``samples`` using the
    provided :class:`fiftyone.utils.data.parsers.UnlabeledImageSampleParser`.

    If an image path is available via
    :func:`fiftyone.utils.data.parsers.UnlabeledImageSampleParser.get_image_path`,
    then the image is directly copied from its source location into
    ``dataset_dir``. In this case, the original filename is maintained, unless
    a name conflict would occur, in which case an index of the form
    ``"-%d" % count`` is appended to the base filename.

    If no image path is available, the image is read in-memory via
    :func:`fiftyone.utils.data.parsers.UnlabeledImageSampleParser.get_image`
    and written to ``dataset_dir`` in the following format::

        <dataset_dir>/<image_count><image_format>

    where ``image_count`` is the number of files in ``dataset_dir``.

    Args:
        dataset_dir: the directory where input images will be ingested into
        samples: an iterable of samples
        sample_parser: an
            :class:`fiftyone.utils.data.parsers.UnlabeledImageSampleParser` to
            use to parse the samples
        image_format (None): the image format to use when writing in-memory
            images to disk. By default, ``fiftyone.config.default_image_ext``
            is used
        max_samples (None): a maximum number of samples to import. By default,
            all samples are imported
    """

    def __init__(
        self,
        dataset_dir,
        samples,
        sample_parser,
        image_format=None,
        max_samples=None,
        **kwargs
    ):
        for arg in kwargs:
            logger.warning("Ignoring unsupported parameter '%s'", arg)

        UnlabeledImageDatasetImporter.__init__(
            self, dataset_dir, max_samples=max_samples
        )
        ImageIngestor.__init__(self, dataset_dir, image_format=image_format)
        self.samples = samples
        self.sample_parser = sample_parser
        self._iter_samples = None
        self._num_samples = None
        self._num_imported = None

    def __iter__(self):
        self._num_imported = 0
        self._iter_samples = iter(self.samples)
        return self

    def __len__(self):
        if self._num_samples is not None:
            return self._num_samples

        # May raise TypeError if `samples` does not support `len()`
        return len(self.samples)

    def __next__(self):
        # Enforce the `max_samples` cap before consuming another sample
        if (
            self.max_samples is not None
            and self._num_imported >= self.max_samples
        ):
            raise StopIteration

        sample = next(self._iter_samples)
        self.sample_parser.with_sample(sample)

        image_path = self._ingest_image(self.sample_parser)

        if self.has_image_metadata:
            image_metadata = self.sample_parser.get_image_metadata()
        else:
            image_metadata = None

        self._num_imported += 1

        return image_path, image_metadata

    @property
    def has_dataset_info(self):
        return False

    @property
    def has_image_metadata(self):
        return self.sample_parser.has_image_metadata

    def setup(self):
        self._setup()

        try:
            self._num_samples = len(self.samples)
            if self.max_samples is not None:
                self._num_samples = min(self._num_samples, self.max_samples)
        except TypeError:
            # Fix: narrowed from a bare `except`. `samples` is an iterable
            # without `len()`, so the sample count remains unknown
            pass
class LabeledImageDatasetIngestor(LabeledImageDatasetImporter, ImageIngestor):
    """Dataset importer that ingests labeled images into the provided
    ``dataset_dir`` during import.

    The source images and labels are parsed from the provided ``samples`` using
    the provided :class:`fiftyone.utils.data.parsers.LabeledImageSampleParser`.

    If an image path is available via
    :func:`fiftyone.utils.data.parsers.LabeledImageSampleParser.get_image_path`,
    then the image is directly copied from its source location into
    ``dataset_dir``. In this case, the original filename is maintained, unless
    a name conflict would occur, in which case an index of the form
    ``"-%d" % count`` is appended to the base filename.

    If no image path is available, the image is read in-memory via
    :func:`fiftyone.utils.data.parsers.LabeledImageSampleParser.get_image` and
    written to ``dataset_dir`` in the following format::

        <dataset_dir>/<image_count><image_format>

    where ``image_count`` is the number of files in ``dataset_dir``.

    Args:
        dataset_dir: the directory where input images will be ingested into
        samples: an iterable of samples
        sample_parser: an
            :class:`fiftyone.utils.data.parsers.LabeledImageSampleParser` to
            use to parse the samples
        image_format (None): the image format to use when writing in-memory
            images to disk. By default, ``fiftyone.config.default_image_ext``
            is used
        skip_unlabeled (False): whether to skip unlabeled images when importing
        max_samples (None): a maximum number of samples to import. By default,
            all samples are imported
    """

    def __init__(
        self,
        dataset_dir,
        samples,
        sample_parser,
        image_format=None,
        skip_unlabeled=False,
        max_samples=None,
        **kwargs
    ):
        for arg in kwargs:
            logger.warning("Ignoring unsupported parameter '%s'", arg)

        LabeledImageDatasetImporter.__init__(
            self,
            dataset_dir,
            skip_unlabeled=skip_unlabeled,
            max_samples=max_samples,
        )
        ImageIngestor.__init__(self, dataset_dir, image_format=image_format)
        self.samples = samples
        self.sample_parser = sample_parser
        self._iter_samples = None
        self._num_samples = None
        self._num_imported = None

    def __iter__(self):
        self._num_imported = 0
        self._iter_samples = iter(self.samples)
        return self

    def __len__(self):
        if self._num_samples is not None:
            return self._num_samples

        # May raise TypeError if `samples` does not support `len()`
        return len(self.samples)

    def __next__(self):
        # Enforce the `max_samples` cap before consuming another sample
        if (
            self.max_samples is not None
            and self._num_imported >= self.max_samples
        ):
            raise StopIteration

        image_path, image_metadata, label = self._parse_next_sample()

        if self.skip_unlabeled:
            # Keep parsing until a labeled sample is found; `StopIteration`
            # from `_parse_next_sample()` ends the import
            while label is None:
                image_path, image_metadata, label = self._parse_next_sample()

        self._num_imported += 1

        return image_path, image_metadata, label

    def _parse_next_sample(self):
        # Parses and ingests the next sample, returning a
        # (image_path, image_metadata, label) tuple
        sample = next(self._iter_samples)
        self.sample_parser.with_sample(sample)

        image_path = self._ingest_image(self.sample_parser)

        if self.has_image_metadata:
            image_metadata = self.sample_parser.get_image_metadata()
        else:
            image_metadata = None

        label = self.sample_parser.get_label()

        return image_path, image_metadata, label

    @property
    def has_dataset_info(self):
        return False

    @property
    def has_image_metadata(self):
        return self.sample_parser.has_image_metadata

    @property
    def label_cls(self):
        return self.sample_parser.label_cls

    def setup(self):
        self._setup()

        try:
            self._num_samples = len(self.samples)
            if self.max_samples is not None:
                self._num_samples = min(self._num_samples, self.max_samples)
        except TypeError:
            # Fix: narrowed from a bare `except`. `samples` is an iterable
            # without `len()`, so the sample count remains unknown
            pass
class VideoIngestor(object):
    """Mixin for :class:`fiftyone.utils.data.importers.DatasetImporter`
    instances that ingest videos into the provided ``dataset_dir`` during
    import.

    Args:
        dataset_dir: the directory where input videos will be ingested into
    """

    def __init__(self, dataset_dir):
        self.dataset_dir = dataset_dir
        self._filename_maker = None

    def _ingest_video(self, sample_parser):
        # Copy the parser's current video into the dataset directory, letting
        # the filename maker resolve any name collisions, and return the new
        # path
        src_path = sample_parser.get_video_path()
        dst_path = self._filename_maker.get_output_path(src_path)
        etau.copy_file(src_path, dst_path)
        return dst_path

    def _setup(self):
        # Must be called before ingesting; prepares the maker that generates
        # unique output paths within `dataset_dir`
        self._filename_maker = fou.UniqueFilenameMaker(
            output_dir=self.dataset_dir
        )
class UnlabeledVideoDatasetIngestor(
    UnlabeledVideoDatasetImporter, VideoIngestor
):
    """Dataset importer that ingests unlabeled videos into the provided
    ``dataset_dir`` during import.

    The source videos are parsed from the provided ``samples`` using the
    provided :class:`fiftyone.utils.data.parsers.UnlabeledVideoSampleParser`.

    The source videos are directly copied from their source locations into
    ``dataset_dir``, maintaining the original filenames, unless a name conflict
    would occur, in which case an index of the form ``"-%d" % count`` is
    appended to the base filename.

    Args:
        dataset_dir: the directory where input videos will be ingested into
        samples: an iterable of samples
        sample_parser: an
            :class:`fiftyone.utils.data.parsers.UnlabeledVideoSampleParser` to
            use to parse the samples
        max_samples (None): a maximum number of samples to import. By default,
            all samples are imported
    """

    def __init__(
        self, dataset_dir, samples, sample_parser, max_samples=None, **kwargs
    ):
        for arg in kwargs:
            logger.warning("Ignoring unsupported parameter '%s'", arg)

        UnlabeledVideoDatasetImporter.__init__(
            self, dataset_dir, max_samples=max_samples
        )
        VideoIngestor.__init__(self, dataset_dir)
        self.samples = samples
        self.sample_parser = sample_parser
        self._iter_samples = None
        self._num_samples = None
        self._num_imported = None

    def __iter__(self):
        self._num_imported = 0
        self._iter_samples = iter(self.samples)
        return self

    def __len__(self):
        if self._num_samples is not None:
            return self._num_samples

        # May raise TypeError if `samples` does not support `len()`
        return len(self.samples)

    def __next__(self):
        # Enforce the `max_samples` cap before consuming another sample
        if (
            self.max_samples is not None
            and self._num_imported >= self.max_samples
        ):
            raise StopIteration

        sample = next(self._iter_samples)
        self.sample_parser.with_sample(sample)

        video_path = self._ingest_video(self.sample_parser)

        if self.has_video_metadata:
            video_metadata = self.sample_parser.get_video_metadata()
        else:
            video_metadata = None

        self._num_imported += 1

        return video_path, video_metadata

    @property
    def has_dataset_info(self):
        return False

    @property
    def has_video_metadata(self):
        return self.sample_parser.has_video_metadata

    def setup(self):
        self._setup()

        try:
            self._num_samples = len(self.samples)
            if self.max_samples is not None:
                self._num_samples = min(self._num_samples, self.max_samples)
        except TypeError:
            # Fix: narrowed from a bare `except`. `samples` is an iterable
            # without `len()`, so the sample count remains unknown
            pass
class LabeledVideoDatasetIngestor(LabeledVideoDatasetImporter, VideoIngestor):
    """Dataset importer that ingests labeled videos into the provided
    ``dataset_dir`` during import.

    The source videos and labels are parsed from the provided ``samples`` using
    the provided :class:`fiftyone.utils.data.parsers.LabeledVideoSampleParser`.

    The source videos are directly copied from their source locations into
    ``dataset_dir``, maintaining the original filenames, unless a name conflict
    would occur, in which case an index of the form ``"-%d" % count`` is
    appended to the base filename.

    Args:
        dataset_dir: the directory where input videos will be ingested into
        samples: an iterable of samples
        sample_parser: an
            :class:`fiftyone.utils.data.parsers.LabeledVideoSampleParser` to
            use to parse the samples
        skip_unlabeled (False): whether to skip unlabeled videos when importing
        max_samples (None): a maximum number of samples to import. By default,
            all samples are imported
    """

    def __init__(
        self,
        dataset_dir,
        samples,
        sample_parser,
        skip_unlabeled=False,
        max_samples=None,
        **kwargs
    ):
        for arg in kwargs:
            logger.warning("Ignoring unsupported parameter '%s'", arg)

        LabeledVideoDatasetImporter.__init__(
            self,
            dataset_dir,
            skip_unlabeled=skip_unlabeled,
            max_samples=max_samples,
        )
        VideoIngestor.__init__(self, dataset_dir)
        self.samples = samples
        self.sample_parser = sample_parser
        self._iter_samples = None
        self._num_samples = None
        self._num_imported = None

    def __iter__(self):
        self._num_imported = 0
        self._iter_samples = iter(self.samples)
        return self

    def __len__(self):
        if self._num_samples is not None:
            return self._num_samples

        # May raise TypeError if `samples` does not support `len()`
        return len(self.samples)

    def __next__(self):
        # Enforce the `max_samples` cap before consuming another sample
        if (
            self.max_samples is not None
            and self._num_imported >= self.max_samples
        ):
            raise StopIteration

        video_path, video_metadata, frames = self._parse_next_sample()

        if self.skip_unlabeled:
            # Keep parsing until a labeled sample is found; `StopIteration`
            # from `_parse_next_sample()` ends the import
            while frames is None:
                video_path, video_metadata, frames = self._parse_next_sample()

        self._num_imported += 1

        return video_path, video_metadata, frames

    def _parse_next_sample(self):
        # Parses and ingests the next sample, returning a
        # (video_path, video_metadata, frames) tuple
        sample = next(self._iter_samples)
        self.sample_parser.with_sample(sample)

        video_path = self._ingest_video(self.sample_parser)

        if self.has_video_metadata:
            # Bugfix: this previously called `has_video_metadata()`, the
            # boolean capability flag, instead of `get_video_metadata()`,
            # which actually retrieves the metadata (cf. the labeled image
            # ingestor)
            video_metadata = self.sample_parser.get_video_metadata()
        else:
            video_metadata = None

        frames = self.sample_parser.get_frame_labels()

        return video_path, video_metadata, frames

    @property
    def has_dataset_info(self):
        return False

    @property
    def has_video_metadata(self):
        return self.sample_parser.has_video_metadata

    def setup(self):
        self._setup()

        try:
            self._num_samples = len(self.samples)
            if self.max_samples is not None:
                self._num_samples = min(self._num_samples, self.max_samples)
        except TypeError:
            # Fix: narrowed from a bare `except`. `samples` is an iterable
            # without `len()`, so the sample count remains unknown
            pass
| 31.283054 | 82 | 0.657599 | 2,009 | 16,799 | 5.218019 | 0.101543 | 0.051512 | 0.04121 | 0.024039 | 0.861681 | 0.828961 | 0.778689 | 0.76667 | 0.75465 | 0.70352 | 0 | 0.0018 | 0.272457 | 16,799 | 536 | 83 | 31.341418 | 0.855916 | 0.324186 | 0 | 0.781759 | 0 | 0 | 0.012818 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.127036 | false | 0.016287 | 0.110749 | 0.029316 | 0.361564 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8658091d4bfada7766051e88813f1c35ae0152f9 | 127,153 | py | Python | BPBackendDjango/BPBackendDjango/testing.py | bp-momentum/BP-backend | f6b4b344c2c5fae3c8bb17874771aa49a48e97ef | [
"MIT"
] | 3 | 2022-03-15T09:56:31.000Z | 2022-03-15T09:56:59.000Z | BPBackendDjango/BPBackendDjango/testing.py | bp-momentum/BP-backend | f6b4b344c2c5fae3c8bb17874771aa49a48e97ef | [
"MIT"
] | 38 | 2022-01-16T18:26:10.000Z | 2022-03-14T23:14:40.000Z | BPBackendDjango/BPBackendDjango/testing.py | bp-momentum/BP-backend | f6b4b344c2c5fae3c8bb17874771aa49a48e97ef | [
"MIT"
] | null | null | null | from django.test import TestCase
from .models import Achievement, Admin, DoneExercises, Exercise, ExerciseInPlan, Friends, Leaderboard, Location, OpenToken, Trainer, TrainingSchedule, User, UserAchievedAchievement, UserMedalInExercise
from .settings import INTERN_SETTINGS
from .Helperclasses.fortests import ViewSupport
from .Helperclasses.jwttoken import JwToken
from .Helperclasses.handlers import ExerciseHandler, FriendHandler, InvitationsHandler, TrainerHandler, UserHandler
from .Views.leaderboardviews import ListLeaderboardView
from .Views.friendviews import AcceptRequestView, AddFriendView, DeclineRequestView, DeleteFriendView, GetMyFriendsView, GetPendingRequestView, GetProfileOfFriendView, GetRequestView
from .Views.userviews import AuthView, ChangeAvatarView, ChangeMotivationView, ChangePasswordView, ChangeTrainerAcademiaView, ChangeTrainerTelephoneView, ChangeUsernameView, CreateUserView, DeleteAccountView, DeleteTrainerView, DeleteUserView, GetInvitedView, GetListOfUsers, GetProfileView, GetStreakView, GetPasswordResetEmailView, GetTrainerContactView, GetUserLevelView, GetUsersOfTrainerView, GetTrainersView, InvalidateInviteView, LoginView, LogoutAllDevicesView, RegisterView, SearchUserView, SetPasswordResetEmailView, SetTrainerLocationView
from .Views.achievementviews import GetAchievementsView, ReloadAfterExerciseView, ReloadFriendAchievementView, GetMedals
from .Views.exerciseviews import GetDoneExercisesOfMonthView, GetDoneExercisesView, GetExerciseListView, GetExerciseView
from .Views.planviews import AddPlanToUserView, CreatePlanView, DeletePlanView, GetAllPlansView, GetPlanOfUser, ShowPlanView
import hashlib
import time
import datetime
class UserTestCase(TestCase):
    """Verifies trainer/user creation and cascade deletion of users when
    their trainer is removed.
    """

    # database id of the trainer created in setUp()
    trainer_id = 1

    def setUp(self):
        """Creates one trainer and one user assigned to that trainer."""
        coach: Trainer = Trainer.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DerTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        self.trainer_id = coach.id
        User.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DeadlyFarts",
            trainer=coach,
            email_address="prescher-erik@web.de",
            password="Password1234",
        )

    def test_if_exists(self):
        """Both records created in setUp() must exist in the database."""
        self.assertTrue(
            Trainer.objects.filter(
                first_name="Erik",
                last_name="Prescher",
                username="DerTrainer",
                email_address="prescher-erik@web.de",
                password="Password1234",
            ).exists()
        )
        self.assertTrue(
            User.objects.filter(
                first_name="Erik",
                last_name="Prescher",
                username="DeadlyFarts",
                trainer=self.trainer_id,
                email_address="prescher-erik@web.de",
                password="Password1234",
            ).exists()
        )

    def test_if_user_gets_deleted_when_trainer_gets_deleted(self):
        """Deleting the trainer must cascade-delete the assigned user."""
        Trainer.objects.filter(
            first_name="Erik",
            last_name="Prescher",
            username="DerTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        ).delete()
        self.assertFalse(
            Trainer.objects.filter(
                first_name="Erik",
                last_name="Prescher",
                username="DerTrainer",
                email_address="prescher-erik@web.de",
                password="Password1234",
            ).exists()
        )
        self.assertFalse(
            User.objects.filter(
                first_name="Erik",
                last_name="Prescher",
                username="DeadlyFarts",
                trainer=self.trainer_id,
                email_address="prescher-erik@web.de",
                password="Password1234",
            ).exists()
        )
class DeleteUserTestCase(TestCase):
    """Verifies that deleting a user cascades to the user's dependent rows
    (done exercises, friendships) while leaving unrelated rows intact.
    """

    # ids of the fixture rows created in setUp()
    user_id = 1
    user_id_2 = 2
    trainer_id = 1
    exercise_id = 1
    done_ex_id = 1
    friends_id = 1

    def setUp(self):
        """Creates a trainer, two users, an exercise inside a plan, a done
        exercise for the first user, and a friendship between the users.
        """
        coach: Trainer = Trainer.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DerTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        self.trainer_id = coach.id
        first_user: User = User.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DeadlyFarts",
            trainer=coach,
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        second_user: User = User.objects.create(
            first_name="Jannis",
            last_name="Bauer",
            username="jbad",
            trainer=coach,
            email_address="test@bla.de",
            password="Password1234",
        )
        self.user_id = first_user.id
        self.user_id_2 = second_user.id
        squat: Exercise = Exercise.objects.create(
            title='Squat', description='Just do it.'
        )
        self.exercise_id = squat.id
        schedule: TrainingSchedule = TrainingSchedule.objects.create(
            name="testplan", trainer=coach
        )
        squat_in_plan: ExerciseInPlan = ExerciseInPlan.objects.create(
            exercise=squat, plan=schedule
        )
        DoneExercises.objects.create(
            exercise=squat_in_plan, user=first_user, points=98
        )
        self.done_ex_id = DoneExercises.objects.get(points=98).id
        Friends.objects.create(friend1=first_user, friend2=second_user)
        self.friends_id = Friends.objects.get(friend1=self.user_id).id

    def test_cascade(self):
        """Deleting user 1 removes their done exercises and friendships but
        keeps the other user, the trainer, and the exercise.
        """
        User.objects.filter(id=self.user_id).delete()
        # unrelated rows survive
        self.assertTrue(User.objects.filter(id=self.user_id_2).exists())
        self.assertTrue(Trainer.objects.filter(id=self.trainer_id).exists())
        self.assertTrue(Exercise.objects.filter(id=self.exercise_id).exists())
        # the user and everything hanging off them is gone
        self.assertFalse(User.objects.filter(id=self.user_id).exists())
        self.assertFalse(
            DoneExercises.objects.filter(id=self.done_ex_id).exists()
        )
        self.assertFalse(Friends.objects.filter(id=self.friends_id).exists())
class ExerciseTestCase(TestCase):
    """Verifies creation (with default field values) and deletion of
    Exercise rows.
    """

    def setUp(self):
        """Creates one activated and one deactivated exercise."""
        Exercise.objects.create(
            title='Kniebeuge', description="Gehe in die Knie, achte..."
        )
        Exercise.objects.create(
            title='Liegestütze', description="Mache Liegestütze", activated=False
        )

    def test_if_exists(self):
        """Both exercises exist with the expected defaults (no video,
        activated unless explicitly disabled).
        """
        self.assertTrue(
            Exercise.objects.filter(
                title='Kniebeuge',
                description="Gehe in die Knie, achte...",
                video=None,
                activated=True,
            ).exists()
        )
        self.assertTrue(
            Exercise.objects.filter(
                title='Liegestütze',
                description="Mache Liegestütze",
                video=None,
                activated=False,
            ).exists()
        )

    def test_if_delete_works(self):
        """After deletion, neither exercise can be found."""
        Exercise.objects.filter(
            title='Kniebeuge',
            description="Gehe in die Knie, achte...",
            video=None,
            activated=True,
        ).delete()
        Exercise.objects.filter(
            title='Liegestütze',
            description="Mache Liegestütze",
            video=None,
            activated=False,
        ).delete()
        self.assertFalse(
            Exercise.objects.filter(
                title='Kniebeuge',
                description="Gehe in die Knie, achte...",
                video=None,
                activated=True,
            ).exists()
        )
        self.assertFalse(
            Exercise.objects.filter(
                title='Liegestütze',
                description="Mache Liegestütze",
                video=None,
                activated=False,
            ).exists()
        )
class PlanTestCase(TestCase):
    """Verifies training-schedule creation, its relations (exercise-in-plan,
    user's assigned plan), and the cascade behavior when exercises, trainers,
    or plans are deleted.
    """

    # ids of the fixture rows created in setUp()
    trainer_id = 0
    user_id = 0
    ex_id = 0
    ts_id = 0

    def setUp(self):
        """Creates a trainer, a user, an exercise, and a plan containing the
        exercise, then assigns the plan to the user.
        """
        trainer: Trainer = Trainer.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DerTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        self.trainer_id = trainer.id
        user: User = User.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DeadlyFarts",
            trainer=trainer,
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        self.user_id = user.id
        ex: Exercise = Exercise.objects.create(
            title='Kniebeuge', description="Gehe in die Knie, achte..."
        )
        self.ex_id = ex.id
        ts: TrainingSchedule = TrainingSchedule.objects.create(trainer=trainer)
        self.ts_id = ts.id
        ExerciseInPlan.objects.create(
            date="monday", sets=5, repeats_per_set=10, exercise=ex, plan=ts
        )
        user.plan = ts
        user.save()

    def test_if_exists(self):
        """All fixture relations exist and the user's plan is set."""
        self.assertTrue(
            TrainingSchedule.objects.filter(trainer=self.trainer_id).exists()
        )
        self.assertTrue(
            ExerciseInPlan.objects.filter(
                exercise=self.ex_id, plan=self.ts_id
            ).exists()
        )
        self.assertTrue(User.objects.filter(first_name="Erik").exists())
        user: User = User.objects.get(first_name="Erik")
        # Fix: assertEqual (assertEquals is a deprecated alias)
        self.assertEqual(user.plan.id, self.ts_id)

    def test_if_related_deletes_work(self):
        """Deleting an exercise, trainer, or plan must cascade to the related
        ExerciseInPlan rows (and, for trainers, to users/plans); deleting a
        plan must null out the user's plan reference.
        """
        # test cascade if Exercise is deleted
        Exercise.objects.filter(title='Kniebeuge').delete()
        # Fix: call .exists() explicitly instead of relying on QuerySet
        # truthiness, consistent with the other assertions in this file
        self.assertFalse(
            ExerciseInPlan.objects.filter(
                exercise=self.ex_id, plan=self.ts_id
            ).exists()
        )
        # recreate data
        Exercise.objects.create(
            title='Kniebeuge', description="Gehe in die Knie, achte..."
        )
        ex: Exercise = Exercise.objects.get(title='Kniebeuge')
        self.ex_id = ex.id
        ts: TrainingSchedule = TrainingSchedule.objects.get(id=self.ts_id)
        ExerciseInPlan.objects.create(
            date="monday", sets=5, repeats_per_set=10, exercise=ex, plan=ts
        )
        # test cascade if Trainer is deleted
        Trainer.objects.filter(first_name="Erik").delete()
        self.assertFalse(User.objects.filter(first_name="Erik").exists())
        self.assertFalse(
            TrainingSchedule.objects.filter(id=self.ts_id).exists()
        )
        self.assertFalse(
            ExerciseInPlan.objects.filter(
                exercise=self.ex_id, plan=self.ts_id
            ).exists()
        )
        # recreate data
        Trainer.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DerTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        trainer: Trainer = Trainer.objects.get(first_name="Erik")
        self.trainer_id = trainer.id
        User.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DeadlyFarts",
            trainer=trainer,
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        user: User = User.objects.get(first_name="Erik")
        self.user_id = user.id
        ts: TrainingSchedule = TrainingSchedule.objects.create(trainer=trainer)
        self.ts_id = ts.id
        ExerciseInPlan.objects.create(
            date="monday", sets=5, repeats_per_set=10, exercise=ex, plan=ts
        )
        user.plan = ts
        user.save()
        # delete plan
        TrainingSchedule.objects.filter(id=self.ts_id).delete()
        self.assertFalse(
            TrainingSchedule.objects.filter(id=self.ts_id).exists()
        )
        user = User.objects.get(first_name="Erik")
        self.assertFalse(
            ExerciseInPlan.objects.filter(
                exercise=self.ex_id, plan=self.ts_id
            ).exists()
        )
        # plan FK uses on_delete=SET_NULL semantics here: user survives with
        # plan cleared
        self.assertIsNone(user.plan)
class getUsersAndTrainersTestCase(TestCase):
"""Integration tests for listing and deleting users/trainers.

Covers GetUsersOfTrainerView, GetTrainersView, DeleteUserView and
DeleteTrainerView for admin, trainer and plain-user sessions.
"""
# NOTE(review): class-level attributes; setUp appends to 'trainers' without
# clearing it first, so the list is shared across test-case instances.
# Harmless while the class has a single test method — confirm before adding more.
admin:Admin = None
trainers = []
users = []
def setUp(self) -> None:
"""Create one admin, two trainers and five users per trainer."""
self.admin:Admin = Admin.objects.create(first_name="Erik", last_name="Prescher", username="DerAdmin", password="Password1234")
self.trainers.append(Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234"))
self.trainers.append(Trainer.objects.create(first_name="Jannis", last_name="Bauer", username="DerAndereTrainer", email_address="prescher-erik@web.de", password="Password1234"))
#users 1-5 belong to trainers[0], users 6-10 to trainers[1]
User.objects.create(first_name="vorname", last_name="nachname", username="user1", email_address="user1@users.com", trainer=self.trainers[0],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user2", email_address="user2@users.com", trainer=self.trainers[0],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user3", email_address="user3@users.com", trainer=self.trainers[0],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user4", email_address="user4@users.com", trainer=self.trainers[0],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user5", email_address="user5@users.com", trainer=self.trainers[0],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user6", email_address="user6@users.com", trainer=self.trainers[1],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user7", email_address="user7@users.com", trainer=self.trainers[1],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user8", email_address="user8@users.com", trainer=self.trainers[1],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user9", email_address="user9@users.com", trainer=self.trainers[1],password="pswd22")
User.objects.create(first_name="vorname", last_name="nachname", username="user10", email_address="user10@users.com", trainer=self.trainers[1],password="pswd22")
self.users = list(User.objects.all())
def test_methods(self):
"""Walk through allowed and forbidden list/delete operations in sequence.

The steps mutate database state (deletions, cascades), so the order of
the requests below matters.
"""
#session tokens: admin, both trainers, and one plain user
token1 = JwToken.create_session_token(self.admin.username, 'admin')
token2 = JwToken.create_session_token(self.trainers[0].username, 'trainer')
token3 = JwToken.create_session_token(self.trainers[1].username, 'trainer')
token4 = JwToken.create_session_token(self.users[0].username, 'user')
#trainer getting his own users
request = ViewSupport.setup_request({'Session-Token': token2}, {})
response = GetUsersOfTrainerView.get(GetUsersOfTrainerView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data').get('users'), UserHandler.get_users_data_for_upper(User.objects.filter(trainer=self.trainers[0])))
#admin getting users of a specific trainer
request = ViewSupport.setup_request({'Session-Token': token1}, {'id': self.trainers[1].id})
response = GetUsersOfTrainerView.post(GetUsersOfTrainerView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data').get('users'), UserHandler.get_users_data_for_upper(User.objects.filter(trainer=self.trainers[1])))
#admin getting all trainers
request = ViewSupport.setup_request({'Session-Token': token1}, {})
response = GetTrainersView.get(GetTrainersView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data').get('trainers'), TrainerHandler.get_trainers_data(Trainer.objects.all()))
#admin deleting a user (token1 is the admin session)
id = self.users[9].id
request = ViewSupport.setup_request({'Session-Token': token1}, {'id': id})
response = DeleteUserView.post(DeleteUserView, request)
self.assertTrue(response.data.get('success'))
self.assertFalse(User.objects.filter(id=id).exists())
#trainer not allowed to delete another trainer's user
id = self.users[8].id
request = ViewSupport.setup_request({'Session-Token': token2}, {'id': id})
response = DeleteUserView.post(DeleteUserView, request)
self.assertFalse(response.data.get('success'))
self.assertTrue(User.objects.filter(id=id).exists())
#same user now deleted by his own trainer
request = ViewSupport.setup_request({'Session-Token': token3}, {'id': id})
response = DeleteUserView.post(DeleteUserView, request)
self.assertTrue(response.data.get('success'))
self.assertFalse(User.objects.filter(id=id).exists())
#admin deleting a trainer (cascades to that trainer's users)
id = self.trainers[1].id
request = ViewSupport.setup_request({'Session-Token': token1}, {'id': id})
response = DeleteTrainerView.post(DeleteTrainerView, request)
self.assertTrue(response.data.get('success'))
self.assertFalse(Trainer.objects.filter(id=id).exists())
#invalid requests from here on
#user not allowed to get users of trainer
request = ViewSupport.setup_request({'Session-Token': token4}, {})
response = GetUsersOfTrainerView.get(GetUsersOfTrainerView, request)
self.assertFalse(response.data.get('success'))
#invalid trainer
# NOTE(review): identical to the previous request — no trainer id is passed,
# so this looks like a copy/paste leftover rather than an "invalid trainer" case.
request = ViewSupport.setup_request({'Session-Token': token4}, {})
response = GetUsersOfTrainerView.get(GetUsersOfTrainerView, request)
self.assertFalse(response.data.get('success'))
#admin can not get users of himself
request = ViewSupport.setup_request({'Session-Token': token1}, {})
response = GetUsersOfTrainerView.get(GetUsersOfTrainerView, request)
self.assertFalse(response.data.get('success'))
#trainer not allowed to get other trainers' users
request = ViewSupport.setup_request({'Session-Token': token2}, {'id': self.trainers[1].id})
response = GetUsersOfTrainerView.post(GetUsersOfTrainerView, request)
self.assertFalse(response.data.get('success'))
#user not allowed to get a trainer's users
request = ViewSupport.setup_request({'Session-Token': token4}, {'id': self.trainers[1].id})
response = GetUsersOfTrainerView.post(GetUsersOfTrainerView, request)
self.assertFalse(response.data.get('success'))
#trainer not allowed to get trainers
request = ViewSupport.setup_request({'Session-Token': token2}, {})
response = GetTrainersView.get(GetTrainersView, request)
self.assertFalse(response.data.get('success'))
#user not allowed to get trainers
request = ViewSupport.setup_request({'Session-Token': token4}, {})
response = GetTrainersView.get(GetTrainersView, request)
self.assertFalse(response.data.get('success'))
#user not allowed to delete other users
id = self.users[4].id
request = ViewSupport.setup_request({'Session-Token': token4}, {'id': id})
response = DeleteUserView.post(DeleteUserView, request)
self.assertFalse(response.data.get('success'))
self.assertTrue(User.objects.filter(id=id).exists())
#invalid user: users[7] belonged to trainers[1], who was deleted above,
#so this user no longer exists (cascade delete)
id = self.users[7].id
request = ViewSupport.setup_request({'Session-Token': token1}, {'id': id})
response = DeleteUserView.post(DeleteUserView, request)
self.assertFalse(response.data.get('success'))
#invalid trainer: trainers[1] was already deleted above
id = self.trainers[1].id
request = ViewSupport.setup_request({'Session-Token': token1}, {'id': id})
response = DeleteTrainerView.post(DeleteTrainerView, request)
self.assertFalse(response.data.get('success'))
self.assertFalse(Trainer.objects.filter(id=id).exists())
#user can not delete trainer
id = self.trainers[0].id
request = ViewSupport.setup_request({'Session-Token': token4}, {'id': id})
response = DeleteTrainerView.post(DeleteTrainerView, request)
self.assertFalse(response.data.get('success'))
self.assertTrue(Trainer.objects.filter(id=id).exists())
#trainer can not delete itself (via this view)
id = self.trainers[0].id
request = ViewSupport.setup_request({'Session-Token': token2}, {'id': id})
response = DeleteTrainerView.post(DeleteTrainerView, request)
self.assertFalse(response.data.get('success'))
self.assertTrue(Trainer.objects.filter(id=id).exists())
#missing arguments
request = ViewSupport.setup_request({}, {})
response = GetTrainersView.get(GetTrainersView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), [])
request = ViewSupport.setup_request({}, {})
response = GetUsersOfTrainerView.get(GetUsersOfTrainerView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), [])
request = ViewSupport.setup_request({}, {})
response = GetUsersOfTrainerView.post(GetUsersOfTrainerView, request)
self.assertFalse(response.data.get('success'))
# NOTE(review): the next request is created before the two assertions below,
# which still inspect the previous (GetUsersOfTrainerView.post) response;
# the request itself is only consumed by the DeleteTrainerView call after them.
request = ViewSupport.setup_request({}, {})
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), ['id'])
response = DeleteTrainerView.post(DeleteTrainerView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), ['id'])
request = ViewSupport.setup_request({}, {})
response = DeleteUserView.post(DeleteUserView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), ['id'])
class AchievementTestCase(TestCase):
"""Tests for achievement listing, reload endpoints and the streak view."""
trainer:Trainer = None
user1:User = None
user2:User = None
token1 = None
token2 = None
token3 = None
achievement1:Achievement = None
achievement2:Achievement = None
def setUp(self) -> None:
"""Create admin/trainer/users, session tokens and two achievements."""
# NOTE(review): the admin shares the username "DerTrainer" with the trainer
# created right below — confirm this collision is intended.
admin:Admin = Admin.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", password="Password1234")
self.trainer:Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234")
#user1 starts with a streak of 3 (level 1 of the 'streak' achievement)
self.user1:User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=self.trainer, email_address="prescher-erik@web.de", password="Password1234", streak=3)
self.user2:User = User.objects.create(first_name="Jannis", last_name="Bauer", username="jbad", trainer=self.trainer, email_address="test@bla.de", password="Password1234")
self.token1 = JwToken.create_session_token(admin.username, 'admin')
self.token2 = JwToken.create_session_token(self.trainer.username, 'trainer')
self.token3 = JwToken.create_session_token(self.user1.username, 'user')
#titles/descriptions are localized JSON; icons map level -> URL
self.achievement1:Achievement = Achievement.objects.create(name='streak', title='{"en":"Streak","de":"Streak"}', description='{"en": "get a streak", "de": "sammel eine Streak"}', icon='{"4":"www.test.de/streak4","3":"www.test.de/streak3","2":"www.test.de/streak2","1":"www.test.de/streak1","0":"www.test.de/streak0"}')
self.achievement2:Achievement = Achievement.objects.create(name='havingFriends', title='{"en":"A Friend!","de":"Freundschaft!"}', description='{"en": "add a friend", "de": "habe einen Freund"}', icon='{"1":"www.test.de/friends1","0":"www.test.de/friends0"}')
def test_get_achievements(self):
"""A user gets his full achievement list; trainers and bad tokens fail."""
request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
response = GetAchievementsView.get(GetAchievementsView, request)
self.assertTrue(response.data.get('success'))
#expected payload; order-independent comparison below
expected = [{
'name': 'doneExercises',
'title': 'Abgeschlossene Übungen',
'description': "Mache Übungen um diese Errungenschaft zu bekommen beziehungsweise hoch zu leveln",
'level': 0,
'progress': '0/10',
'hidden': False,
'icon': 'https://cdn.geoscribble.de/achievements/doneExercises_0.svg'
}, {
'name': 'havingFriends',
'title': 'Freundschaft!',
'description': "habe einen Freund",
'level': 0,
'progress': '0/1',
'hidden': False,
'icon': "www.test.de/friends0"
}, {
'name': 'streak',
'title': 'Streak',
'description': "sammel eine Streak",
'level': 1,
'progress': '3/7',
'hidden': False,
'icon': "www.test.de/streak1"
}, {
'name': 'perfectExercise',
'title': 'Perfekte Übung',
'description': "Erreiche 100 Prozent bei einer Übung",
'level': 0,
'progress': '0/1',
'hidden': False,
'icon': 'https://cdn.geoscribble.de/achievements/perfectExercise_0.svg'
}]
actual = response.data.get('data').get('achievements')
#same entries in any order
self.assertEquals(len(actual), len(expected))
for i in actual:
self.assertTrue(expected.__contains__(i))
for i in expected:
self.assertTrue(actual.__contains__(i))
self.assertEquals(response.data.get('data').get('nr_unachieved_hidden'), 2)
#invalid token
request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
response = GetAchievementsView.get(GetAchievementsView, request)
self.assertFalse(response.data.get('success'))
#trainer not allowed to
request = ViewSupport.setup_request({'Session-Token': JwToken.create_session_token(self.trainer.username, 'trainer')}, {})
response = GetAchievementsView.get(GetAchievementsView, request)
self.assertFalse(response.data.get('success'))
#missing arguments
request = ViewSupport.setup_request({}, {})
response = GetAchievementsView.get(GetAchievementsView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), [])
def test_reload_friends(self):
"""Reloading after a new friendship reports the levelled-up achievement once."""
Friends.objects.create(friend1=self.user1, friend2=self.user2, accepted=True)
#valid
#changed: first reload reports the new level
request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
response = ReloadFriendAchievementView.get(ReloadFriendAchievementView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data').get('achievements'), {
'name': 'havingFriends',
'title': 'Freundschaft!',
'description': "habe einen Freund",
'level': 1,
'progress': 'done',
'hidden': False,
'icon': "www.test.de/friends1"
})
#nothing changed: second reload returns an empty payload
request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
response = ReloadFriendAchievementView.get(ReloadFriendAchievementView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data'), {})
#invalid
#as Trainer not possible
request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
response = ReloadFriendAchievementView.get(ReloadFriendAchievementView, request)
self.assertFalse(response.data.get('success'))
#as Admin not possible
request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
response = ReloadFriendAchievementView.get(ReloadFriendAchievementView, request)
self.assertFalse(response.data.get('success'))
#invalid token
# NOTE(review): this calls GetAchievementsView, not ReloadFriendAchievementView —
# looks like a copy/paste leftover; confirm which view was meant to be tested here.
request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
response = GetAchievementsView.get(GetAchievementsView, request)
self.assertFalse(response.data.get('success'))
#missing arguments
request = ViewSupport.setup_request({}, {})
response = ReloadFriendAchievementView.get(ReloadFriendAchievementView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), [])
#delete Friends again
Friends.objects.all().delete()
def test_reload_exercise(self):
"""Reloading after exercises reports streak level changes exactly once."""
#valid
#change: raise the streak so the achievement levels up to 2
self.user1.streak = 7
self.user1.save(force_update=True)
self.user1:User = User.objects.get(username=self.user1.username)
request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
response = ReloadAfterExerciseView.get(ReloadAfterExerciseView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data').get('achievements'), [{
'name': 'streak',
'title': 'Streak',
'description': "sammel eine Streak",
'level': 2,
'progress': '7/30',
'hidden': False,
'icon': "www.test.de/streak2"
}])
#no change: second reload returns an empty payload
request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
response = ReloadAfterExerciseView.get(ReloadAfterExerciseView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data'), {})
#invalid
#as Trainer not possible
request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
response = ReloadAfterExerciseView.get(ReloadAfterExerciseView, request)
self.assertFalse(response.data.get('success'))
#as Admin not possible
request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
response = ReloadAfterExerciseView.get(ReloadAfterExerciseView, request)
self.assertFalse(response.data.get('success'))
#invalid token
request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
response = ReloadAfterExerciseView.get(ReloadAfterExerciseView, request)
self.assertFalse(response.data.get('success'))
#missing arguments
request = ViewSupport.setup_request({}, {})
response = ReloadAfterExerciseView.get(ReloadAfterExerciseView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), [])
def test_streak(self):
"""The streak view reports days, glow flag and flame height for a user."""
#valid: user1 has a streak of 3 from setUp
request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
response = GetStreakView.get(GetStreakView, request)
self.assertTrue(response.data.get('success'))
self.assertEquals(response.data.get('data').get('days'), 3)
self.assertTrue(response.data.get('data').get('flame_glow'))
self.assertEquals(response.data.get('data').get('flame_height'), 0.3)
#invalid
#as Trainer not possible
request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
response = GetStreakView.get(GetStreakView, request)
self.assertFalse(response.data.get('success'))
#as Admin not possible
request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
response = GetStreakView.get(GetStreakView, request)
self.assertFalse(response.data.get('success'))
#invalid token
request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
response = GetStreakView.get(GetStreakView, request)
self.assertFalse(response.data.get('success'))
#missing arguments
request = ViewSupport.setup_request({}, {})
response = GetStreakView.get(GetStreakView, request)
self.assertFalse(response.data.get('success'))
self.assertEquals(response.data.get('data').get('header'), ['Session-Token'])
self.assertEquals(response.data.get('data').get('data'), [])
class LevelTestCase(TestCase):
    """Tests for GetUserLevelView (XP-based user levels)."""

    user1: User = None
    user2: User = None

    def setUp(self):
        coach: Trainer = Trainer.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DerTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        self.trainer = coach
        first: User = User.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DeadlyFarts",
            trainer=coach,
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        second: User = User.objects.create(
            first_name="Jannis",
            last_name="Bauer",
            username="jbad",
            trainer=coach,
            email_address="test@bla.de",
            password="Password1234",
        )
        # 400 XP lifts the second user to level 1.
        second.xp = 400
        second.save()
        self.user1: User = first
        self.user2: User = second

    def test_level(self):
        """Own level, other users' levels, and the rejected cases."""
        # A user may query his own level (no XP yet -> level 0).
        request = ViewSupport.setup_request(
            {'Session-Token': JwToken.create_session_token(self.user1.username, 'user')},
            {'username': self.user1.username},
        )
        response = GetUserLevelView.post(GetUserLevelView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('level'), 0)
        # ... and the level of any other user.
        request = ViewSupport.setup_request(
            {'Session-Token': JwToken.create_session_token(self.user1.username, 'user')},
            {'username': self.user2.username},
        )
        response = GetUserLevelView.post(GetUserLevelView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('level'), 1)
        # Trainers can look up their users' levels as well.
        request = ViewSupport.setup_request(
            {'Session-Token': JwToken.create_session_token(self.trainer.username, 'trainer')},
            {'username': self.user2.username},
        )
        response = GetUserLevelView.post(GetUserLevelView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('level'), 1)
        # An invalid session token is rejected.
        request = ViewSupport.setup_request(
            {'Session-Token': 'ìnvalid'},
            {'username': self.user2.username},
        )
        response = GetUserLevelView.post(GetUserLevelView, request)
        self.assertFalse(response.data.get('success'))
        # Trainers themselves do not have a level.
        request = ViewSupport.setup_request(
            {'Session-Token': JwToken.create_session_token(self.trainer.username, 'trainer')},
            {'username': self.trainer.username},
        )
        response = GetUserLevelView.post(GetUserLevelView, request)
        self.assertFalse(response.data.get('success'))
        # Missing header and body arguments are reported.
        request = ViewSupport.setup_request({}, {})
        response = GetUserLevelView.post(GetUserLevelView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['username'])
class HandlingInvitesTestCase(TestCase):
    """Tests for listing and invalidating open user invitations."""

    def setUp(self) -> None:
        # Two trainers, each having issued one open invitation.
        self.trainer = Trainer.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DerTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        self.trainer2 = Trainer.objects.create(
            first_name="Erik",
            last_name="Prescher",
            username="DerAndereTrainer",
            email_address="prescher-erik@web.de",
            password="Password1234",
        )
        invite = JwToken.create_new_user_token(self.trainer.username, 'Jannis', 'Bauer', 'jannis@test.de', 'user')
        self.ot1: OpenToken = OpenToken.objects.create(
            token=invite, email='jannis@test.de', first_name='Jannis',
            last_name='Bauer', creator=self.trainer.username,
        )
        invite = JwToken.create_new_user_token(self.trainer2.username, 'Julian', 'Imhof', 'julian@test.de', 'user')
        self.ot2: OpenToken = OpenToken.objects.create(
            token=invite, email='julian@test.de', first_name='Julian',
            last_name='Imhof', creator=self.trainer2.username,
        )
        # Session for the first trainer; ot2 belongs to somebody else.
        self.token = JwToken.create_session_token('DerTrainer', 'trainer')

    def test_get(self):
        """A trainer sees exactly the invitations he created himself."""
        request = ViewSupport.setup_request({'Session-Token': self.token}, {})
        response = GetInvitedView.get(GetInvitedView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(
            response.data.get('data').get('invited'),
            InvitationsHandler.get_invited_data([self.ot1,]),
        )
        # An invalid session token is rejected.
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetInvitedView.get(GetInvitedView, request)
        self.assertFalse(response.data.get('success'))
        # Missing arguments are reported.
        request = ViewSupport.setup_request({}, {})
        response = GetInvitedView.get(GetInvitedView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])

    def test_invalidate(self):
        """Invitations can only be revoked by the trainer who created them."""
        # Own invitation: revoked and gone.
        request = ViewSupport.setup_request({'Session-Token': self.token}, {'id': self.ot1.id})
        response = InvalidateInviteView.post(InvalidateInviteView, request)
        self.assertTrue(response.data.get('success'))
        self.assertFalse(OpenToken.objects.filter(id=self.ot1.id).exists())
        # Another trainer's invitation survives the attempt.
        request = ViewSupport.setup_request({'Session-Token': self.token}, {'id': self.ot2.id})
        response = InvalidateInviteView.post(InvalidateInviteView, request)
        self.assertFalse(response.data.get('success'))
        self.assertTrue(OpenToken.objects.filter(id=self.ot2.id).exists())
        # An invalid session token is rejected.
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'id': self.ot2.id})
        response = InvalidateInviteView.post(InvalidateInviteView, request)
        self.assertFalse(response.data.get('success'))
        # Missing arguments are reported.
        request = ViewSupport.setup_request({}, {})
        response = InvalidateInviteView.post(InvalidateInviteView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['id'])
class ProfileTestCase(TestCase):
def setUp(self) -> None:
    """Create a trainer and two users whose passwords are sha3-256 digests."""
    def hashed(plain):
        # The profile views compare against sha3-256 hex digests.
        return str(hashlib.sha3_256(plain.encode('utf8')).hexdigest())

    coach: Trainer = Trainer.objects.create(
        first_name="Erik",
        last_name="Prescher",
        username="DerTrainer",
        email_address="prescher-erik@web.de",
        password=hashed('Passwort'),
    )
    self.trainer_id = coach.id
    first: User = User.objects.create(
        first_name="Erik",
        last_name="Prescher",
        username="DeadlyFarts",
        trainer=coach,
        email_address="prescher-erik@web.de",
        password=hashed('passwd'),
    )
    second: User = User.objects.create(
        first_name="Jannis",
        last_name="Bauer",
        username="jbad",
        trainer=coach,
        email_address="test@bla.de",
        password=hashed('passwdyo'),
    )
    self.user1_id = first.id
    self.user2_id = second.id
    # token1: trainer session, token2/token3: user sessions.
    self.token1 = JwToken.create_session_token(coach.username, 'trainer')
    self.token2 = JwToken.create_session_token(first.username, 'user')
    self.token3 = JwToken.create_session_token(second.username, 'user')
def test_change_username(self):
    """Trainers and users can rename themselves; bad requests are rejected."""
    # Trainer renames himself.
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {'username': 'neuerName'})
    response = ChangeUsernameView.post(ChangeUsernameView, request)
    self.assertTrue(response.data.get('success'))
    self.assertEqual(Trainer.objects.get(id=self.trainer_id).username, 'neuerName')
    # User renames himself.
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {'username': 'coolerName'})
    response = ChangeUsernameView.post(ChangeUsernameView, request)
    self.assertTrue(response.data.get('success'))
    self.assertEqual(User.objects.get(id=self.user1_id).username, 'coolerName')
    # An invalid session token is rejected.
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'username': 'coolerName'})
    response = ChangeUsernameView.post(ChangeUsernameView, request)
    self.assertFalse(response.data.get('success'))
    # Missing arguments are reported.
    request = ViewSupport.setup_request({}, {})
    response = ChangeUsernameView.post(ChangeUsernameView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['username'])
def test_change_password(self):
    """Passwords change only when the current password is supplied correctly."""
    # Trainer changes his password.
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {
        'password': 'Passwort',
        'new_password': 'pswd_new'
    })
    response = ChangePasswordView.post(ChangePasswordView, request)
    self.assertTrue(response.data.get('success'))
    stored = Trainer.objects.get(id=self.trainer_id).password
    self.assertEqual(stored, str(hashlib.sha3_256('pswd_new'.encode('utf8')).hexdigest()))
    # User changes his password.
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {
        'password': 'passwd',
        'new_password': 'neue1234'
    })
    response = ChangePasswordView.post(ChangePasswordView, request)
    self.assertTrue(response.data.get('success'))
    stored = User.objects.get(id=self.user1_id).password
    self.assertEqual(stored, str(hashlib.sha3_256('neue1234'.encode('utf8')).hexdigest()))
    # Wrong current password: request fails and the stored hash is untouched.
    request = ViewSupport.setup_request({'Session-Token': self.token3}, {
        'password': 'wrong',
        'new_password': 'neverReached'
    })
    response = ChangePasswordView.post(ChangePasswordView, request)
    self.assertFalse(response.data.get('success'))
    stored = User.objects.get(id=self.user2_id).password
    self.assertEqual(stored, str(hashlib.sha3_256('passwdyo'.encode('utf8')).hexdigest()))
    # An invalid session token is rejected.
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {
        'password': 'wrong',
        'new_password': 'neverReached'
    })
    response = ChangePasswordView.post(ChangePasswordView, request)
    self.assertFalse(response.data.get('success'))
    # Missing arguments are reported.
    request = ViewSupport.setup_request({}, {})
    response = ChangePasswordView.post(ChangePasswordView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['password', 'new_password'])
def test_change_avatar(self):
    """Users can pick an avatar; trainers and bad requests are rejected."""
    # Both users set their avatars.
    request = ViewSupport.setup_request({'Session-Token': self.token3}, {'avatar': 1})
    response = ChangeAvatarView.post(ChangeAvatarView, request)
    self.assertTrue(response.data.get('success'))
    self.assertEqual(User.objects.get(id=self.user2_id).avatar, 1)
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {'avatar': 2})
    response = ChangeAvatarView.post(ChangeAvatarView, request)
    self.assertTrue(response.data.get('success'))
    self.assertEqual(User.objects.get(id=self.user1_id).avatar, 2)
    # Trainers have no avatar to change.
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {'avatar': 1})
    response = ChangeAvatarView.post(ChangeAvatarView, request)
    self.assertFalse(response.data.get('success'))
    # An invalid session token is rejected.
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'avatar': 1})
    response = ChangeAvatarView.post(ChangeAvatarView, request)
    self.assertFalse(response.data.get('success'))
    # Missing arguments are reported.
    request = ViewSupport.setup_request({}, {})
    response = ChangeAvatarView.post(ChangeAvatarView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['avatar'])
def test_change_motivation(self):
    """Users can set a motivation text; trainers and bad requests cannot."""
    # User sets his motivation text.
    request = ViewSupport.setup_request({'Session-Token': self.token3}, {'motivation': 'Nieder mit der Schwerkraft, lang lebe der Leichtsinn'})
    response = ChangeMotivationView.post(ChangeMotivationView, request)
    self.assertTrue(response.data.get('success'))
    self.assertEqual(
        User.objects.get(id=self.user2_id).motivation,
        'Nieder mit der Schwerkraft, lang lebe der Leichtsinn',
    )
    # Trainers are not able to use this endpoint.
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {'motivation': 'Nieder mit der Schwerkraft, lang lebe der Leichtsinn'})
    response = ChangeMotivationView.post(ChangeMotivationView, request)
    self.assertFalse(response.data.get('success'))
    # An invalid session token is rejected.
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'motivation': 'Nieder mit der Schwerkraft, lang lebe der Leichtsinn'})
    response = ChangeMotivationView.post(ChangeMotivationView, request)
    self.assertFalse(response.data.get('success'))
    # Missing arguments are reported.
    request = ViewSupport.setup_request({}, {})
    response = ChangeMotivationView.post(ChangeMotivationView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['motivation'])
def test_profile_data(self):
    """Profile endpoints: a user can read their own profile; a trainer can
    change telephone, academia and location; both user and trainer can fetch
    the trainer's contact data. Wrong roles, invalid tokens and missing
    arguments are rejected with the expected error payloads."""
    # valid
    # get profile (user token3)
    request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
    response = GetProfileView.get(GetProfileView, request)
    self.assertTrue(response.data.get('success'))
    user2: User = User.objects.get(id=self.user2_id)
    self.assertEqual(user2.username, response.data.get('data').get('username'))
    self.assertEqual(user2.avatar, response.data.get('data').get('avatar'))
    self.assertEqual(user2.first_login, response.data.get('data').get('first_login'))
    self.assertEqual(user2.motivation, response.data.get('data').get('motivation'))
    # change telephone number of trainer
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {'telephone': '015712251102'})
    response = ChangeTrainerTelephoneView.post(ChangeTrainerTelephoneView, request)
    self.assertTrue(response.data.get('success'))
    trainer: Trainer = Trainer.objects.get(id=self.trainer_id)
    self.assertEqual(trainer.telephone, '015712251102')
    # change academia of trainer
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {'academia': 'dr. nat'})
    response = ChangeTrainerAcademiaView.post(ChangeTrainerAcademiaView, request)
    self.assertTrue(response.data.get('success'))
    trainer = Trainer.objects.get(id=self.trainer_id)
    self.assertEqual(trainer.academia, 'dr. nat')
    # change location of trainer
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {
        'street': 'Straße',
        'house_nr': '4',
        'postal_code': '64287',
        'city': 'Darmstadt',
        'country': 'Deutschland',
        'address_add': ''
    })
    response = SetTrainerLocationView.post(SetTrainerLocationView, request)
    self.assertTrue(response.data.get('success'))
    trainer = Trainer.objects.get(id=self.trainer_id)
    # only one Location exists at this point, so get() without filter works
    loc: Location = Location.objects.get()
    self.assertEqual(trainer.location, loc)
    # user gets trainers contact
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
    response = GetTrainerContactView.get(GetTrainerContactView, request)
    self.assertTrue(response.data.get('success'))
    trainer = Trainer.objects.get(id=self.trainer_id)
    self.assertEqual(response.data.get('data').get('name'), 'dr. nat Erik Prescher')
    self.assertEqual(response.data.get('data').get('address'), 'Straße 4, 64287 Darmstadt, Deutschland')
    self.assertEqual(trainer.telephone, response.data.get('data').get('telephone'))
    self.assertEqual(trainer.email_address, response.data.get('data').get('email'))
    # trainer gets its contact (field-by-field, not the formatted address)
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
    response = GetTrainerContactView.get(GetTrainerContactView, request)
    self.assertTrue(response.data.get('success'))
    trainer = Trainer.objects.get(id=self.trainer_id)
    self.assertEqual(response.data.get('data').get('name'), 'dr. nat Erik Prescher')
    self.assertEqual(response.data.get('data').get('academia'), 'dr. nat')
    self.assertEqual(response.data.get('data').get('street'), 'Straße')
    self.assertEqual(response.data.get('data').get('city'), 'Darmstadt')
    self.assertEqual(response.data.get('data').get('country'), 'Deutschland')
    self.assertEqual(response.data.get('data').get('address_addition'), '')
    self.assertEqual(response.data.get('data').get('postal_code'), '64287')
    self.assertEqual(response.data.get('data').get('house_nr'), '4')
    self.assertEqual(trainer.telephone, response.data.get('data').get('telephone'))
    self.assertEqual(trainer.email_address, response.data.get('data').get('email'))
    # invalid
    # trainer not allowed to get profile
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
    response = GetProfileView.get(GetProfileView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
    response = GetProfileView.get(GetProfileView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = GetProfileView.get(GetProfileView, request)
    self.assertFalse(response.data.get('success'))
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), [])
    # user not able to change telephone number
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {'telephone': '015712251102'})
    response = ChangeTrainerTelephoneView.post(ChangeTrainerTelephoneView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'telephone': '015712251102'})
    response = ChangeTrainerTelephoneView.post(ChangeTrainerTelephoneView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = ChangeTrainerTelephoneView.post(ChangeTrainerTelephoneView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['telephone'])
    # user not able to change academia
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {'academia': 'dr. nat'})
    response = ChangeTrainerAcademiaView.post(ChangeTrainerAcademiaView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'academia': 'dr. nat'})
    response = ChangeTrainerAcademiaView.post(ChangeTrainerAcademiaView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = ChangeTrainerAcademiaView.post(ChangeTrainerAcademiaView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['academia'])
    # user not able to change location
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {
        'street': 'Straße',
        'house_nr': '4',
        'postal_code': '64287',
        'city': 'Darmstadt',
        'country': 'Deutschland',
        'address_add': ''
    })
    response = SetTrainerLocationView.post(SetTrainerLocationView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {
        'street': 'Straße',
        'house_nr': '4',
        'postal_code': '64287',
        'city': 'Darmstadt',
        'country': 'Deutschland',
        'address_add': ''
    })
    response = SetTrainerLocationView.post(SetTrainerLocationView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = SetTrainerLocationView.post(SetTrainerLocationView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['street', 'postal_code', 'country', 'city', 'house_nr', 'address_add'])
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
    response = GetTrainerContactView.get(GetTrainerContactView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = GetTrainerContactView.get(GetTrainerContactView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), [])
def test_done_exercises_of_month(self):
    """GetDoneExercisesOfMonthView: returns, for a user and a month, the
    completed exercises plus the still-open plan entries of later weekdays;
    trainers, invalid tokens and missing arguments are rejected."""
    # additional setup: one plan with a saturday exercise, done once on 2022-02-12
    ex: Exercise = Exercise.objects.create(title='Kniebeuge')
    trainer: Trainer = Trainer.objects.get(id=self.trainer_id)
    plan: TrainingSchedule = TrainingSchedule.objects.create(trainer=trainer)
    exip: ExerciseInPlan = ExerciseInPlan.objects.create(sets=1, repeats_per_set=10, exercise=ex, plan=plan, date='saturday')
    user: User = User.objects.get(id=self.user1_id)
    user.plan = plan
    user.save(force_update=True)
    dex: DoneExercises = DoneExercises.objects.create(exercise=exip, user=user, points=100, completed=True, date=int(datetime.datetime(2022, 2, 12, 23, 52).timestamp()))
    # valid: Feb 2022 has the done entry plus the open saturdays (19th, 26th)
    result = [{
        "exercise_plan_id": dex.exercise.id,
        "id": dex.exercise.exercise.id,
        "date": dex.date,
        "points": dex.points,
        "done": True
    }, {
        "exercise_plan_id": exip.id,
        "id": exip.exercise.id,
        "date": int(datetime.datetime(year=2022, month=2, day=19, hour=12).timestamp()),
        "points": None,
        "done": False
    }, {
        "exercise_plan_id": exip.id,
        "id": exip.exercise.id,
        "date": int(datetime.datetime(year=2022, month=2, day=26, hour=12).timestamp()),
        "points": None,
        "done": False
    }]
    request = ViewSupport.setup_request({'Session-Token': self.token2}, {
        'year': 2022,
        'month': 2
    })
    response = GetDoneExercisesOfMonthView.post(GetDoneExercisesOfMonthView, request)
    self.assertTrue(response.data.get('success'))
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(response.data.get('data').get('done'), result)
    # invalid
    # trainer not able to
    request = ViewSupport.setup_request({'Session-Token': self.token1}, {
        'year': 2022,
        'month': 3
    })
    response = GetDoneExercisesOfMonthView.post(GetDoneExercisesOfMonthView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {
        'year': 2022,
        'month': 3
    })
    response = GetDoneExercisesOfMonthView.post(GetDoneExercisesOfMonthView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = GetDoneExercisesOfMonthView.post(GetDoneExercisesOfMonthView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['month', 'year'])
class TestUserViews(TestCase):
    """Account lifecycle tests: delete account, login, register via
    new-user token, create-user invitations, refresh-token auth and
    logout-all-devices."""

    # ids/tokens are (re)bound per test in setUp or inside the tests
    trainer_id = 1
    user_id = 1
    admin_id = 1
    trainer_token = None
    user_token = None
    user_refresh_token = None
    admin_token = None
    new_user_token = None
    new_trainer_token = None

    def setUp(self):
        """Create one trainer, one user (hashed password) and one admin,
        with a session token for each role."""
        trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234")
        self.trainer_id = trainer.id
        user: User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=trainer, email_address="prescher-erik@web.de", password=str(hashlib.sha3_256("Password1234".encode('utf8')).hexdigest()))
        admin: Admin = Admin.objects.create(first_name="Erik", last_name="Prescher", username="derAdmin", password="Password1234")
        self.user_id = user.id
        self.admin_id = admin.id
        self.trainer_token = JwToken.create_session_token(trainer.username, 'trainer')
        self.user_token = JwToken.create_session_token(user.username, 'user')
        self.admin_token = JwToken.create_session_token(admin.username, 'admin')

    def test_delete_account(self):
        """DeleteAccountView removes the calling user; invalid tokens and
        missing arguments are rejected."""
        # valid token
        request = ViewSupport.setup_request({'Session-Token': self.user_token}, {})
        response = DeleteAccountView.post(DeleteAccountView, request=request)
        self.assertTrue(response.data.get('success'))
        self.assertFalse(User.objects.filter(id=self.user_id).exists())
        # invalid
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = DeleteAccountView.post(DeleteAccountView, request=request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = DeleteAccountView.post(DeleteAccountView, request=request)
        self.assertFalse(response.data.get('success'))
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
        # setup user again so later tests can rely on the account existing
        trainer: Trainer = Trainer.objects.get(id=self.trainer_id)
        user: User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=trainer, email_address="prescher-erik@web.de", password=str(hashlib.sha3_256("Password1234".encode('utf8')).hexdigest()))
        self.user_id = user.id
        self.user_token = JwToken.create_session_token(user.username, 'user')

    def test_login(self):
        """LoginView issues session + refresh tokens for valid credentials
        and fails for wrong username/password or missing arguments."""
        # correct
        request = ViewSupport.setup_request({}, {
            'username': "DeadlyFarts",
            'password': "Password1234"
        })
        response = LoginView.post(LoginView, request)
        self.assertTrue(response.data.get('success'))
        self.user_token = response.data.get('data').get('session_token')
        self.user_refresh_token = response.data.get('data').get('refresh_token')
        self.assertTrue(JwToken.check_session_token(self.user_token))
        self.assertTrue(JwToken.check_refresh_token(self.user_refresh_token))
        # invalid
        # invalid username
        request = ViewSupport.setup_request({}, {
            'username': "cooleKids",
            'password': "Password1234"
        })
        response = LoginView.post(LoginView, request)
        self.assertFalse(response.data.get('success'))
        # invalid pasword
        request = ViewSupport.setup_request({}, {
            'username': "DeadlyFarts",
            'password': "wrong"
        })
        response = LoginView.post(LoginView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = LoginView.post(LoginView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), [])
        self.assertEqual(response.data.get('data').get('data'), ['username', 'password'])

    def test_register(self):
        """RegisterView creates a user/trainer from a one-time new-user
        token; reuse of the token and invalid tokens are rejected."""
        # register user
        if self.new_user_token is None:
            trainer: Trainer = Trainer.objects.get(id=self.trainer_id)
            self.new_user_token = JwToken.create_new_user_token(trainer.username, 'Jannis', 'Bauer', 'bptestmail52@gmail.com', 'user')
            OpenToken.objects.create(token=self.new_user_token, email='bptestmail52@gmail.com', first_name='Jannis', last_name='Bauer', creator=trainer.username)
        request = ViewSupport.setup_request({}, {
            'username': 'jbad',
            'password': '1234567890',
            'new_user_token': self.new_user_token
        })
        response = RegisterView.post(RegisterView, request)
        self.assertTrue(response.data.get('success'))
        self.assertTrue(User.objects.filter(username='jbad').exists())
        # not again possible: the token is single-use
        request = ViewSupport.setup_request({}, {
            'username': 'jbad',
            'password': '1234567890',
            'new_user_token': self.new_user_token
        })
        response = RegisterView.post(RegisterView, request)
        self.assertFalse(response.data.get('success'))
        # register trainer
        if self.new_trainer_token is None:
            admin: Admin = Admin.objects.get(id=self.admin_id)
            self.new_trainer_token = JwToken.create_new_user_token(admin.username, 'Jannis', 'Bauer', 'bptestmail52@gmail.com', 'trainer')
            # the trainer token is issued by the admin, so the OpenToken's
            # creator must be the admin (was trainer.username, which named
            # a variable from the branch above and the wrong issuer)
            OpenToken.objects.create(token=self.new_trainer_token, email='bptestmail52@gmail.com', first_name='Jannis', last_name='Bauer', creator=admin.username)
        request = ViewSupport.setup_request({}, {
            'username': 'Notjbad',
            'password': '1234567890',
            'new_user_token': self.new_trainer_token
        })
        response = RegisterView.post(RegisterView, request)
        self.assertTrue(response.data.get('success'))
        self.assertTrue(Trainer.objects.filter(username='Notjbad').exists())
        # invalid
        # invalid token
        request = ViewSupport.setup_request({}, {
            'username': 'againjbad',
            'password': '1234567890',
            'new_user_token': 'invalid'
        })
        response = RegisterView.post(RegisterView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = RegisterView.post(RegisterView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), [])
        self.assertEqual(response.data.get('data').get('data'), ['password', 'username', 'new_user_token'])

    def test_createUser(self):
        """CreateUserView: trainers invite users, admins invite trainers;
        plain users, invalid tokens and missing arguments are rejected."""
        trainer: Trainer = Trainer.objects.get(first_name="Erik")
        self.trainer_token = JwToken.create_session_token(trainer.username, 'trainer')
        # create user
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {
            'first_name': 'Jannis',
            'last_name': 'Bauer',
            'email_address': 'bptestmail52@gmail.com',
            'url': 'bptest.com'
        })
        response = CreateUserView.post(CreateUserView, request)
        self.assertTrue(response.data.get('success'))
        self.new_user_token = response.data.get('data').get('new_user_token')
        # create trainer
        request = ViewSupport.setup_request({'Session-Token': self.admin_token}, {
            'first_name': 'Jannis',
            'last_name': 'Bauer',
            'email_address': 'bptestmail52@gmail.com',
            'url': 'bptest.com'
        })
        response = CreateUserView.post(CreateUserView, request)
        self.assertTrue(response.data.get('success'))
        self.new_trainer_token = response.data.get('data').get('new_user_token')
        # invalid
        # user not allowed to
        request = ViewSupport.setup_request({'Session-Token': self.user_token}, {
            'first_name': 'Jannis',
            'last_name': 'Bauer',
            'email_address': 'bptestmail52@gmail.com',
            'url': 'bptest.com'
        })
        response = CreateUserView.post(CreateUserView, request)
        self.assertFalse(response.data.get('success'))
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {
            'first_name': 'Jannis',
            'last_name': 'Bauer',
            'email_address': 'bptestmail52@gmail.com',
            'url': 'bptest.com'
        })
        response = CreateUserView.post(CreateUserView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = CreateUserView.post(CreateUserView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['first_name', 'last_name', 'email_address', 'url'])

    def test_auth(self):
        """AuthView accepts a valid refresh token and rejects garbage or
        missing arguments."""
        # correct
        if self.user_refresh_token is None:
            self.user_refresh_token = JwToken.create_refresh_token('DeadlyFarts', 'user', True)
        request = ViewSupport.setup_request({}, {
            'refresh_token': self.user_refresh_token
        })
        response = AuthView.post(AuthView, request)
        self.assertTrue(response.data.get('success'))
        # invalid
        # incorrect
        request = ViewSupport.setup_request({}, {
            'refresh_token': 'justsomeinvalidstuff'
        })
        response = AuthView.post(AuthView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = AuthView.post(AuthView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), [])
        self.assertEqual(response.data.get('data').get('data'), ['refresh_token'])

    def test_logoutAllDevices(self):
        """LogoutAllDevicesView invalidates previously issued refresh
        tokens; invalid tokens and missing arguments are rejected."""
        # valid
        if self.user_refresh_token is None:
            self.user_refresh_token = JwToken.create_refresh_token('DeadlyFarts', 'user', True)
        self.assertTrue(JwToken.check_refresh_token(self.user_refresh_token).get('valid'))
        request = ViewSupport.setup_request({'Session-Token': self.user_token}, {})
        # NOTE(review): presumably needed so the logout timestamp is strictly
        # after the refresh token's creation time — confirm before removing
        time.sleep(10)
        response = LogoutAllDevicesView.post(LogoutAllDevicesView, request)
        self.assertTrue(response.data.get('success'))
        request = ViewSupport.setup_request({}, {
            'refresh_token': self.user_refresh_token
        })
        response = AuthView.post(AuthView, request)
        self.assertFalse(response.data.get('success'))
        # invalid
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = LogoutAllDevicesView.post(LogoutAllDevicesView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = LogoutAllDevicesView.post(LogoutAllDevicesView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
class TestExerciseView(TestCase):
    """Tests for fetching a single exercise and the exercise list."""

    trainer_id = 1
    ex_id = 1
    trainer_token = None
    user_token = None
    admin_token = None

    def setUp(self):
        """Create two exercises (one deactivated) plus trainer/user/admin
        accounts with session tokens."""
        Exercise.objects.create(title='Kniebeuge', description='{"de": "Gehe in die Knie, achte...", "en": "Do squats..."}')
        Exercise.objects.create(title='Liegestütze', description='{"de": "Mache Liegestütze...", "en": "Do pushups..."}', activated=False)
        self.ex_id = Exercise.objects.get(title='Kniebeuge').id
        trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234")
        self.trainer_id = trainer.id
        user: User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=trainer, email_address="prescher-erik@web.de", password="Password1234")
        admin: Admin = Admin.objects.create(first_name="Erik", last_name="Prescher", username="derAdmin", password="Password1234")
        self.trainer_token = JwToken.create_session_token(trainer.username, 'trainer')
        self.user_token = JwToken.create_session_token(user.username, 'user')
        self.admin_token = JwToken.create_session_token(admin.username, 'admin')

    def test_get(self):
        """GetExerciseView returns the localized exercise data for trainers;
        unknown ids, admins, invalid tokens and missing args are rejected."""
        # valid exercise
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'id': self.ex_id})
        response = GetExerciseView.post(GetExerciseView, request)
        self.assertTrue(response.data.get('success'))
        data = response.data.get('data')
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(data.get('title'), 'Kniebeuge')
        self.assertEqual(data.get('description'), "Gehe in die Knie, achte...")
        self.assertEqual(data.get('video'), None)
        self.assertEqual(data.get('activated'), True)
        # invalid
        # invalid exercise
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'id': 2543})
        response = GetExerciseView.post(GetExerciseView, request)
        self.assertFalse(response.data.get('success'))
        # admin not allowed
        request = ViewSupport.setup_request({'Session-Token': self.admin_token}, {'id': self.ex_id})
        response = GetExerciseView.post(GetExerciseView, request)
        self.assertFalse(response.data.get('success'))
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'id': self.ex_id})
        response = GetExerciseView.post(GetExerciseView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = GetExerciseView.post(GetExerciseView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['id'])

    def test_get_list(self):
        """GetExerciseListView lists all exercises for trainers only."""
        # valid
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {})
        response = GetExerciseListView.get(GetExerciseListView, request)
        self.assertTrue(response.data.get('success'))
        self.assertTrue(len(response.data.get('data').get('exercises')) == len(Exercise.objects.all()))
        # invalid
        # user not allowed
        request = ViewSupport.setup_request({'Session-Token': self.user_token}, {})
        response = GetExerciseListView.get(GetExerciseListView, request)
        self.assertFalse(response.data.get('success'))
        # admin not allowed
        request = ViewSupport.setup_request({'Session-Token': self.admin_token}, {})
        response = GetExerciseListView.get(GetExerciseListView, request)
        self.assertFalse(response.data.get('success'))
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetExerciseListView.get(GetExerciseListView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = GetExerciseListView.get(GetExerciseListView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
class TestPlanView(TestCase):
trainer_token = None
user_token = None
trainer_id = 0
user_id = 0
ex_id = 0
ts_id = 0
def setUp(self):
    """Build the shared fixture: a trainer with one user, one exercise,
    and a training schedule containing that exercise on monday."""
    the_trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234")
    the_user: User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=the_trainer, email_address="prescher-erik@web.de", password="Password1234")
    squat: Exercise = Exercise.objects.create(title='Kniebeuge', description="Gehe in die Knie, achte...")
    schedule: TrainingSchedule = TrainingSchedule.objects.create(trainer=the_trainer)
    ExerciseInPlan.objects.create(date="monday", sets=5, repeats_per_set=10, exercise=squat, plan=schedule)
    # remember the ids and issue session tokens for both roles
    self.trainer_id = the_trainer.id
    self.user_id = the_user.id
    self.ex_id = squat.id
    self.ts_id = schedule.id
    self.trainer_token = JwToken.create_session_token(the_trainer.username, 'trainer')
    self.user_token = JwToken.create_session_token(the_user.username, 'user')
def test_create_new(self):
    """CreatePlanView (create mode): a trainer can create a new plan from a
    list of exercises; users, invalid tokens and missing args are rejected."""
    trainer: Trainer = Trainer.objects.get(first_name="Erik")
    self.trainer_token = JwToken.create_session_token(trainer.username, 'trainer')
    # valid
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {
        'name': 'test_plan',
        'exercise': [{
            "date": 'monday',
            "sets": 4,
            "repeats_per_set": 10,
            "id": self.ex_id
        }, {
            "date": 'wednesday',
            "sets": 3,
            "repeats_per_set": 10,
            "id": self.ex_id
        }]
    })
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertTrue(response.data.get('success'))
    self.assertTrue(TrainingSchedule.objects.filter(id=int(response.data.get('data').get('plan_id'))).exists())
    # invalid
    # user not allowed
    request = ViewSupport.setup_request({'Session-Token': self.user_token}, {
        'name': 'test_plan',
        'exercise': [{
            "date": 'monday',
            "sets": 4,
            "repeats_per_set": 10,
            "id": self.ex_id
        }, {
            "date": 'wednesday',
            "sets": 3,
            "repeats_per_set": 10,
            "id": self.ex_id
        }]
    })
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {
        'name': 'test_plan',
        'exercise': [{
            "date": 'monday',
            "sets": 4,
            "repeats_per_set": 10,
            "id": self.ex_id
        }, {
            "date": 'wednesday',
            "sets": 3,
            "repeats_per_set": 10,
            "id": self.ex_id
        }]
    })
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertFalse(response.data.get('success'))
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['name', 'exercise'])
def test_create_change(self):
    """CreatePlanView (change mode): passing the optional 'id' replaces an
    existing plan; users, invalid tokens and missing args are rejected."""
    # valid
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {
        'name': 'test_plan',
        'exercise': [{
            "date": 'monday',
            "sets": 4,
            "repeats_per_set": 10,
            "id": self.ex_id
        }, {
            "date": 'wednesday',
            "sets": 3,
            "repeats_per_set": 10,
            "id": self.ex_id
        }],
        'id': self.ts_id
    })
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertTrue(response.data.get('success'))
    self.assertTrue(TrainingSchedule.objects.filter(id=int(response.data.get('data').get('plan_id'))).exists())
    # changing a plan yields a new plan id
    self.ts_id = int(response.data.get('data').get('plan_id'))
    # invalid
    # user not allowed
    request = ViewSupport.setup_request({'Session-Token': self.user_token}, {
        'name': 'test_plan',
        'exercise': [{
            "date": 'monday',
            "sets": 4,
            "repeats_per_set": 10,
            "id": self.ex_id
        }, {
            "date": 'wednesday',
            "sets": 3,
            "repeats_per_set": 10,
            "id": self.ex_id
        }],
        'id': self.ts_id
    })
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {
        'name': 'test_plan',
        'exercise': [{
            "date": 'monday',
            "sets": 4,
            "repeats_per_set": 10,
            "id": self.ex_id
        }, {
            "date": 'wednesday',
            "sets": 3,
            "repeats_per_set": 10,
            "id": self.ex_id
        }],
        'id': self.ts_id
    })
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments (same as create cause id is optional argument)
    request = ViewSupport.setup_request({}, {})
    response = CreatePlanView.post(CreatePlanView, request)
    self.assertFalse(response.data.get('success'))
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['name', 'exercise'])
def test_add_user(self):
    """AddPlanToUserView: a trainer assigns a plan to a user; unknown users,
    unknown plans, plain users and invalid tokens are rejected."""
    INTERN_SETTINGS['last_leaderboard_reset'] = time.time()
    TrainingSchedule.objects.create(name='addtouser_plan', trainer=Trainer.objects.get(id=self.trainer_id))
    self.ts_id = TrainingSchedule.objects.get(name='addtouser_plan').id
    user: User = User.objects.create(first_name="Jannis", last_name="Bauer", username="jbadV", trainer=Trainer.objects.get(id=self.trainer_id), email_address="fake@web.de", password="Password1234")
    # valid user and plan
    user.plan = None
    user.save()
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {
        'plan': self.ts_id,
        'user': 'jbadV'
    })
    response = AddPlanToUserView.post(AddPlanToUserView, request)
    self.assertTrue(response.data.get('success'))
    user = User.objects.get(username='jbadV')
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(user.plan.id, self.ts_id)
    # invalid
    # invalid user
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {
        'plan': self.ts_id,
        'user': '1234567'
    })
    response = AddPlanToUserView.post(AddPlanToUserView, request)
    self.assertFalse(response.data.get('success'))
    # invalid plan
    user = User.objects.get(username='DeadlyFarts')
    user.plan = None
    user.save()
    # bugfix: was self.trainer_id (an int), which made this case fail on the
    # bad token instead of exercising the invalid-plan path
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {
        'plan': -1,
        'user': 'DeadlyFarts'
    })
    response = AddPlanToUserView.post(AddPlanToUserView, request)
    self.assertFalse(response.data.get('success'))
    user = User.objects.get(username='DeadlyFarts')
    self.assertEqual(user.plan, None)
    # user not allowed to
    request = ViewSupport.setup_request({'Session-Token': self.user_token}, {
        'plan': self.ts_id,
        'user': 'DeadlyFarts'
    })
    response = AddPlanToUserView.post(AddPlanToUserView, request)
    self.assertFalse(response.data.get('success'))
    user = User.objects.get(username='DeadlyFarts')
    self.assertEqual(user.plan, None)
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {
        'plan': self.ts_id,
        'user': 'DeadlyFarts'
    })
    response = AddPlanToUserView.post(AddPlanToUserView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = AddPlanToUserView.post(AddPlanToUserView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['user'])
def test_get_list(self):
    """GetAllPlansView lists the calling trainer's plans; users, invalid
    tokens and missing arguments are rejected."""
    # valid
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {})
    response = GetAllPlansView.get(GetAllPlansView, request)
    self.assertTrue(response.data.get('success'))
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(len(response.data.get('data').get('plans')), len(TrainingSchedule.objects.filter(trainer=self.trainer_id)))
    # invalid
    # user not allowed to
    request = ViewSupport.setup_request({'Session-Token': self.user_token}, {})
    response = GetAllPlansView.get(GetAllPlansView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
    response = GetAllPlansView.get(GetAllPlansView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = GetAllPlansView.get(GetAllPlansView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), [])
def test_get(self):
    """ShowPlanView returns name and exercises of a plan for trainers;
    unknown plans, users, invalid tokens and missing args are rejected."""
    # valid
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'plan': self.ts_id})
    response = ShowPlanView.post(ShowPlanView, request)
    ts: TrainingSchedule = TrainingSchedule.objects.get(id=self.ts_id)
    self.assertTrue(response.data.get('success'))
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(response.data.get('data').get('name'), ts.name)
    self.assertEqual(len(response.data.get('data').get('exercises')), len(ExerciseInPlan.objects.filter(plan=self.ts_id)))
    # invalid
    # invalid plan
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'plan': -1})
    response = ShowPlanView.post(ShowPlanView, request)
    self.assertFalse(response.data.get('success'))
    # user not allowed
    request = ViewSupport.setup_request({'Session-Token': self.user_token}, {'plan': self.ts_id})
    response = ShowPlanView.post(ShowPlanView, request)
    self.assertFalse(response.data.get('success'))
    # invalid token
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'plan': self.ts_id})
    response = ShowPlanView.post(ShowPlanView, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = ShowPlanView.post(ShowPlanView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['plan'])
def test_get_for_user(self):
    """GetPlanOfUser.post: a user fetches their own plan; a trainer fetches a named user's plan."""
    # valid
    user: User = User.objects.get(id=self.user_id)
    ts: TrainingSchedule = TrainingSchedule.objects.create(name='getfromuser_plan', trainer=Trainer.objects.get(id=self.trainer_id))
    # ensure the user is assigned the freshly created plan
    # (both branches of the original if/elif did the same assignment; `is None` replaces `== None`)
    if user.plan is None or user.plan.id != ts.id:
        user.plan = ts
        user.save()
    # as user: no username argument -> own plan
    request = ViewSupport.setup_request({'Session-Token': self.user_token}, {})
    response = GetPlanOfUser.post(GetPlanOfUser, request)
    self.assertTrue(response.data.get('success'))
    self.assertEqual(len(response.data.get('data').get('exercises')), len(ExerciseInPlan.objects.filter(plan=ts.id)))
    # as trainer: explicit username
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'username': user.username})
    response = GetPlanOfUser.post(GetPlanOfUser, request)
    self.assertTrue(response.data.get('success'))
    self.assertEqual(len(response.data.get('data').get('exercises')), len(ExerciseInPlan.objects.filter(plan=ts.id)))
    # invalid
    # unknown username (literal string on purpose, not the attribute lookup)
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'username': 'user.username'})
    response = GetPlanOfUser.post(GetPlanOfUser, request)
    self.assertFalse(response.data.get('success'))
    # invalid token as user
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
    response = GetPlanOfUser.post(GetPlanOfUser, request)
    self.assertFalse(response.data.get('success'))
    # invalid token as trainer
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'username': user.username})
    response = GetPlanOfUser.post(GetPlanOfUser, request)
    self.assertFalse(response.data.get('success'))
    # missing arguments entirely
    request = ViewSupport.setup_request({}, {})
    response = GetPlanOfUser.post(GetPlanOfUser, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), [])
    # valid trainer token but missing username
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {})
    response = GetPlanOfUser.post(GetPlanOfUser, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), [])
    self.assertEqual(response.data.get('data').get('data'), ['username'])
def test_delete(self):
    """DeletePlanView.post: only the owning trainer with a valid token may delete a plan."""
    # valid: plan is removed from the database
    ts: TrainingSchedule = TrainingSchedule.objects.create(name='delete_plan', trainer=Trainer.objects.get(id=self.trainer_id))
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'id': ts.id})
    response = DeletePlanView.post(DeletePlanView, request)
    self.assertTrue(response.data.get('success'))
    self.assertFalse(TrainingSchedule.objects.filter(id=ts.id).exists())
    # invalid
    # non-existent plan id
    request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'id': -1})
    response = DeletePlanView.post(DeletePlanView, request)
    self.assertFalse(response.data.get('success'))
    # recreate a plan for the remaining negative cases
    ts = TrainingSchedule.objects.create(name='delete_plan', trainer=Trainer.objects.get(id=self.trainer_id))
    # user role is not allowed to delete; plan must survive
    request = ViewSupport.setup_request({'Session-Token': self.user_token}, {'id': ts.id})
    response = DeletePlanView.post(DeletePlanView, request)
    self.assertFalse(response.data.get('success'))
    self.assertTrue(TrainingSchedule.objects.filter(id=ts.id).exists())
    # invalid token; plan must survive
    request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'id': ts.id})
    response = DeletePlanView.post(DeletePlanView, request)
    self.assertFalse(response.data.get('success'))
    self.assertTrue(TrainingSchedule.objects.filter(id=ts.id).exists())
    # missing arguments
    request = ViewSupport.setup_request({}, {})
    response = DeletePlanView.post(DeletePlanView, request)
    self.assertFalse(response.data.get('success'))
    self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
    self.assertEqual(response.data.get('data').get('data'), ['id'])
class TestLeaderboardView(TestCase):
    """ListLeaderboardView tests: trainers see the absolute top entries; a user's view is centered on their own rank."""

    trainer_id = None
    trainer_token = None
    user_token = None
    users = []  # reassigned in setUp; class-level default only

    def setUp(self) -> None:
        """Create a trainer, five users with scores 60..100, and a leaderboard entry each."""
        trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234")
        self.trainer_id = trainer.id
        self.trainer_token = JwToken.create_session_token(trainer.username, 'trainer')
        ts: TrainingSchedule = TrainingSchedule.objects.create(name='plan_for_everyone', trainer=trainer)
        ex: Exercise = Exercise.objects.create(title='Kniebeuge')
        ExerciseInPlan.objects.create(exercise=ex, plan=ts, sets=1, repeats_per_set=1)
        User.objects.create(first_name="vorname", last_name="nachname", username="user1", email_address="user1@users.com", trainer=trainer, password="pswd22", plan=ts)
        User.objects.create(first_name="vorname", last_name="nachname", username="user2", email_address="user2@users.com", trainer=trainer, password="pswd22", plan=ts)
        User.objects.create(first_name="vorname", last_name="nachname", username="user3", email_address="user3@users.com", trainer=trainer, password="pswd22", plan=ts)
        User.objects.create(first_name="vorname", last_name="nachname", username="user4", email_address="user4@users.com", trainer=trainer, password="pswd22", plan=ts)
        User.objects.create(first_name="vorname", last_name="nachname", username="user5", email_address="user5@users.com", trainer=trainer, password="pswd22", plan=ts)
        self.users = User.objects.all()
        # scores 60, 70, 80, 90, 100; the mid-field user (score 80) gets the session token
        score = 60
        for user in self.users:
            Leaderboard.objects.create(user=user, score=score, cleanliness=score, intensity=score, speed=score, executions=1)
            if score == 80:
                self.user_token = JwToken.create_session_token(user.username, 'user')
            score += 10

    def test_get(self):
        INTERN_SETTINGS['last_leaderboard_reset'] = time.time()
        self.maxDiff = None
        # as trainer: absolute top 3 by score
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'count': 3})
        response = ListLeaderboardView.post(ListLeaderboardView, request)
        self.assertTrue(response.data.get('success'))
        # all users share the same display name, so the expectation can be spelled out literally
        leaderboard = [
            {"rank": 1, "username": 'vorname nachname', "score": 100},
            {"rank": 2, "username": 'vorname nachname', "score": 90},
            {"rank": 3, "username": 'vorname nachname', "score": 80},
        ]
        get_response = []
        for ent in response.data.get('data').get('leaderboard'):
            get_response.append({"rank": ent.get('rank'), "username": ent.get('username'), "score": ent.get('score')})
        self.assertEqual(get_response, leaderboard)
        # as user (score 80): window around the caller -> scores 90, 80, 70
        request = ViewSupport.setup_request({'Session-Token': self.user_token}, {'count': 3})
        response = ListLeaderboardView.post(ListLeaderboardView, request)
        self.assertTrue(response.data.get('success'))
        leaderboard = []
        entry: Leaderboard = Leaderboard.objects.get(score=90)
        leaderboard.append({"rank": 1, "username": entry.user.username, "score": 90})
        entry = Leaderboard.objects.get(score=80)
        leaderboard.append({"rank": 2, "username": entry.user.username, "score": 80})
        entry = Leaderboard.objects.get(score=70)
        leaderboard.append({"rank": 3, "username": entry.user.username, "score": 70})
        get_response = []
        for ent in response.data.get('data').get('leaderboard'):
            get_response.append({"rank": ent.get('rank'), "username": ent.get('username'), "score": ent.get('score')})
        self.assertEqual(get_response, leaderboard)
        # invalid
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'count': 3})
        response = ListLeaderboardView.post(ListLeaderboardView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = ListLeaderboardView.post(ListLeaderboardView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['count'])
class TestDoneExercise(TestCase):
    """GetDoneExercisesView tests: users fetch their own done exercises (get); trainers fetch a user's (post)."""

    trainer_id = 1
    ex = None
    trainer_token = None
    user_token = None
    admin_token = None
    user = None
    exip_id = 0

    def setUp(self) -> None:
        """Create trainer, exercise-in-plan, one user (with plan) and an admin, plus a zeroed leaderboard row."""
        trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234")
        self.ex: Exercise = Exercise.objects.create(title='Kniebeuge', description='{"de": "Gehe in die Knie, achte...", "en": "Do squats..."}')
        ts: TrainingSchedule = TrainingSchedule.objects.create(trainer=trainer)
        exip: ExerciseInPlan = ExerciseInPlan.objects.create(date="monday", sets=5, repeats_per_set=10, exercise=self.ex, plan=ts)
        self.exip_id = exip.id
        self.trainer_id = trainer.id
        user: User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=trainer, email_address="prescher-erik@web.de", password="Password1234", plan=ts)
        admin: Admin = Admin.objects.create(first_name="Erik", last_name="Prescher", username="derAdmin", password="Password1234")
        user.plan = ts
        user.save(force_update=True)
        # re-fetch so self.user reflects the persisted state
        self.user: User = User.objects.get(username='DeadlyFarts')
        self.trainer_token = JwToken.create_session_token(trainer.username, 'trainer')
        self.user_token = JwToken.create_session_token(user.username, 'user')
        self.admin_token = JwToken.create_session_token(admin.username, 'admin')
        Leaderboard.objects.create(user=User.objects.get(username=self.user.username), score=0)

    def test_get_done(self):  # not working, issue with method in tests
        # valid
        # as user: GET returns the caller's own done exercises
        request = ViewSupport.setup_request({'Session-Token': self.user_token}, {})
        response = GetDoneExercisesView.get(GetDoneExercisesView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data, ExerciseHandler.get_done(self.user))
        # as trainer: POST with an explicit user argument
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'user': self.user.username})
        response = GetDoneExercisesView.post(GetDoneExercisesView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data, ExerciseHandler.get_done(self.user))
        # invalid
        # trainer cannot call the user method
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {})
        response = GetDoneExercisesView.get(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        # admin cannot call the user method
        request = ViewSupport.setup_request({'Session-Token': self.admin_token}, {})
        response = GetDoneExercisesView.get(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        # invalid token for the user method
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetDoneExercisesView.get(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments for the user method
        request = ViewSupport.setup_request({}, {})
        response = GetDoneExercisesView.get(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
        # unknown user for the trainer method
        request = ViewSupport.setup_request({'Session-Token': self.trainer_token}, {'user': 'unknown'})
        response = GetDoneExercisesView.post(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        # user cannot call the trainer method
        request = ViewSupport.setup_request({'Session-Token': self.user_token}, {'user': self.user.username})
        response = GetDoneExercisesView.post(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        # admin cannot call the trainer method
        request = ViewSupport.setup_request({'Session-Token': self.admin_token}, {'user': self.user.username})
        response = GetDoneExercisesView.post(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        # invalid token for the trainer method
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'user': self.user.username})
        response = GetDoneExercisesView.post(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments for the trainer method
        request = ViewSupport.setup_request({}, {})
        response = GetDoneExercisesView.post(GetDoneExercisesView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['user'])
class TestFriendSystem(TestCase):
    """End-to-end tests for the friend system (add/accept/decline/delete, request listing) plus user search/list views."""

    users = []  # reassigned in setUp; class-level default only
    admin = None
    trainer = None
    token1 = None  # admin session
    token2 = None  # trainer session
    token3 = None  # user1 session
    token4 = None  # user2 session
    token5 = None  # user3 session

    def setUp(self) -> None:
        """Create one admin, one trainer and ten users; issue session tokens for admin, trainer and users 1-3."""
        self.admin: Admin = Admin.objects.create(first_name="Erik", last_name="Prescher", username="DerAdmin", password="Password1234")
        self.trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password="Password1234")
        User.objects.create(first_name="vorname", last_name="nachname", username="user1", email_address="user1@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user2", email_address="user2@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user3", email_address="user3@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user4", email_address="user4@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user5", email_address="user5@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user6", email_address="user6@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user7", email_address="user7@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user8", email_address="user8@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user9", email_address="user9@users.com", trainer=self.trainer, password="pswd22")
        User.objects.create(first_name="vorname", last_name="nachname", username="user10", email_address="user10@users.com", trainer=self.trainer, password="pswd22")
        self.users = list(User.objects.all())
        self.token1 = JwToken.create_session_token(self.admin.username, 'admin')
        self.token2 = JwToken.create_session_token(self.trainer.username, 'trainer')
        self.token3 = JwToken.create_session_token(self.users[0].username, 'user')
        self.token4 = JwToken.create_session_token(self.users[1].username, 'user')
        self.token5 = JwToken.create_session_token(self.users[2].username, 'user')

    def test_system(self):
        """Walk the whole friendship life cycle, then verify role and argument validation for every endpoint."""
        # valid
        # user1 adds user2 -> pending request
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {'username': 'user2'})
        response = AddFriendView.post(AddFriendView, request)
        self.assertTrue(response.data.get('success'))
        self.assertTrue(Friends.objects.filter(friend1=self.users[0], friend2=self.users[1], accepted=False).exists())
        # user2 adds user1 -> second, reverse pending request
        request = ViewSupport.setup_request({'Session-Token': self.token4}, {'username': 'user1'})
        response = AddFriendView.post(AddFriendView, request)
        self.assertTrue(response.data.get('success'))
        self.assertTrue(Friends.objects.filter(friend1=self.users[1], friend2=self.users[0], accepted=False).exists())
        # user1 adds user3
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {'username': 'user3'})
        response = AddFriendView.post(AddFriendView, request)
        self.assertTrue(response.data.get('success'))
        self.assertTrue(Friends.objects.filter(friend1=self.users[0], friend2=self.users[2], accepted=False).exists())
        # user1 lists incoming requests (only user2's)
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
        response = GetRequestView.get(GetRequestView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(len(response.data.get('data').get('requests')), 1)
        self.assertEqual(response.data.get('data').get('requests'), FriendHandler.get_requests(self.users[0]))
        # user1 lists outgoing (pending) requests (to user2 and user3)
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
        response = GetPendingRequestView.get(GetPendingRequestView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(len(response.data.get('data').get('pending')), 2)
        self.assertEqual(response.data.get('data').get('pending'), FriendHandler.get_pending_requests(self.users[0]))
        # user1 accepts user2's request; user1's own outgoing duplicate is removed
        friendship_id = Friends.objects.get(friend1=self.users[1], friend2=self.users[0], accepted=False).id
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {'id': friendship_id})
        response = AcceptRequestView.post(AcceptRequestView, request)
        self.assertTrue(response.data.get('success'))
        self.assertFalse(Friends.objects.filter(friend1=self.users[1], friend2=self.users[0], accepted=False).exists())
        self.assertTrue(Friends.objects.filter(friend1=self.users[1], friend2=self.users[0], accepted=True).exists())
        self.assertFalse(Friends.objects.filter(friend1=self.users[0], friend2=self.users[1]).exists())
        # user3 declines user1's request
        friendship_id = Friends.objects.get(friend1=self.users[0], friend2=self.users[2], accepted=False).id
        request = ViewSupport.setup_request({'Session-Token': self.token5}, {'id': friendship_id})
        response = DeclineRequestView.post(DeclineRequestView, request)
        self.assertTrue(response.data.get('success'))
        self.assertFalse(Friends.objects.filter(friend1=self.users[0], friend2=self.users[2]).exists())
        # user1 lists friends (exactly user2)
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
        response = GetMyFriendsView.get(GetMyFriendsView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(len(response.data.get('data').get('friends')), 1)
        self.assertEqual(response.data.get('data').get('friends'), FriendHandler.get_friends(self.users[0]))
        # user1 deletes the friendship with user2
        friendship_id = Friends.objects.get(friend1=self.users[1], friend2=self.users[0], accepted=True).id
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {'id': friendship_id})
        response = DeleteFriendView.post(DeleteFriendView, request)
        self.assertTrue(response.data.get('success'))
        self.assertFalse(Friends.objects.filter(friend1=self.users[1], friend2=self.users[0]).exists())
        # invalid
        # admin is not allowed to use any friend endpoint
        # add
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {'username': 'user2'})
        response = AddFriendView.post(AddFriendView, request)
        self.assertFalse(response.data.get('success'))
        # requests
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
        response = GetRequestView.get(GetRequestView, request)
        self.assertFalse(response.data.get('success'))
        # pending requests
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
        response = GetPendingRequestView.get(GetPendingRequestView, request)
        self.assertFalse(response.data.get('success'))
        # friends
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
        response = GetMyFriendsView.get(GetMyFriendsView, request)
        self.assertFalse(response.data.get('success'))
        # accept
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {'id': 1})
        response = AcceptRequestView.post(AcceptRequestView, request)
        self.assertFalse(response.data.get('success'))
        # decline
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {'id': 1})
        response = DeclineRequestView.post(DeclineRequestView, request)
        self.assertFalse(response.data.get('success'))
        # delete
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {'id': 1})
        response = DeleteFriendView.post(DeleteFriendView, request)
        self.assertFalse(response.data.get('success'))
        # trainer is not allowed to use any friend endpoint
        # add
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {'username': 'user2'})
        response = AddFriendView.post(AddFriendView, request)
        self.assertFalse(response.data.get('success'))
        # requests
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
        response = GetRequestView.get(GetRequestView, request)
        self.assertFalse(response.data.get('success'))
        # pending requests
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
        response = GetPendingRequestView.get(GetPendingRequestView, request)
        self.assertFalse(response.data.get('success'))
        # friends
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
        response = GetMyFriendsView.get(GetMyFriendsView, request)
        self.assertFalse(response.data.get('success'))
        # accept
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {'id': 1})
        response = AcceptRequestView.post(AcceptRequestView, request)
        self.assertFalse(response.data.get('success'))
        # decline
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {'id': 1})
        response = DeclineRequestView.post(DeclineRequestView, request)
        self.assertFalse(response.data.get('success'))
        # delete
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {'id': 1})
        response = DeleteFriendView.post(DeleteFriendView, request)
        self.assertFalse(response.data.get('success'))
        # invalid token is rejected everywhere
        # add
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'username': 'user2'})
        response = AddFriendView.post(AddFriendView, request)
        self.assertFalse(response.data.get('success'))
        # requests
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetRequestView.get(GetRequestView, request)
        self.assertFalse(response.data.get('success'))
        # pending requests
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetPendingRequestView.get(GetPendingRequestView, request)
        self.assertFalse(response.data.get('success'))
        # friends
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetMyFriendsView.get(GetMyFriendsView, request)
        self.assertFalse(response.data.get('success'))
        # accept
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'id': 1})
        response = AcceptRequestView.post(AcceptRequestView, request)
        self.assertFalse(response.data.get('success'))
        # decline
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'id': 1})
        response = DeclineRequestView.post(DeclineRequestView, request)
        self.assertFalse(response.data.get('success'))
        # delete
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'id': 1})
        response = DeleteFriendView.post(DeleteFriendView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments are reported per endpoint
        # add
        request = ViewSupport.setup_request({}, {})
        response = AddFriendView.post(AddFriendView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['username'])
        # requests
        request = ViewSupport.setup_request({}, {})
        response = GetRequestView.get(GetRequestView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
        # pending requests
        request = ViewSupport.setup_request({}, {})
        response = GetPendingRequestView.get(GetPendingRequestView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
        # friends
        request = ViewSupport.setup_request({}, {})
        response = GetMyFriendsView.get(GetMyFriendsView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
        # accept
        request = ViewSupport.setup_request({}, {})
        response = AcceptRequestView.post(AcceptRequestView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['id'])
        # decline
        request = ViewSupport.setup_request({}, {})
        response = DeclineRequestView.post(DeclineRequestView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['id'])
        # delete
        request = ViewSupport.setup_request({}, {})
        response = DeleteFriendView.post(DeleteFriendView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['id'])

    def test_pattern_search(self):
        """SearchUserView.post: substring search over usernames; a user never finds themselves."""
        # valid
        # as admin: matches user1 and user10
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {'search': 'user1'})
        response = SearchUserView.post(SearchUserView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('users'), UserHandler.get_users_data([User.objects.get(username='user1'), User.objects.get(username='user10')]))
        # as trainer: same result set
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {'search': 'user1'})
        response = SearchUserView.post(SearchUserView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('users'), UserHandler.get_users_data([User.objects.get(username='user1'), User.objects.get(username='user10')]))
        # as user1: own account is excluded, only user10 remains
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {'search': 'user1'})
        response = SearchUserView.post(SearchUserView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('users'), UserHandler.get_users_data([User.objects.get(username='user10')]))
        # no match -> empty result, still success
        request = ViewSupport.setup_request({'Session-Token': self.token4}, {'search': 'del'})
        response = SearchUserView.post(SearchUserView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('users'), UserHandler.get_users_data([]))
        # invalid
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'search': 'del'})
        response = SearchUserView.post(SearchUserView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = SearchUserView.post(SearchUserView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['search'])

    def test_get_list(self):
        """GetListOfUsers.get: full user list; a user's own account is excluded from their view."""
        # NOTE: the original passed SearchUserView as `self` here; fixed to GetListOfUsers.
        # valid
        # as admin
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
        response = GetListOfUsers.get(GetListOfUsers, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('users'), UserHandler.get_users_data(User.objects.all()))
        # as trainer
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
        response = GetListOfUsers.get(GetListOfUsers, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('users'), UserHandler.get_users_data(User.objects.all()))
        # as user1: everyone except themselves
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
        response = GetListOfUsers.get(GetListOfUsers, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('users'), UserHandler.get_users_data(User.objects.all().exclude(username='user1')))
        # invalid
        # invalid token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetListOfUsers.get(GetListOfUsers, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments
        request = ViewSupport.setup_request({}, {})
        response = GetListOfUsers.get(GetListOfUsers, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
class TestResetPassword(TestCase):
    """Password-reset flow: request the reset e-mail, then set a new password with the reset token."""

    token2 = None  # trainer reset token
    token3 = None  # user reset token
    user_id = 0
    trainer_id = 0

    def setUp(self) -> None:
        """Create a trainer and a user (sha3-256-hashed passwords) and issue reset tokens for both."""
        trainer: Trainer = Trainer.objects.create(first_name="Jannis", last_name="Bauer", username="DerTrainer", email_address="bptestmail52@gmail.com", password=str(hashlib.sha3_256('Passwort'.encode('utf8')).hexdigest()))
        self.trainer_id = trainer.id
        user: User = User.objects.create(first_name="Jannis", last_name="Bauer", username="derNutzer", trainer=trainer, email_address="bptestmail52@gmail.com", password=str(hashlib.sha3_256('passwd'.encode('utf8')).hexdigest()))
        self.user_id = user.id
        self.token2 = JwToken.create_reset_password_token('DerTrainer')
        self.token3 = JwToken.create_reset_password_token('derNutzer')

    def test_send(self):
        """GetPasswordResetEmailView.post: sends the reset mail for a known username."""
        # valid
        # user
        request = ViewSupport.setup_request({}, {
            'username': 'derNutzer',
            'url': 'www.test/#/'
        })
        response = GetPasswordResetEmailView.post(GetPasswordResetEmailView, request)
        self.assertTrue(response.data.get('success'))
        '''not implemented yet
        #trainer
        request = ViewSupport.setup_request({}, {
        'username': 'DerTrainer',
        'url': 'www.test/#/'
        })
        response = GetPasswordResetEmailView.post(GetPasswordResetEmailView, request)
        self.assertTrue(response.data.get('success'))'''
        # invalid
        # unknown username
        request = ViewSupport.setup_request({}, {
            'username': 'invalid',
            'url': 'www.test/#/'
        })
        response = GetPasswordResetEmailView.post(GetPasswordResetEmailView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments (endpoint needs no header fields)
        request = ViewSupport.setup_request({}, {})
        response = GetPasswordResetEmailView.post(GetPasswordResetEmailView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), [])
        self.assertEqual(response.data.get('data').get('data'), ['username', 'url'])

    def test_change(self):
        """SetPasswordResetEmailView.post: a valid reset token sets the new (sha3-256-hashed) password."""
        # valid
        # user
        request = ViewSupport.setup_request({}, {
            'reset_token': self.token3,
            'new_password': 'newFancy'
        })
        response = SetPasswordResetEmailView.post(SetPasswordResetEmailView, request)
        self.assertTrue(response.data.get('success'))
        user: User = User.objects.get(id=self.user_id)
        self.assertEqual(user.password, str(hashlib.sha3_256('newFancy'.encode('utf8')).hexdigest()))
        '''not implemented yet
        #trainer
        request = ViewSupport.setup_request({}, {
        'reset_token': self.token2,
        'new_password': 'newFancy'
        })
        response = SetPasswordResetEmailView.post(SetPasswordResetEmailView, request)
        self.assertTrue(response.data.get('success'))
        user = Trainer.objects.get(id=self.trainer_id)
        self.assertEquals(user.password, str(hashlib.sha3_256('newFancy'.encode('utf8')).hexdigest()))'''
        # invalid
        # invalid reset token
        request = ViewSupport.setup_request({}, {
            'reset_token': 'invalid',
            'new_password': 'newFancy'
        })
        response = SetPasswordResetEmailView.post(SetPasswordResetEmailView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments (endpoint needs no header fields)
        request = ViewSupport.setup_request({}, {})
        response = SetPasswordResetEmailView.post(SetPasswordResetEmailView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), [])
        self.assertEqual(response.data.get('data').get('data'), ['reset_token', 'new_password'])
class TestMedals(TestCase):
    """Tests GetMedals: users see their own medal counts; trainers and bad tokens are rejected."""
    user1_id = 0
    user2_id = 2
    trainer_id = 0
    umixs = []
    token1 = None  # trainer session token
    token2 = None  # user1 session token
    token3 = None  # user2 session token

    def setUp(self) -> None:
        """Create a trainer, two users, two exercises, and medal records for each user/exercise pair."""
        trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password=str(hashlib.sha3_256('Passwort'.encode('utf8')).hexdigest()))
        self.trainer_id = trainer.id
        user1: User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=trainer, email_address="prescher-erik@web.de", password=str(hashlib.sha3_256('passwd'.encode('utf8')).hexdigest()))
        user2: User = User.objects.create(first_name="Jannis", last_name="Bauer", username="jbad", trainer=trainer, email_address="test@bla.de", password=str(hashlib.sha3_256('passwdyo'.encode('utf8')).hexdigest()))
        self.user1_id = user1.id
        self.user2_id = user2.id
        self.token1 = JwToken.create_session_token(trainer.username, 'trainer')
        self.token2 = JwToken.create_session_token(user1.username, 'user')
        self.token3 = JwToken.create_session_token(user2.username, 'user')
        ex1 = Exercise.objects.create(title='Kniebeuge')
        ex2 = Exercise.objects.create(title='Liegestütze')
        UserMedalInExercise.objects.create(user=user1, gold=2, silver=5, exercise=ex1)
        UserMedalInExercise.objects.create(user=user1, gold=4, bronze=3, exercise=ex2)
        UserMedalInExercise.objects.create(user=user2, gold=6, silver=1, exercise=ex1)
        UserMedalInExercise.objects.create(user=user2, gold=1, silver=2, bronze=4, exercise=ex2)
        self.umixs = list(UserMedalInExercise.objects.all())

    def test_get(self):
        """Valid user tokens return that user's medals per exercise; trainer/invalid/missing fail."""
        # valid: user1 sees their own counts for both exercises (unset medals default to 0)
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {})
        response = GetMedals.get(GetMedals, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('medals'), [{
            'exercise': 'Kniebeuge',
            'gold': 2,
            'silver': 5,
            'bronze': 0
        }, {
            'exercise': 'Liegestütze',
            'gold': 4,
            'silver': 0,
            'bronze': 3
        }])
        # valid: user2 sees their own, different counts
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {})
        response = GetMedals.get(GetMedals, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('medals'), [{
            'exercise': 'Kniebeuge',
            'gold': 6,
            'silver': 1,
            'bronze': 0
        }, {
            'exercise': 'Liegestütze',
            'gold': 1,
            'silver': 2,
            'bronze': 4
        }])
        # invalid: trainers are not allowed to query medals
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {})
        response = GetMedals.get(GetMedals, request)
        self.assertFalse(response.data.get('success'))
        # invalid session token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {})
        response = GetMedals.get(GetMedals, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments: response names the missing header field
        request = ViewSupport.setup_request({}, {})
        response = GetMedals.get(GetMedals, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), [])
class TestProfileOfFriends(TestCase):
    """Tests GetProfileOfFriendView: accepted friends' profiles are visible; all others are rejected."""
    token1 = None  # trainer session token
    token2 = None  # user1 session token
    token3 = None  # user2 session token

    def setUp(self) -> None:
        """Create a trainer, three users, one accepted and one pending friendship, and one achievement."""
        trainer: Trainer = Trainer.objects.create(first_name="Erik", last_name="Prescher", username="DerTrainer", email_address="prescher-erik@web.de", password=str(hashlib.sha3_256('Passwort'.encode('utf8')).hexdigest()))
        user1: User = User.objects.create(first_name="Erik", last_name="Prescher", username="DeadlyFarts", trainer=trainer, email_address="prescher-erik@web.de", password=str(hashlib.sha3_256('passwd'.encode('utf8')).hexdigest()), avatar=5, motivation='Krise', xp=20)
        user2: User = User.objects.create(first_name="Jannis", last_name="Bauer", username="jbad", trainer=trainer, email_address="test@bla.de", password=str(hashlib.sha3_256('passwdyo'.encode('utf8')).hexdigest()), avatar=2, motivation='Gute Tage', xp=5000)
        user3: User = User.objects.create(first_name="Jannis", last_name="Bauer", username="jbadV", trainer=trainer, email_address="test@bla.de", password=str(hashlib.sha3_256('passwdyo'.encode('utf8')).hexdigest()), avatar=4, motivation='Es lebe der Leichtsinn', xp=60000)
        # user1<->user2 are friends; user1->user3 is still pending
        Friends.objects.create(friend1=user1, friend2=user2, accepted=True)
        Friends.objects.create(friend1=user1, friend2=user3, accepted=False)
        self.token1 = JwToken.create_session_token(trainer.username, 'trainer')
        self.token2 = JwToken.create_session_token(user1.username, 'user')
        self.token3 = JwToken.create_session_token(user2.username, 'user')
        achievement: Achievement = Achievement.objects.create(name='streak', description='{"en": "get a streak"}')
        UserAchievedAchievement.objects.create(achievement=achievement, level=1, user=user2, date=time.time())

    def test(self):
        """Accepted friends can fetch each other's profile; pending/non-friends, trainers and bad tokens fail."""
        # valid: user1 views accepted friend user2 (who has one achievement)
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {'username': 'jbad'})
        response = GetProfileOfFriendView.post(GetProfileOfFriendView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data'), {
            'username': 'jbad',
            'level': UserHandler.calc_level(5000, 200)[0],
            'level_progress': UserHandler.calc_level(5000, 200)[1],
            'avatar': 2,
            'motivation': 'Gute Tage',
            'last_login': None,
            'days': 0,
            'flame_height': 0.0,
            'last_achievements': [{
                'name': 'streak',
                'icon': None
            }]
        })
        # valid: user2 views user1 (no achievements yet)
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {'username': 'DeadlyFarts'})
        response = GetProfileOfFriendView.post(GetProfileOfFriendView, request)
        self.assertTrue(response.data.get('success'))
        self.assertEqual(response.data.get('data'), {
            'username': 'DeadlyFarts',
            'level': 0,
            'level_progress': UserHandler.calc_level(20, 200)[1],
            'avatar': 5,
            'motivation': 'Krise',
            'last_login': None,
            'days': 0,
            'flame_height': 0.0,
            'last_achievements': []
        })
        # invalid: users who are not friends at all
        request = ViewSupport.setup_request({'Session-Token': self.token3}, {'username': 'jbadV'})
        response = GetProfileOfFriendView.post(GetProfileOfFriendView, request)
        self.assertFalse(response.data.get('success'))
        # invalid: friendship request not yet accepted
        request = ViewSupport.setup_request({'Session-Token': self.token2}, {'username': 'jbadV'})
        response = GetProfileOfFriendView.post(GetProfileOfFriendView, request)
        self.assertFalse(response.data.get('success'))
        # invalid: trainers are not allowed to view friend profiles
        request = ViewSupport.setup_request({'Session-Token': self.token1}, {'username': 'jbadV'})
        response = GetProfileOfFriendView.post(GetProfileOfFriendView, request)
        self.assertFalse(response.data.get('success'))
        # invalid session token
        request = ViewSupport.setup_request({'Session-Token': 'invalid'}, {'username': 'jbadV'})
        response = GetProfileOfFriendView.post(GetProfileOfFriendView, request)
        self.assertFalse(response.data.get('success'))
        # missing arguments: response names the missing header and body fields
        request = ViewSupport.setup_request({}, {})
        response = GetProfileOfFriendView.post(GetProfileOfFriendView, request)
        self.assertFalse(response.data.get('success'))
        self.assertEqual(response.data.get('data').get('header'), ['Session-Token'])
        self.assertEqual(response.data.get('data').get('data'), ['username'])
8672e2150ee3dcf8603c70c126bf2baa76382cd9 | 151,655 | py | Python | dlkit/manager_impls/assessment/managers.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | [
"MIT"
] | 2 | 2018-02-23T12:16:11.000Z | 2020-10-08T17:54:24.000Z | dlkit/manager_impls/assessment/managers.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | [
"MIT"
] | 87 | 2017-04-21T18:57:15.000Z | 2021-12-13T19:43:57.000Z | dlkit/manager_impls/assessment/managers.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | [
"MIT"
] | 1 | 2018-03-01T16:44:25.000Z | 2018-03-01T16:44:25.000Z | """Manager utility implementations of assessment managers."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from ..osid import managers as osid_managers
from ..osid.osid_errors import NullArgument
from ..osid.osid_errors import Unimplemented
from ..type.objects import TypeList
from dlkit.abstract_osid.assessment import managers as abc_assessment_managers
class AssessmentProfile(abc_assessment_managers.AssessmentProfile, osid_managers.OsidProfile):
    """The ``AssessmentProfile`` describes the interoperability among assessment services.

    This manager-utility base profile advertises no capabilities: every
    ``supports_*`` method returns ``False`` and every record-type getter
    returns an empty ``TypeList``.  Concrete manager implementations
    override exactly the methods for the services and record types they
    actually provide.  Per the OSID specification, every method here is
    mandatory (*compliance: mandatory*).
    """

    @staticmethod
    def _unsupported_record_type(record_type):
        """Shared check used by every ``supports_*_record_type`` method.

        arg:    record_type (osid.type.Type): a record ``Type`` to test
        return: (boolean) - always ``False``; this base profile supports
                no record types
        raise:  NullArgument - ``record_type`` is ``null``
        """
        if record_type is None:
            raise NullArgument()
        return False

    def supports_visible_federation(self):
        """Tests if federation is visible."""
        return False

    def supports_my_assessment_taken(self):
        """Tests if a session is available to look up taken assessments for the authenticated agent."""
        return False

    def supports_assessment(self):
        """Tests for the availability of the assessment service for taking and examining assessments taken."""
        return False

    def supports_assessment_results(self):
        """Tests for the availability of an assessment results service."""
        return False

    def supports_item_lookup(self):
        """Tests if an item lookup service is supported."""
        return False

    def supports_item_query(self):
        """Tests if an item query service is supported."""
        return False

    def supports_item_search(self):
        """Tests if an item search service is supported."""
        return False

    def supports_item_admin(self):
        """Tests if an item administrative service is supported."""
        return False

    def supports_item_notification(self):
        """Tests if item notification (messages on item create/modify/delete) is supported."""
        return False

    def supports_item_bank(self):
        """Tests if an item to bank lookup session is available."""
        return False

    def supports_item_bank_assignment(self):
        """Tests if an item to bank assignment session is available."""
        return False

    def supports_item_smart_bank(self):
        """Tests if an item smart bank session is available."""
        return False

    def supports_assessment_lookup(self):
        """Tests if an assessment lookup service (methods to access assessments) is supported."""
        return False

    def supports_assessment_query(self):
        """Tests if an assessment query service is supported."""
        return False

    def supports_assessment_search(self):
        """Tests if an assessment search service is supported."""
        return False

    def supports_assessment_admin(self):
        """Tests if an assessment administrative service is supported."""
        return False

    def supports_assessment_notification(self):
        """Tests if assessment notification (messages on assessment create/modify/delete) is supported."""
        return False

    def supports_assessment_bank(self):
        """Tests if an assessment to bank lookup session is available."""
        return False

    def supports_assessment_bank_assignment(self):
        """Tests if an assessment to bank assignment session is available."""
        return False

    def supports_assessment_smart_bank(self):
        """Tests if an assessment smart bank session is available."""
        return False

    def supports_assessment_basic_authoring(self):
        """Tests if an assessment basic authoring session is available."""
        return False

    def supports_assessment_offered_lookup(self):
        """Tests if an assessment offered lookup service is supported."""
        return False

    def supports_assessment_offered_query(self):
        """Tests if an assessment offered query service is supported."""
        return False

    def supports_assessment_offered_search(self):
        """Tests if an assessment offered search service is supported."""
        return False

    def supports_assessment_offered_admin(self):
        """Tests if an assessment offered administrative service is supported."""
        return False

    def supports_assessment_offered_notification(self):
        """Tests if assessment offered notification (messages on offered create/modify/delete) is supported."""
        return False

    def supports_assessment_offered_bank(self):
        """Tests if an assessment offered to bank lookup session is available."""
        return False

    def supports_assessment_offered_bank_assignment(self):
        """Tests if an assessment offered to bank assignment session is available."""
        return False

    def supports_assessment_offered_smart_bank(self):
        """Tests if an assessment offered smart bank session is available."""
        return False

    def supports_assessment_taken_lookup(self):
        """Tests if an assessment taken lookup service is supported."""
        return False

    def supports_assessment_taken_query(self):
        """Tests if an assessment taken query service is supported."""
        return False

    def supports_assessment_taken_search(self):
        """Tests if an assessment taken search service is supported."""
        return False

    def supports_assessment_taken_admin(self):
        """Tests if an assessment taken administrative service (used to instantiate an assessment offered) is supported."""
        return False

    def supports_assessment_taken_notification(self):
        """Tests if assessment taken notification (messages on taken create/modify/delete) is supported."""
        return False

    def supports_assessment_taken_bank(self):
        """Tests if an assessment taken to bank lookup session is available."""
        return False

    def supports_assessment_taken_bank_assignment(self):
        """Tests if an assessment taken to bank assignment session is available."""
        return False

    def supports_assessment_taken_smart_bank(self):
        """Tests if an assessment taken smart bank session is available."""
        return False

    def supports_bank_lookup(self):
        """Tests if a bank lookup service (methods to access assessment banks) is supported."""
        return False

    def supports_bank_query(self):
        """Tests if a bank query service is supported."""
        return False

    def supports_bank_search(self):
        """Tests if a bank search service is supported."""
        return False

    def supports_bank_admin(self):
        """Tests if a bank administrative service is supported."""
        return False

    def supports_bank_notification(self):
        """Tests if bank notification (messages on bank create/modify/delete) is supported."""
        return False

    def supports_bank_hierarchy(self):
        """Tests if a bank hierarchy traversal is supported."""
        return False

    def supports_bank_hierarchy_design(self):
        """Tests if bank hierarchy design is supported."""
        return False

    def supports_assessment_authoring(self):
        """Tests if an assessment authoring service is supported."""
        return False

    def supports_assessment_batch(self):
        """Tests if an assessment batch service is supported."""
        return False

    def get_item_record_types(self):
        """Gets the supported ``Item`` record types (always empty here)."""
        return TypeList([])

    item_record_types = property(fget=get_item_record_types)

    def supports_item_record_type(self, item_record_type=None):
        """Tests if the given ``Item`` record type is supported."""
        return self._unsupported_record_type(item_record_type)

    def get_item_search_record_types(self):
        """Gets the supported ``Item`` search record types (always empty here)."""
        return TypeList([])

    item_search_record_types = property(fget=get_item_search_record_types)

    def supports_item_search_record_type(self, item_search_record_type=None):
        """Tests if the given ``Item`` search record type is supported."""
        return self._unsupported_record_type(item_search_record_type)

    def get_assessment_record_types(self):
        """Gets the supported ``Assessment`` record types (always empty here)."""
        return TypeList([])

    assessment_record_types = property(fget=get_assessment_record_types)

    def supports_assessment_record_type(self, assessment_record_type=None):
        """Tests if the given ``Assessment`` record type is supported."""
        return self._unsupported_record_type(assessment_record_type)

    def get_assessment_search_record_types(self):
        """Gets the supported ``Assessment`` search record types (always empty here)."""
        return TypeList([])

    assessment_search_record_types = property(fget=get_assessment_search_record_types)

    def supports_assessment_search_record_type(self, assessment_search_record_type=None):
        """Tests if the given assessment search record type is supported."""
        return self._unsupported_record_type(assessment_search_record_type)

    def get_assessment_offered_record_types(self):
        """Gets the supported ``AssessmentOffered`` record types (always empty here)."""
        return TypeList([])

    assessment_offered_record_types = property(fget=get_assessment_offered_record_types)

    def supports_assessment_offered_record_type(self, assessment_offered_record_type=None):
        """Tests if the given ``AssessmentOffered`` record type is supported."""
        return self._unsupported_record_type(assessment_offered_record_type)

    def get_assessment_offered_search_record_types(self):
        """Gets the supported ``AssessmentOffered`` search record types (always empty here)."""
        return TypeList([])

    assessment_offered_search_record_types = property(fget=get_assessment_offered_search_record_types)

    def supports_assessment_offered_search_record_type(self, assessment_offered_search_record_type=None):
        """Tests if the given ``AssessmentOffered`` search record type is supported."""
        return self._unsupported_record_type(assessment_offered_search_record_type)

    def get_assessment_taken_record_types(self):
        """Gets the supported ``AssessmentTaken`` record types (always empty here)."""
        return TypeList([])

    assessment_taken_record_types = property(fget=get_assessment_taken_record_types)

    def supports_assessment_taken_record_type(self, assessment_taken_record_type=None):
        """Tests if the given ``AssessmentTaken`` record type is supported."""
        return self._unsupported_record_type(assessment_taken_record_type)

    def get_assessment_taken_search_record_types(self):
        """Gets the supported ``AssessmentTaken`` search record types (always empty here)."""
        return TypeList([])

    assessment_taken_search_record_types = property(fget=get_assessment_taken_search_record_types)

    def supports_assessment_taken_search_record_type(self, assessment_taken_search_record_type=None):
        """Tests if the given ``AssessmentTaken`` search record type is supported."""
        return self._unsupported_record_type(assessment_taken_search_record_type)

    def get_assessment_section_record_types(self):
        """Gets the supported ``AssessmentSection`` record types (always empty here)."""
        return TypeList([])

    assessment_section_record_types = property(fget=get_assessment_section_record_types)

    def supports_assessment_section_record_type(self, assessment_section_record_type=None):
        """Tests if the given ``AssessmentSection`` record type is supported."""
        return self._unsupported_record_type(assessment_section_record_type)

    def get_bank_record_types(self):
        """Gets the supported ``Bank`` record types (always empty here)."""
        return TypeList([])

    bank_record_types = property(fget=get_bank_record_types)

    def supports_bank_record_type(self, bank_record_type=None):
        """Tests if the given ``Bank`` record type is supported."""
        return self._unsupported_record_type(bank_record_type)

    def get_bank_search_record_types(self):
        """Gets the supported bank search record types (always empty here)."""
        return TypeList([])

    bank_search_record_types = property(fget=get_bank_search_record_types)

    def supports_bank_search_record_type(self, bank_search_record_type=None):
        """Tests if the given bank search record type is supported."""
        return self._unsupported_record_type(bank_search_record_type)
class AssessmentManager(abc_assessment_managers.AssessmentManager, osid_managers.OsidManager, AssessmentProfile):
"""The assessment manager provides access to assessment sessions and provides interoperability tests for various aspects of this service.
The sessions included in this manager are:
* ``MyAssessmentTakenSession:`` a session to get taken or in
progress assessments for the current agent
* ``AssessmentSession:`` a session to be assessed and examine
assessments taken
* ``AssessmentResultsSession:`` a session to retrieve assessment
results
* ``ItemLookupSession:`` a session to look up ``Items``
* ``ItemQuerySession`` : a session to query ``Items``
* ``ItemSearchSession:`` a session to search ``Items``
* ``ItemAdminSession:`` a session to create, modify and delete
``Items``
* ``ItemNotificationSession:`` a session to receive messages
pertaining to ``Item`` changes
* ``ItemBankSession:`` a session for looking up item and bank
mappings
* ``ItemBankAssignmentSession:`` a session for managing item and
bank mappings
* ``ItemSmartBankSession:`` a session for managing dynamic banks
* ``AssessmentLookupSession:`` a session to look up
``Assessments``
* ``AssessmentQuerySession:`` a session to query ``Assessments``
* ``AssessmentSearchSession:`` a session to search ``Assessments``
* ``AssessmentAdminSession:`` a session to create, modify and
delete ``Assessments``
* ``AssessmentNotificationSession:`` a session to receive messages
pertaining to ``Assessment`` changes
* ``AssessmentBankSession:`` a session for looking up assessment
and bank mappings
* ``AssessmentBankAssignmentSession:`` a session for managing
assessment and bank mappings
* ``AssessmentSmartBankSession:`` a session for managing dynamic
banks
* ``AssessmentBasicAuthoringSession:`` a session for making simple
mappings of assessment items to assessments
* ``AssessmentOfferedLookupSession:`` a session to look up
``AssessmentsOffered``
* ``AssessmentOfferedQuerySession:`` a session to query
``AssessmentsOffered``
* ``AssessmentOfferedSearchSession`` : a session to search
``AssessmentsOffered``
* ``AssessmentOfferedAdminSession:`` a session to create, modify
and delete ``AssessmentsOffered``
* ``AssessmentOfferedNotificationSession:`` a session to receive
messages pertaining to ``AssessmentOffered`` changes
* ``AssessmentOfferedBankSession:`` a session for looking up
assessments offered and bank mappings
* ``AssessmentOfferedBankAssignmentSession:`` a session for
managing assessments offered and bank mappings
* ``AssessmentOfferedSmartBankSession`` : a session to manage
dynamic banks of assessments offered
* ``AssessmentTakenLookupSession:`` a session to look up
``Assessments``
* ``AssessmentTakenQuerySession:`` a session to query
``Assessments``
* ``AssessmentTakenSearchSession:`` a session to search
Assessments
* ``AssessmentTakenAdminSession:`` a session to create, modify and
delete ``AssessmentsTaken``
* ``AssessmentTakenNotificationSession:`` a session to receive
messages pertaining to ``AssessmentTaken`` changes
* ``AssessmentTakenBankSession:`` a session for looking up
assessments taken and bank mappings
* ``AssessmentTakenBankAssignmentSession:`` a session for
managing assessments taken and bank mappings
* ``AssessmentTakenSmartBankSession:`` a session to manage dynamic
banks of assessments taken
* ``BankLookupSession:`` a session to lookup banks
* ``BankQuerySession`` : a session to query banks
* ``BankSearchSession:`` a session to search banks
* ``BankAdminSession`` : a session to create, modify and delete
banks
* ``BankNotificationSession`` : a session to receive messages
pertaining to ``Bank`` changes
* ``BankHierarchySession`` : a session to traverse the ``Bank``
hierarchy
* ``BankHierarchyDesignSession`` : a session to manage the
``Bank`` hierarchy
"""
def get_my_assessment_taken_session(self):
    """Gets a ``MyAssessmentTakenSession`` to retrieve assessments taken for the current agent.

    return: (osid.assessment.MyAssessmentTakenSession) - a
            ``MyAssessmentTakenSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_my_assessment_taken()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_my_assessment_taken()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

my_assessment_taken_session = property(fget=get_my_assessment_taken_session)
def get_my_assessment_taken_session_for_bank(self, bank_id=None):
    """Gets a ``MyAssessmentTakenSession`` to retrieve assessments taken for the current agent for the given bank ``Id``.

    arg:    bank_id (osid.id.Id): the ``Id`` of a bank
    return: (osid.assessment.MyAssessmentTakenSession) - a
            ``MyAssessmentTakenSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_my_assessment_taken()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_my_assessment_taken()`` is ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_session(self):
    """Gets an ``AssessmentSession`` which is responsible for taking assessments and examining responses from assessments taken.

    return: (osid.assessment.AssessmentSession) - an assessment
            session for this service
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_session = property(fget=get_assessment_session)
def get_assessment_session_for_bank(self, bank_id=None):
    """Gets an ``AssessmentSession`` which is responsible for performing assessments for the given bank ``Id``.

    arg:    bank_id (osid.id.Id): the ``Id`` of a bank
    return: (osid.assessment.AssessmentSession) - an assessment
            session for this service
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment()`` is ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_results_session(self):
    """Gets an ``AssessmentResultsSession`` to retrieve assessment results.

    return: (osid.assessment.AssessmentResultsSession) - an
            assessment results session for this service
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_results()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_results()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_results_session = property(fget=get_assessment_results_session)
def get_assessment_results_session_for_bank(self, bank_id=None):
    """Gets an ``AssessmentResultsSession`` to retrieve assessment results for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the assessment taken
    return: (osid.assessment.AssessmentResultsSession) - an
            assessment results session for this service
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_results()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_results()`` is ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_item_lookup_session(self):
    """Gets the ``OsidSession`` associated with the item lookup service.

    return: (osid.assessment.ItemLookupSession) - an
            ``ItemLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_lookup()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_lookup()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

item_lookup_session = property(fget=get_item_lookup_session)
def get_item_lookup_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the item lookup service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.ItemLookupSession) - ``an
            _item_lookup_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_item_lookup()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_lookup()`` and ``supports_visible_federation()``
    are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_item_query_session(self):
    """Gets the ``OsidSession`` associated with the item query service.

    return: (osid.assessment.ItemQuerySession) - an
            ``ItemQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_query()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_query()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

item_query_session = property(fget=get_item_query_session)
def get_item_query_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the item query service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.ItemQuerySession) - ``an
            _item_query_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_item_query()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_query()`` and ``supports_visible_federation()``
    are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_item_search_session(self):
    """Gets the ``OsidSession`` associated with the item search service.

    return: (osid.assessment.ItemSearchSession) - an
            ``ItemSearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_search()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_search()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

item_search_session = property(fget=get_item_search_session)
def get_item_search_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the item search service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.ItemSearchSession) - ``an
            _item_search_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_item_search()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_search()`` and ``supports_visible_federation()``
    are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_item_admin_session(self):
    """Gets the ``OsidSession`` associated with the item administration service.

    return: (osid.assessment.ItemAdminSession) - an
            ``ItemAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_admin()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_admin()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

item_admin_session = property(fget=get_item_admin_session)
def get_item_admin_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the item admin service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.ItemAdminSession) - ``an
            _item_admin_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_item_admin()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_admin()`` and ``supports_visible_federation()``
    are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_item_notification_session(self, item_receiver=None):
    """Gets the notification session for notifications pertaining to item changes.

    arg:    item_receiver (osid.assessment.ItemReceiver): the item
            receiver interface
    return: (osid.assessment.ItemNotificationSession) - an
            ``ItemNotificationSession``
    raise:  NullArgument - ``item_receiver`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_notification()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_notification()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    # NOTE(review): unlike the _for_bank variant, a null item_receiver
    # is not checked here before raising Unimplemented.
    raise Unimplemented()
def get_item_notification_session_for_bank(self, item_receiver=None, bank_id=None):
    """Gets the ``OsidSession`` associated with the item notification service for the given bank.

    arg:    item_receiver (osid.assessment.ItemReceiver): the item
            receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentNotificationSession) - ``an
            _item_notification_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``item_receiver`` or ``bank_id`` is
            ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_item_notification()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_notification()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required; raise an instance (was a bare
    # ``raise NullArgument``), matching the rest of this module.
    if item_receiver is None or bank_id is None:
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_item_bank_session(self):
    """Gets the ``OsidSession`` associated with the item banking service.

    return: (osid.assessment.ItemBankSession) - an
            ``ItemBankSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_bank()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_bank()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

item_bank_session = property(fget=get_item_bank_session)
def get_item_bank_assignment_session(self):
    """Gets the ``OsidSession`` associated with the item bank assignment service.

    return: (osid.assessment.ItemBankAssignmentSession) - an
            ``ItemBankAssignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_bank_assignment()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_bank_assignment()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

item_bank_assignment_session = property(fget=get_item_bank_assignment_session)
def get_item_smart_bank_session(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the item smart banking service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.ItemSmartBankSession) - an
            ``ItemSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_smart_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_smart_bank()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_lookup_session(self):
    """Gets the ``OsidSession`` associated with the assessment lookup service.

    return: (osid.assessment.AssessmentLookupSession) - an
            ``AssessmentLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_lookup()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_lookup()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_lookup_session = property(fget=get_assessment_lookup_session)
def get_assessment_lookup_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment lookup service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentLookupSession) - ``an
            _assessment_lookup_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_lookup()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_lookup()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_query_session(self):
    """Gets the ``OsidSession`` associated with the assessment query service.

    return: (osid.assessment.AssessmentQuerySession) - an
            ``AssessmentQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_query()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_query()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_query_session = property(fget=get_assessment_query_session)
def get_assessment_query_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment query service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentQuerySession) - ``an
            _assessment_query_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_query()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_query()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_search_session(self):
    """Gets the ``OsidSession`` associated with the assessment search service.

    return: (osid.assessment.AssessmentSearchSession) - an
            ``AssessmentSearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_search()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_search()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_search_session = property(fget=get_assessment_search_session)
def get_assessment_search_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment search service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentSearchSession) - ``an
            _assessment_search_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_search()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_search()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_admin_session(self):
    """Gets the ``OsidSession`` associated with the assessment administration service.

    return: (osid.assessment.AssessmentAdminSession) - an
            ``AssessmentAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_admin()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_admin()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_admin_session = property(fget=get_assessment_admin_session)
def get_assessment_admin_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment admin service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentAdminSession) - ``an
            _assessment_admin_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_admin()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_admin()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_notification_session(self, assessment_receiver=None):
    """Gets the notification session for notifications pertaining to assessment changes.

    arg:    assessment_receiver
            (osid.assessment.AssessmentReceiver): the assessment
            receiver interface
    return: (osid.assessment.AssessmentNotificationSession) - an
            ``AssessmentNotificationSession``
    raise:  NullArgument - ``assessment_receiver`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_notification()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_notification()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    # NOTE(review): unlike the _for_bank variant, a null receiver is
    # not checked here before raising Unimplemented.
    raise Unimplemented()
def get_assessment_notification_session_for_bank(self, assessment_receiver=None, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment notification service for the given bank.

    arg:    assessment_receiver
            (osid.assessment.AssessmentReceiver): the assessment
            receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentNotificationSession) - ``an
            _assessment_notification_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``assessment_receiver`` or ``bank_id`` is
            ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_notification()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_notification()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required; raise an instance (was a bare
    # ``raise NullArgument``), matching the rest of this module.
    if assessment_receiver is None or bank_id is None:
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_bank_session(self):
    """Gets the ``OsidSession`` associated with the assessment banking service.

    return: (osid.assessment.AssessmentBankSession) - an
            ``AssessmentBankSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_bank()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_bank_session = property(fget=get_assessment_bank_session)
def get_assessment_bank_assignment_session(self):
    """Gets the ``OsidSession`` associated with the assessment bank assignment service.

    return: (osid.assessment.AssessmentBankAssignmentSession) - an
            ``AssessmentBankAssignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_bank_assignment()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_bank_assignment()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_bank_assignment_session = property(fget=get_assessment_bank_assignment_session)
def get_assessment_smart_bank_session(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment smart banking service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentSmartBankSession) - an
            ``AssessmentSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_smart_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_smart_bank()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_basic_authoring_session(self):
    """Gets the ``OsidSession`` associated with the assessment authoring service.

    return: (osid.assessment.AssessmentBasicAuthoringSession) - an
            ``AssessmentBasicAuthoringSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_basic_authoring()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_basic_authoring()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_basic_authoring_session = property(fget=get_assessment_basic_authoring_session)
def get_assessment_basic_authoring_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment authoring service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of a bank
    return: (osid.assessment.AssessmentBasicAuthoringSession) - an
            ``AssessmentBasicAuthoringSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_basic_authoring()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_basic_authoring()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_offered_lookup_session(self):
    """Gets the ``OsidSession`` associated with the assessment offered lookup service.

    return: (osid.assessment.AssessmentOfferedLookupSession) - an
            ``AssessmentOfferedLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_lookup()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_lookup()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_offered_lookup_session = property(fget=get_assessment_offered_lookup_session)
def get_assessment_offered_lookup_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment offered lookup service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentOfferedLookupSession) - an
            ``AssessmentOfferedLookupSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_lookup()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_lookup()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_offered_query_session(self):
    """Gets the ``OsidSession`` associated with the assessment offered query service.

    return: (osid.assessment.AssessmentOfferedQuerySession) - an
            ``AssessmentOfferedQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_query()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_query()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_offered_query_session = property(fget=get_assessment_offered_query_session)
def get_assessment_offered_query_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment offered query service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentOfferedQuerySession) - an
            ``AssessmentOfferedQuerySession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_query()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_query()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_offered_search_session(self):
    """Gets the ``OsidSession`` associated with the assessment offered search service.

    return: (osid.assessment.AssessmentOfferedSearchSession) - an
            ``AssessmentOfferedSearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_search()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_search()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_offered_search_session = property(fget=get_assessment_offered_search_session)
def get_assessment_offered_search_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment offered search service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentOfferedSearchSession) - an
            ``AssessmentOfferedSearchSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_search()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_search()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_offered_admin_session(self):
    """Gets the ``OsidSession`` associated with the assessment offered administration service.

    return: (osid.assessment.AssessmentOfferedAdminSession) - an
            ``AssessmentOfferedAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_admin()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_admin()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    raise Unimplemented()

assessment_offered_admin_session = property(fget=get_assessment_offered_admin_session)
def get_assessment_offered_admin_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment offered admin service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentOfferedAdminSession) - an
            ``AssessmentOfferedAdminSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_admin()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_admin()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    if bank_id is None:
        # Raise an instance (was a bare ``raise NullArgument``),
        # matching the instantiated raises used elsewhere in this module.
        raise NullArgument()
    # Session retrieval is not implemented in this stub manager.
    raise Unimplemented()
def get_assessment_offered_notification_session(self, assessment_offered_receiver=None):
    """Gets the notification session for notifications pertaining to offered assessment changes.

    arg:    assessment_offered_receiver
            (osid.assessment.AssessmentOfferedReceiver): the
            assessment offered receiver interface
    return: (osid.assessment.AssessmentOfferedNotificationSession) -
            an ``AssessmentOfferedNotificationSession``
    raise:  NullArgument - ``assessment_offered_receiver`` is
            ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_offered_notification()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_notification()`` is ``true``.*

    """
    # Base implementation never provides the session; subclasses that
    # support this service must override.
    # NOTE(review): unlike the _for_bank variant, a null receiver is
    # not checked here before raising Unimplemented.
    raise Unimplemented()
def get_assessment_offered_notification_session_for_bank(self, assessment_offered_receiver=None, bank_id=None):
    """Return the offered-assessment notification session for one bank.

    arg:    assessment_offered_receiver
            (osid.assessment.AssessmentOfferedReceiver): the
            assessment offered receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentOfferedNotificationSession) -
            an ``AssessmentOfferedNotificationSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``assessment_offered_receiver`` or
            ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_offered_notification()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when
    ``supports_assessment_offered_notification()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (assessment_offered_receiver, bank_id):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_assessment_offered_bank_session(self):
    """Return the session for reading offered-assessment/bank mappings.

    return: (osid.assessment.AssessmentOfferedBankSession) - an
            ``AssessmentOfferedBankSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_bank()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_offered_bank()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_offered_bank_session = property(fget=get_assessment_offered_bank_session)
def get_assessment_offered_bank_assignment_session(self):
    """Return the session for assigning offered assessments to banks.

    return: (osid.assessment.AssessmentOfferedBankAssignmentSession) -
            an ``AssessmentOfferedBankAssignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_offered_bank_assignment()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_offered_bank_assignment()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_offered_bank_assignment_session = property(fget=get_assessment_offered_bank_assignment_session)
def get_assessment_offered_smart_bank_session(self, bank_id=None):
    """Return the assessment offered smart banking session for a bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentOfferedSmartBankSession) - an
            ``AssessmentOfferedSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_offered_smart_bank()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_smart_bank()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Null check required by the contract; the session itself is
    # provided by concrete implementations.
    if bank_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_assessment_taken_lookup_session(self):
    """Return the session for the assessment taken lookup service.

    return: (osid.assessment.AssessmentTakenLookupSession) - an
            ``AssessmentTakenLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_lookup()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_lookup()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_taken_lookup_session = property(fget=get_assessment_taken_lookup_session)
def get_assessment_taken_lookup_session_for_bank(self, bank_id=None):
    """Return the ``AssessmentTakenLookupSession`` scoped to one bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentTakenLookupSession) - an
            ``AssessmentTakenLookupSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_lookup()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_lookup()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Enforce the null-argument contract, then defer to the provider.
    if bank_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_assessment_taken_query_session(self):
    """Return the session for the assessment taken query service.

    return: (osid.assessment.AssessmentTakenQuerySession) - an
            ``AssessmentTakenQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_query()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_query()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_taken_query_session = property(fget=get_assessment_taken_query_session)
def get_assessment_taken_query_session_for_bank(self, bank_id=None):
    """Return the ``AssessmentTakenQuerySession`` scoped to one bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentTakenQuerySession) - an
            ``AssessmentTakenQuerySession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_query()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_query()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Enforce the null-argument contract, then defer to the provider.
    if bank_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_assessment_taken_search_session(self):
    """Return the session for the assessment taken search service.

    return: (osid.assessment.AssessmentTakenSearchSession) - an
            ``AssessmentTakenSearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_search()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_search()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_taken_search_session = property(fget=get_assessment_taken_search_session)
def get_assessment_taken_search_session_for_bank(self, bank_id=None):
    """Return the ``AssessmentTakenSearchSession`` scoped to one bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentTakenSearchSession) - an
            ``AssessmentTakenSearchSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_search()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_search()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Enforce the null-argument contract, then defer to the provider.
    if bank_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_assessment_taken_admin_session(self):
    """Return the session for the assessment taken administration service.

    return: (osid.assessment.AssessmentTakenAdminSession) - an
            ``AssessmentTakenAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_admin()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_admin()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_taken_admin_session = property(fget=get_assessment_taken_admin_session)
def get_assessment_taken_admin_session_for_bank(self, bank_id=None):
    """Gets the ``OsidSession`` associated with the assessment taken admin service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentTakenAdminSession) - an
            ``AssessmentTakenAdminSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_admin()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_admin()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Null check required by the contract; concrete providers return
    # the actual admin session.
    if bank_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_assessment_taken_notification_session(self, assessment_taken_receiver=None):
    """Return the notification session for taken-assessment changes.

    arg:    assessment_taken_receiver
            (osid.assessment.AssessmentTakenReceiver): the assessment
            taken receiver interface
    return: (osid.assessment.AssessmentTakenNotificationSession) - an
            ``AssessmentTakenNotificationSession``
    raise:  NullArgument - ``assessment_taken_receiver`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_taken_notification()`` is ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_notification()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()
def get_assessment_taken_notification_session_for_bank(self, assessment_taken_receiver=None, bank_id=None):
    """Return the taken-assessment notification session for one bank.

    arg:    assessment_taken_receiver
            (osid.assessment.AssessmentTakenReceiver): the assessment
            taken receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentTakenNotificationSession) - an
            ``AssessmentTakenNotificationSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``assessment_taken_receiver`` or
            ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_taken_notification()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_notification()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (assessment_taken_receiver, bank_id):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_assessment_taken_bank_session(self):
    """Return the session for reading taken-assessment/bank mappings.

    return: (osid.assessment.AssessmentTakenBankSession) - an
            ``AssessmentTakenBankSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_bank()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_bank()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_taken_bank_session = property(fget=get_assessment_taken_bank_session)
def get_assessment_taken_bank_assignment_session(self):
    """Return the session for assigning taken assessments to banks.

    return: (osid.assessment.AssessmentTakenBankAssignmentSession) -
            an ``AssessmentTakenBankAssignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_taken_bank_assignment()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_taken_bank_assignment()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_taken_bank_assignment_session = property(fget=get_assessment_taken_bank_assignment_session)
def get_assessment_taken_smart_bank_session(self, bank_id=None):
    """Return the assessment taken smart banking session for a bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    return: (osid.assessment.AssessmentTakenSmartBankSession) - an
            ``AssessmentTakenSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_taken_smart_bank()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_smart_bank()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # Null check required by the contract; the session itself is
    # provided by concrete implementations.
    if bank_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_bank_lookup_session(self):
    """Return the session for the bank lookup service.

    return: (osid.assessment.BankLookupSession) - a
            ``BankLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_lookup()`` is ``false``
    *compliance: optional -- implement when ``supports_bank_lookup()``
    is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
bank_lookup_session = property(fget=get_bank_lookup_session)
def get_bank_query_session(self):
    """Return the session for the bank query service.

    return: (osid.assessment.BankQuerySession) - a
            ``BankQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_query()`` is ``false``
    *compliance: optional -- implement when ``supports_bank_query()``
    is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
bank_query_session = property(fget=get_bank_query_session)
def get_bank_search_session(self):
    """Return the session for the bank search service.

    return: (osid.assessment.BankSearchSession) - a
            ``BankSearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_search()`` is ``false``
    *compliance: optional -- implement when ``supports_bank_search()``
    is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
bank_search_session = property(fget=get_bank_search_session)
def get_bank_admin_session(self):
    """Return the session for the bank administration service.

    return: (osid.assessment.BankAdminSession) - a
            ``BankAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_admin()`` is ``false``
    *compliance: optional -- implement when ``supports_bank_admin()``
    is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
bank_admin_session = property(fget=get_bank_admin_session)
def get_bank_notification_session(self, bankreceiver=None):
    """Return the notification session for bank service changes.

    arg:    bankreceiver (osid.assessment.BankReceiver): the bank
            receiver interface
    return: (osid.assessment.BankNotificationSession) - a
            ``BankNotificationSession``
    raise:  NullArgument - ``bank_receiver`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_notification()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_bank_notification()`` is ``true``.*
    """
    # NOTE(review): the parameter is spelled ``bankreceiver`` while the
    # spec text says ``bank_receiver``; the name is kept for keyword
    # compatibility with existing callers.
    raise Unimplemented()
def get_bank_hierarchy_session(self):
    """Return the session for traversing bank hierarchies.

    return: (osid.assessment.BankHierarchySession) - a
            ``BankHierarchySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_hierarchy()`` is ``false``
    *compliance: optional -- implement when
    ``supports_bank_hierarchy()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
bank_hierarchy_session = property(fget=get_bank_hierarchy_session)
def get_bank_hierarchy_design_session(self):
    """Gets the session designing bank hierarchies.

    return: (osid.assessment.BankHierarchyDesignSession) - a
            ``BankHierarchyDesignSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_hierarchy_design()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_hierarchy_design()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real session.
    raise Unimplemented()

# Read-only property alias for the getter above.
bank_hierarchy_design_session = property(fget=get_bank_hierarchy_design_session)
def get_assessment_authoring_manager(self):
    """Return an ``AssessmentAuthoringManager``.

    return: (osid.assessment.authoring.AssessmentAuthoringManager) -
            an ``AssessmentAuthoringManager``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_authoring()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_authoring()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real manager.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_authoring_manager = property(fget=get_assessment_authoring_manager)
def get_assessment_batch_manager(self):
    """Return an ``AssessmentBatchManager``.

    return: (osid.assessment.batch.AssessmentBatchManager) - an
            ``AssessmentBatchManager``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_batch()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_batch()`` is ``true``.*
    """
    # Abstract stub: a concrete provider supplies the real manager.
    raise Unimplemented()

# Read-only property alias for the getter above.
assessment_batch_manager = property(fget=get_assessment_batch_manager)
class AssessmentProxyManager(abc_assessment_managers.AssessmentProxyManager, osid_managers.OsidProxyManager, AssessmentProfile):
"""The assessment manager provides access to assessment sessions and provides interoperability tests for various aspects of this service.
Methods in this manager support the passing of a ``Proxy`` object.
The sessions included in this manager are:
* ``MyAssessmentTakenSession:`` a session to get taken or in
progress assessments for the current agent
* ``AssessmentSession:`` a session to be assessed and examine
assessments taken
* ``AssessmentResultsSession:`` a session to retrieve assessment
results
* ``ItemLookupSession:`` a session to look up ``Items``
* ``ItemQuerySession`` : a session to query ``Items``
* ``ItemSearchSession:`` a session to search ``Items``
* ``ItemAdminSession:`` a session to create, modify and delete
``Items``
* ``ItemNotificationSession:`` a session to receive messages
pertaining to ``Item`` changes
* ``ItemBankSession:`` a session for looking up item and bank
mappings
* ``ItemBankAssignmentSession:`` a session for managing item and
bank mappings
* ``ItemSmartBankSession:`` a session for managing dynamic banks
* ``AssessmentLookupSession:`` a session to look up
``Assessments``
* ``AssessmentQuerySession:`` a session to query ``Assessments``
* ``AssessmentSearchSession:`` a session to search ``Assessments``
* ``AssessmentAdminSession:`` a session to create, modify and
delete ``Assessments``
* ``AssessmentNotificationSession:`` a session to receive messages
pertaining to ``Assessment`` changes
* ``AssessmentBankSession:`` a session for looking up assessment
and bank mappings
* ``AssessmentBankAssignmentSession:`` a session for managing
assessment and bank mappings
* ``AssessmentSmartBankSession:`` a session for managing dynamic
banks
* ``AssessmentBasicAuthoringSession:`` a session for making simple
mappings of assessment items to assessments
* ``AssessmentOfferedLookupSession:`` a session to look up
``Assessments``
* ``AssessmentOfferedQuerySession:`` a session to query
``Assessments``
* ``AssessmentOfferedSearchSession`` : a session to search
``Assessments``
* ``AssessmentOfferedAdminSession:`` a session to create, modify
and delete ``Assessments``
* ``AssessmentOfferedNotificationSession:`` a session to receive
messages pertaining to ``AssessmentOffered`` changes
* ``AssessmentOfferedBankSession:`` a session for looking up
assessment and bank mappings
* ``AssessmentOfferedBankAssignmentSession:`` a session for
managing assessment and bank mappings
* ``AssessmentOfferedSmartBankSession`` : a session to manage
dynamic banks
* ``AssessmentTakenLookupSession:`` a session to look up
``Assessments``
* ``AssessmentTakenQuerySession:`` a session to query
``Assessments``
* ``AssessmentTakenSearchSession:`` a session to search
Assessments
* ``AssessmentTakenAdminSession:`` a session to create, modify and
delete ``AssessmentsTaken``
* ``AssessmentTakenNotificationSession:`` a session to receive
messages pertaining to ``AssessmentTaken`` changes
* ``AssessmentTakenBankSession:`` a session for looking up
assessments taken and bank mappings
* ``AssessmentTakenBankAssignmentSession:`` a session for
managing assessments taken and bank mappings
* ``AssessmentTakenSmartBankSession:`` a session to manage dynamic
banks of assessments taken
* ``BankLookupSession:`` a session to lookup banks
* ``BankQuerySession`` : a session to query banks
* ``BankSearchSession:`` a session to search banks
* ``BankAdminSession`` : a session to create, modify and delete
banks
* ``BankNotificationSession`` : a session to receive messages
pertaining to ``Bank`` changes
* ``BankHierarchySession`` : a session to traverse the ``Bank``
hierarchy
* ``BankHierarchyDesignSession`` : a session to manage the
``Bank`` hierarchy
"""
def get_my_assessment_taken_session(self, proxy=None):
    """Return a ``MyAssessmentTakenSession`` for the current agent.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.MyAssessmentTakenSession) - a
            ``MyAssessmentTakenSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_my_assessment_taken()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_my_assessment_taken()`` is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_my_assessment_taken_session_for_bank(self, bank_id=None, proxy=None):
    """Return a ``MyAssessmentTakenSession`` scoped to a given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of a bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.MyAssessmentTakenSession) - a
            ``MyAssessmentTakenSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_my_assessment_taken()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_my_assessment_taken()`` is ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (bank_id, proxy):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_assessment_session(self, proxy=None):
    """Return an ``AssessmentSession`` for taking and examining assessments.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentSession) - an assessment
            session for this service
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment()`` is ``false``
    *compliance: optional -- implement when ``supports_assessment()``
    is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_assessment_session_for_bank(self, bank_id=None, proxy=None):
    """Return an ``AssessmentSession`` for assessments in a given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of a bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentSession) - an assessment
            session for this service
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment()`` is ``false``
    *compliance: optional -- implement when ``supports_assessment()``
    is ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (bank_id, proxy):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_assessment_results_session(self, proxy=None):
    """Return an ``AssessmentResultsSession`` for reading results.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentResultsSession) - an assessment
            results session for this service
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_results()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_results()`` is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_assessment_results_session_for_bank(self, bank_id=None, proxy=None):
    """Return an ``AssessmentResultsSession`` scoped to a given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the assessment taken
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentResultsSession) - an assessment
            results session for this service
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_results()`` is
            ``false``
    *compliance: optional -- implement when
    ``supports_assessment_results()`` is ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (bank_id, proxy):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_item_lookup_session(self, proxy=None):
    """Return the session for the item lookup service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemLookupSession) - an
            ``ItemLookupSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_lookup()`` is ``false``
    *compliance: optional -- implement when ``supports_item_lookup()``
    is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_item_lookup_session_for_bank(self, bank_id=None, proxy=None):
    """Return the item lookup session scoped to a given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemLookupSession) - an
            ``ItemLookupSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_lookup()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when ``supports_item_lookup()``
    and ``supports_visible_federation()`` are ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (bank_id, proxy):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_item_query_session(self, proxy=None):
    """Return the session for the item query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemQuerySession) - an
            ``ItemQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_query()`` is ``false``
    *compliance: optional -- implement when ``supports_item_query()``
    is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_item_query_session_for_bank(self, bank_id=None, proxy=None):
    """Return the item query session scoped to a given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemQuerySession) - an
            ``ItemQuerySession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_query()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when ``supports_item_query()``
    and ``supports_visible_federation()`` are ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (bank_id, proxy):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_item_search_session(self, proxy=None):
    """Return the session for the item search service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemSearchSession) - an
            ``ItemSearchSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_search()`` is ``false``
    *compliance: optional -- implement when ``supports_item_search()``
    is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_item_search_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the item search service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemSearchSession) - an
            ``ItemSearchSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_search()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_search()`` and ``supports_visible_federation()``
    are ``true``.*
    """
    # Null checks required by the contract; concrete providers return
    # the actual search session.
    if bank_id is None or proxy is None:
        raise NullArgument()
    raise Unimplemented()
def get_item_admin_session(self, proxy=None):
    """Return the session for the item administration service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemAdminSession) - an
            ``ItemAdminSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_admin()`` is ``false``
    *compliance: optional -- implement when ``supports_item_admin()``
    is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_item_admin_session_for_bank(self, bank_id=None, proxy=None):
    """Return the item admin session scoped to a given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemAdminSession) - an
            ``ItemAdminSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_admin()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- implement when ``supports_item_admin()``
    and ``supports_visible_federation()`` are ``true``.*
    """
    # Reject any null argument per the contract before dispatching.
    for arg in (bank_id, proxy):
        if arg is None:
            raise NullArgument()
    raise Unimplemented()
def get_item_notification_session(self, item_receiver=None, proxy=None):
    """Gets the notification session for notifications pertaining to item changes.

    arg:    item_receiver (osid.assessment.ItemReceiver): the item
            receiver interface
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemNotificationSession) - an
            ``ItemNotificationSession``
    raise:  NullArgument - ``item_receiver`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_notification()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_notification()`` is ``true``.*
    """
    # Both arguments are mandatory per the contract; the original code
    # only checked ``proxy``, so a null receiver slipped through.
    if item_receiver is None or proxy is None:
        raise NullArgument()
    raise Unimplemented()
def get_item_notification_session_for_bank(self, item_receiver=None, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the item notification service for the given bank.

    arg:    item_receiver (osid.assessment.ItemReceiver): the item
            receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemNotificationSession) - an
            ``ItemNotificationSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``item_receiver, bank_id`` or ``proxy`` is
            ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_notification()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_notification()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    # All three arguments are mandatory per the contract; the original
    # code omitted the ``bank_id`` null check.
    if item_receiver is None or bank_id is None or proxy is None:
        raise NullArgument()
    raise Unimplemented()
def get_item_bank_session(self, proxy=None):
    """Return the session for the item banking service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemBankSession) - an
            ``ItemBankSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_bank()`` is ``false``
    *compliance: optional -- implement when ``supports_item_bank()``
    is ``true``.*
    """
    if proxy is None:
        # Contract: a null proxy is rejected before dispatch.
        raise NullArgument()
    raise Unimplemented()
def get_item_bank_assignment_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the item bank assignment service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemBankAssignmentSession) - an
            ``ItemBankAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_bank_assignment()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_bank_assignment()`` is ``true``.*

    """
    # Reject a missing proxy before anything else.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_item_smart_bank_session(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the item smart banking service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.ItemSmartBankSession) - an
            ``ItemSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_item_smart_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_item_smart_bank()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    # Fixed: bank_id must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if bank_id is None or proxy is None:
        raise NullArgument()
    # Stub: no smart-bank session is available here.
    raise Unimplemented()
def get_assessment_lookup_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment lookup service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentLookupSession) - an
            ``AssessmentLookupSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_lookup()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_lookup()`` is ``true``.*

    """
    # Guard: a null proxy violates the documented contract.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_lookup_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment lookup service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentLookupSession) - ``an
            _assessment_lookup_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_lookup()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_lookup()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_query_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentQuerySession) - an
            ``AssessmentQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_query()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_query()`` is ``true``.*

    """
    # A proxy is mandatory for every proxy-manager session request.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_query_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment query service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentQuerySession) - ``an
            _assessment_query_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_query()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_query()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_search_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment search service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentSearchSession) - an
            ``AssessmentSearchSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_search()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_search()`` is ``true``.*

    """
    # Guard: a null proxy violates the documented contract.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_search_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment search service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentSearchSession) - ``an
            _assessment_search_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_search()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_search()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_admin_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment administration service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentAdminSession) - an
            ``AssessmentAdminSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_admin()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_admin()`` is ``true``.*

    """
    # A proxy is mandatory for every proxy-manager session request.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_admin_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment admin service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentAdminSession) - ``an
            _assessment_admin_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_admin()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_admin()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_notification_session(self, assessment_receiver=None, proxy=None):
    """Gets the notification session for notifications pertaining to assessment changes.

    arg:    assessment_receiver
            (osid.assessment.AssessmentReceiver): the assessment
            receiver interface
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentNotificationSession) - an
            ``AssessmentNotificationSession``
    raise:  NullArgument - ``assessment_receiver`` or ``proxy`` is
            ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_notification()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_notification()`` is ``true``.*

    """
    # Fixed: assessment_receiver must also be rejected when null,
    # per the NullArgument clause above (it was previously unchecked).
    if assessment_receiver is None or proxy is None:
        raise NullArgument()
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_notification_session_for_bank(self, assessment_receiver=None, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment notification service for the given bank.

    arg:    assessment_receiver
            (osid.assessment.AssessmentReceiver): the assessment
            receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentNotificationSession) - ``an
            _assessment_notification_session``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``assessment_receiver, bank_id`` or
            ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_notification()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_notification()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Fixed: bank_id must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if assessment_receiver is None or bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_bank_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment banking service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentBankSession) - an
            ``AssessmentBankSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_bank()`` is ``true``.*

    """
    # Reject a missing proxy before anything else.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_bank_assignment_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment bank assignment service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentBankAssignmentSession) - an
            ``AssessmentBankAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_bank_assignment()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_bank_assignment()`` is ``true``.*

    """
    # Guard: a null proxy violates the documented contract.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_smart_bank_session(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment smart banking service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentSmartBankSession) - an
            ``AssessmentSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_smart_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_smart_bank()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    # Fixed: bank_id must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if bank_id is None or proxy is None:
        raise NullArgument()
    # Stub: no smart-bank session is available here.
    raise Unimplemented()
def get_assessment_basic_authoring_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment authoring service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentBasicAuthoringSession) - an
            ``AssessmentBasicAuthoringSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_basic_authoring()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_basic_authoring()`` is ``true``.*

    """
    # A proxy is mandatory for every proxy-manager session request.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_basic_authoring_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment authoring service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of a bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentBasicAuthoringSession) - an
            ``AssessmentBasicAuthoringSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_basic_authoring()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_basic_authoring()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_offered_lookup_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered lookup service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedLookupSession) - an
            ``AssessmentOfferedLookupSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_lookup()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_lookup()`` is ``true``.*

    """
    # Reject a missing proxy before anything else.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_offered_lookup_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered lookup service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedLookupSession) - an
            ``AssessmentOfferedLookupSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_lookup()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_lookup()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_offered_query_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedQuerySession) - an
            ``AssessmentOfferedQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_query()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_query()`` is ``true``.*

    """
    # Guard: a null proxy violates the documented contract.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_offered_query_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered query service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedQuerySession) - an
            ``AssessmentOfferedQuerySession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_query()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_query()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_offered_search_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered search service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedSearchSession) - an
            ``AssessmentOfferedSearchSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_search()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_search()`` is ``true``.*

    """
    # A proxy is mandatory for every proxy-manager session request.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_offered_search_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered search service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedSearchSession) - an
            ``AssessmentOfferedSearchSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_search()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_search()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_offered_admin_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered administration service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedAdminSession) - an
            ``AssessmentOfferedAdminSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_admin()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_admin()`` is ``true``.*

    """
    # Reject a missing proxy before anything else.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_offered_admin_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered admin service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedAdminSession) - an
            ``AssessmentOfferedAdminSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_offered_admin()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_admin()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_offered_notification_session(self, assessment_offered_receiver=None, proxy=None):
    """Gets the notification session for notifications pertaining to offered assessment changes.

    arg:    assessment_offered_receiver
            (osid.assessment.AssessmentOfferedReceiver): the
            assessment offered receiver interface
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedNotificationSession) -
            an ``AssessmentOfferedNotificationSession``
    raise:  NullArgument - ``assessment_offered_receiver`` or
            ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_offered_notification()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_notification()`` is ``true``.*

    """
    # Fixed: the receiver must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if assessment_offered_receiver is None or proxy is None:
        raise NullArgument()
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_offered_notification_session_for_bank(self, assessment_offered_receiver=None, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the offered assessment notification service for the given bank.

    arg:    assessment_offered_receiver
            (osid.assessment.AssessmentOfferedReceiver): the
            assessment offered receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedNotificationSession) -
            a ``AssessmentOfferedNotificationSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``assessment_offered_receiver, bank_id``
            or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented -
            ``supports_assessment_offered_notification()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_notification()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Fixed: bank_id must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if assessment_offered_receiver is None or bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_offered_bank_session(self, proxy=None):
    """Gets the session for retrieving offered assessments to bank mappings.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedBankSession) - an
            ``AssessmentOfferedBankSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_offered_bank()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_bank()`` is ``true``.*

    """
    # Guard: a null proxy violates the documented contract.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_offered_bank_assignment_session(self, proxy=None):
    """Gets the session for assigning offered assessments to bank mappings.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedBankAssignmentSession)
            - an ``AssessmentOfferedBankAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_offered_bank_assignment()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_bank_assignment()`` is ``true``.*

    """
    # A proxy is mandatory for every proxy-manager session request.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_offered_smart_bank_session(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment offered smart banking service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentOfferedSmartBankSession) - an
            ``AssessmentOfferedSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_offered_smart_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_offered_smart_bank()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    # Fixed: bank_id must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if bank_id is None or proxy is None:
        raise NullArgument()
    # Stub: no smart-bank session is available here.
    raise Unimplemented()
def get_assessment_taken_lookup_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken lookup service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenLookupSession) - an
            ``AssessmentTakenLookupSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_lookup()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_lookup()`` is ``true``.*

    """
    # Reject a missing proxy before anything else.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_taken_lookup_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken lookup service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenLookupSession) - an
            ``AssessmentTakenLookupSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_taken_lookup()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_lookup()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_taken_query_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenQuerySession) - an
            ``AssessmentTakenQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_query()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_query()`` is ``true``.*

    """
    # Guard: a null proxy violates the documented contract.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_taken_query_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken query service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenQuerySession) - an
            ``AssessmentTakenQuerySession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_taken_query()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_query()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_taken_search_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken search service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenSearchSession) - an
            ``AssessmentTakenSearchSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_search()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_search()`` is ``true``.*

    """
    # A proxy is mandatory for every proxy-manager session request.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_taken_search_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken search service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenSearchSession) - an
            ``AssessmentTakenSearchSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_taken_search()``
            or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_search()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_taken_admin_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken administration service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenAdminSession) - an
            ``AssessmentTakenAdminSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_admin()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_admin()`` is ``true``.*

    """
    # Reject a missing proxy before anything else.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_taken_admin_session_for_bank(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken admin service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenAdminSession) - an
            ``AssessmentTakenAdminSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_assessment_taken_admin()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_admin()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required by the NullArgument clause above.
    if bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_taken_notification_session(self, assessment_taken_receiver=None, proxy=None):
    """Gets the notification session for notifications pertaining to taken assessment changes.

    arg:    assessment_taken_receiver
            (osid.assessment.AssessmentTakenReceiver): the
            assessment taken receiver interface
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenNotificationSession) -
            an ``AssessmentTakenNotificationSession``
    raise:  NullArgument - ``assessment_taken_receiver`` or
            ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_taken_notification()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_notification()`` is ``true``.*

    """
    # Fixed: the receiver must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if assessment_taken_receiver is None or proxy is None:
        raise NullArgument()
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_taken_notification_session_for_bank(self, assessment_taken_receiver=None, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the taken assessment notification service for the given bank.

    arg:    assessment_taken_receiver
            (osid.assessment.AssessmentTakenReceiver): the
            assessment taken receiver interface
    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenNotificationSession) -
            an ``AssessmentTakenNotificationSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``assessment_taken_receiver, bank_id`` or
            ``proxy`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented -
            ``supports_assessment_taken_notification()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_notification()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    # Fixed: bank_id must also be rejected when null, per the
    # NullArgument clause above (it was previously unchecked).
    if assessment_taken_receiver is None or bank_id is None or proxy is None:
        raise NullArgument()  # instantiated, consistent with sibling methods
    # Stub: the session itself is not provided by this implementation.
    raise Unimplemented()
def get_assessment_taken_bank_session(self, proxy=None):
    """Gets the session for retrieving taken assessments to bank mappings.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenBankSession) - an
            ``AssessmentTakenBankSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_taken_bank()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_bank()`` is ``true``.*

    """
    # Guard: a null proxy violates the documented contract.
    if proxy is None:
        raise NullArgument()
    # Session construction is left to concrete implementations.
    raise Unimplemented()
def get_assessment_taken_bank_assignment_session(self, proxy=None):
    """Gets the session for assigning taken assessments to bank mappings.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenBankAssignmentSession) -
            an ``AssessmentTakenBankAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_taken_bank_assignment()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_bank_assignment()`` is ``true``.*

    """
    # A proxy is mandatory for every proxy-manager session request.
    if proxy is None:
        raise NullArgument()
    # This stub never supplies the session.
    raise Unimplemented()
def get_assessment_taken_smart_bank_session(self, bank_id=None, proxy=None):
    """Gets the ``OsidSession`` associated with the assessment taken smart banking service for the given bank.

    arg:    bank_id (osid.id.Id): the ``Id`` of the bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentTakenSmartBankSession) - an
            ``AssessmentTakenSmartBankSession``
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_assessment_taken_smart_bank()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_taken_smart_bank()`` and
    ``supports_visible_federation()`` is ``true``.*

    """
    # The NullArgument contract above covers BOTH arguments; the previous
    # implementation only validated ``proxy`` and silently accepted a
    # null ``bank_id``.
    if bank_id is None or proxy is None:
        raise NullArgument()
    raise Unimplemented()
def get_bank_lookup_session(self, proxy=None):
    """Gets the OsidSession associated with the bank lookup service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankLookupSession) - a
            ``BankLookupSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_lookup() is false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_lookup()`` is true.*

    """
    # Stub: reject a missing proxy, otherwise the service is unimplemented.
    if proxy is not None:
        raise Unimplemented()
    raise NullArgument()
def get_bank_query_session(self, proxy=None):
    """Gets the OsidSession associated with the bank query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankQuerySession) - a
            ``BankQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_query() is false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_query()`` is true.*

    """
    # Stub: reject a missing proxy, otherwise the service is unimplemented.
    if proxy is not None:
        raise Unimplemented()
    raise NullArgument()
def get_bank_search_session(self, proxy=None):
    """Gets the OsidSession associated with the bank search service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankSearchSession) - a
            ``BankSearchSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_search() is false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_search()`` is true.*

    """
    # Stub: reject a missing proxy, otherwise the service is unimplemented.
    if proxy is not None:
        raise Unimplemented()
    raise NullArgument()
def get_bank_admin_session(self, proxy=None):
    """Gets the OsidSession associated with the bank administration service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankAdminSession) - a
            ``BankAdminSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_admin() is false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_admin()`` is true.*

    """
    # Stub: reject a missing proxy, otherwise the service is unimplemented.
    if proxy is not None:
        raise Unimplemented()
    raise NullArgument()
def get_bank_notification_session(self, bank_receiver=None, proxy=None):
    """Gets the notification session for notifications pertaining to bank service changes.

    arg:    bank_receiver (osid.assessment.BankReceiver): the bank
            receiver interface
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankNotificationSession) - a
            ``BankNotificationSession``
    raise:  NullArgument - ``bank_receiver`` or ``proxy`` is
            ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_notification() is
            false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_notification()`` is true.*

    """
    # The documented NullArgument contract covers both arguments, and the
    # sibling *_notification_session methods in this class validate their
    # receiver as well; the previous implementation only checked ``proxy``.
    if bank_receiver is None or proxy is None:
        raise NullArgument()
    raise Unimplemented()
def get_bank_hierarchy_session(self, proxy=None):
    """Gets the session traversing bank hierarchies.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankHierarchySession) - a
            ``BankHierarchySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_hierarchy() is false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_hierarchy()`` is true.*

    """
    # Stub: reject a missing proxy, otherwise the service is unimplemented.
    if proxy is not None:
        raise Unimplemented()
    raise NullArgument()
def get_bank_hierarchy_design_session(self, proxy=None):
    """Gets the session designing bank hierarchies.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.BankHierarchyDesignSession) - a
            ``BankHierarchyDesignSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_bank_hierarchy_design() is
            false``
    *compliance: optional -- This method must be implemented if
    ``supports_bank_hierarchy_design()`` is true.*

    """
    # Stub: reject a missing proxy, otherwise the service is unimplemented.
    if proxy is not None:
        raise Unimplemented()
    raise NullArgument()
def get_assessment_authoring_proxy_manager(self):
    """Gets an ``AssessmentAuthoringProxyManager``.

    return:
            (osid.assessment.authoring.AssessmentAuthoringProxyManag
            er) - an ``AssessmentAuthoringProxyManager``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_authoring() is
            false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_authoring()`` is true.*

    """
    # This getter takes no ``proxy`` argument.  The old body referenced
    # an undefined local ``proxy``, so any call raised NameError instead
    # of the documented Unimplemented error.
    raise Unimplemented()

assessment_authoring_proxy_manager = property(fget=get_assessment_authoring_proxy_manager)
def get_assessment_batch_proxy_manager(self):
    """Gets an ``AssessmentBatchProxyManager``.

    return: (osid.assessment.batch.AssessmentBatchProxyManager) - an
            ``AssessmentBatchProxyManager``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment_batch() is false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment_batch()`` is true.*

    """
    # This getter takes no ``proxy`` argument.  The old body referenced
    # an undefined local ``proxy``, so any call raised NameError instead
    # of the documented Unimplemented error.
    raise Unimplemented()

assessment_batch_proxy_manager = property(fget=get_assessment_batch_proxy_manager)
| 42.338079 | 141 | 0.639959 | 15,469 | 151,655 | 6.109445 | 0.016549 | 0.052568 | 0.030516 | 0.034876 | 0.973473 | 0.951686 | 0.919667 | 0.867184 | 0.850223 | 0.82905 | 0 | 0 | 0.263981 | 151,655 | 3,581 | 142 | 42.349902 | 0.846678 | 0.677901 | 0 | 0.593074 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.297258 | false | 0 | 0.007215 | 0 | 0.477633 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8688f8e3aaa2b41ca57d7892ec4e8367c72763e5 | 177,962 | py | Python | tests/unit/gapic/aiplatform_v1beta1/test_vizier_service.py | TheMichaelHu/python-aiplatform | e03f373a7e44c354eda88875a41c771f6d7e3ce1 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/aiplatform_v1beta1/test_vizier_service.py | TheMichaelHu/python-aiplatform | e03f373a7e44c354eda88875a41c771f6d7e3ce1 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/aiplatform_v1beta1/test_vizier_service.py | TheMichaelHu/python-aiplatform | e03f373a7e44c354eda88875a41c771f6d7e3ce1 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.aiplatform_v1beta1.services.vizier_service import (
VizierServiceAsyncClient,
)
from google.cloud.aiplatform_v1beta1.services.vizier_service import VizierServiceClient
from google.cloud.aiplatform_v1beta1.services.vizier_service import pagers
from google.cloud.aiplatform_v1beta1.services.vizier_service import transports
from google.cloud.aiplatform_v1beta1.types import study
from google.cloud.aiplatform_v1beta1.types import study as gca_study
from google.cloud.aiplatform_v1beta1.types import vizier_service
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import struct_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
    """Fake mTLS client-certificate callback returning dummy cert/key bytes."""
    return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a test endpoint for *client*.

    If the client's DEFAULT_ENDPOINT points at localhost, substitute a
    fake googleapis host so mTLS endpoint derivation can be exercised;
    otherwise pass the default endpoint through unchanged.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps googleapis hosts to their mTLS form."""
    plain = "example.googleapis.com"
    mtls = "example.mtls.googleapis.com"
    sandbox = "example.sandbox.googleapis.com"
    sandbox_mtls = "example.mtls.sandbox.googleapis.com"
    non_google = "api.example.com"

    convert = VizierServiceClient._get_default_mtls_endpoint

    # None passes through untouched.
    assert convert(None) is None
    # Plain and sandbox hosts gain the mtls label; already-mtls hosts are
    # idempotent; non-googleapis hosts are returned unchanged.
    for given, expected in [
        (plain, mtls),
        (mtls, mtls),
        (sandbox, sandbox_mtls),
        (sandbox_mtls, sandbox_mtls),
        (non_google, non_google),
    ]:
        assert convert(given) == expected
@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (VizierServiceClient, "grpc"),
        (VizierServiceAsyncClient, "grpc_asyncio"),
    ],
)
def test_vizier_service_client_from_service_account_info(client_class, transport_name):
    """Clients built from service-account info adopt the parsed credentials."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as mock_factory:
        mock_factory.return_value = anon_creds
        account_info = {"valid": True}
        client = client_class.from_service_account_info(
            account_info, transport=transport_name
        )
        # The credentials produced by the factory are wired into the transport.
        assert client.transport._credentials == anon_creds
        assert isinstance(client, client_class)

        assert client.transport._host == ("aiplatform.googleapis.com:443")
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.VizierServiceGrpcTransport, "grpc"),
        (transports.VizierServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    ],
)
def test_vizier_service_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """with_always_use_jwt_access is invoked iff always_use_jwt_access=True."""
    for jwt_enabled in (True, False):
        with mock.patch.object(
            service_account.Credentials, "with_always_use_jwt_access", create=True
        ) as mock_jwt:
            sa_creds = service_account.Credentials(None, None, None)
            transport = transport_class(
                credentials=sa_creds, always_use_jwt_access=jwt_enabled
            )
            if jwt_enabled:
                mock_jwt.assert_called_once_with(True)
            else:
                mock_jwt.assert_not_called()
@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (VizierServiceClient, "grpc"),
        (VizierServiceAsyncClient, "grpc_asyncio"),
    ],
)
def test_vizier_service_client_from_service_account_file(client_class, transport_name):
    """Both from_service_account_file and its _json alias load credentials."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as mock_factory:
        mock_factory.return_value = anon_creds
        # The _json constructor is an alias of the _file constructor and
        # must behave identically.
        for constructor in (
            client_class.from_service_account_file,
            client_class.from_service_account_json,
        ):
            client = constructor("dummy/file/path.json", transport=transport_name)
            assert client.transport._credentials == anon_creds
            assert isinstance(client, client_class)

        assert client.transport._host == ("aiplatform.googleapis.com:443")
def test_vizier_service_client_get_transport_class():
    """Default and 'grpc'-named lookups both resolve to the gRPC transport."""
    default_transport = VizierServiceClient.get_transport_class()
    assert default_transport in [
        transports.VizierServiceGrpcTransport,
    ]

    named_transport = VizierServiceClient.get_transport_class("grpc")
    assert named_transport == transports.VizierServiceGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (VizierServiceClient, transports.VizierServiceGrpcTransport, "grpc"),
        (
            VizierServiceAsyncClient,
            transports.VizierServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    VizierServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VizierServiceClient),
)
@mock.patch.object(
    VizierServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VizierServiceAsyncClient),
)
def test_vizier_service_client_client_options(
    client_class, transport_class, transport_name
):
    """Exercise how ClientOptions and the GOOGLE_API_USE_MTLS_ENDPOINT /
    GOOGLE_API_USE_CLIENT_CERTIFICATE env vars drive transport construction.

    Each scenario patches the transport's __init__ and asserts the exact
    keyword arguments the client passes to it.
    """
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(VizierServiceClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(VizierServiceClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        # An explicit api_endpoint overrides any endpoint heuristics.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (VizierServiceClient, transports.VizierServiceGrpcTransport, "grpc", "true"),
        (
            VizierServiceAsyncClient,
            transports.VizierServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (VizierServiceClient, transports.VizierServiceGrpcTransport, "grpc", "false"),
        (
            VizierServiceAsyncClient,
            transports.VizierServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    VizierServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VizierServiceClient),
)
@mock.patch.object(
    VizierServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VizierServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_vizier_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With GOOGLE_API_USE_MTLS_ENDPOINT="auto", the client should switch
    to the mTLS endpoint exactly when a client certificate is available AND
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" (parametrized here as
    ``use_client_cert_env``).
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                # Cert provided but env disabled: plain endpoint, no cert.
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                # No cert anywhere: always the plain endpoint, regardless of env.
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class", [VizierServiceClient, VizierServiceAsyncClient]
)
@mock.patch.object(
    VizierServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VizierServiceClient),
)
@mock.patch.object(
    VizierServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VizierServiceAsyncClient),
)
def test_vizier_service_client_get_mtls_endpoint_and_cert_source(client_class):
    """get_mtls_endpoint_and_cert_source resolves the (endpoint, cert source)
    pair from ClientOptions and the two GOOGLE_API_USE_* env vars.
    """
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        # Explicit options win: both values pass straight through.
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        # Env var disables the provided cert source.
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (VizierServiceClient, transports.VizierServiceGrpcTransport, "grpc"),
        (
            VizierServiceAsyncClient,
            transports.VizierServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_vizier_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes supplied in ClientOptions are forwarded to the transport."""
    scoped_options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, "__init__") as mock_init:
        mock_init.return_value = None
        client = client_class(client_options=scoped_options, transport=transport_name)
        # Only the scopes kwarg differs from the defaults.
        mock_init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            VizierServiceClient,
            transports.VizierServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            VizierServiceAsyncClient,
            transports.VizierServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_vizier_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """A credentials_file in ClientOptions is forwarded to the transport."""
    file_options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as mock_init:
        mock_init.return_value = None
        client = client_class(client_options=file_options, transport=transport_name)
        # credentials stays None; only credentials_file is populated.
        mock_init.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_vizier_service_client_client_options_from_dict():
    """client_options may also be passed as a plain dict instead of ClientOptions."""
    with mock.patch(
        "google.cloud.aiplatform_v1beta1.services.vizier_service.transports.VizierServiceGrpcTransport.__init__"
    ) as mock_transport_init:
        mock_transport_init.return_value = None
        client = VizierServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        # The dict is coerced and its api_endpoint reaches the transport.
        mock_transport_init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            VizierServiceClient,
            transports.VizierServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            VizierServiceAsyncClient,
            transports.VizierServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_vizier_service_client_create_channel_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """Credentials loaded from credentials_file must be the ones used when
    the gRPC channel is created (not the ADC credentials).
    """
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        # file_creds (not the ADC creds) must reach create_channel.
        create_channel.assert_called_with(
            "aiplatform.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=None,
            default_host="aiplatform.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.CreateStudyRequest,
        dict,
    ],
)
def test_create_study(request_type, transport: str = "grpc"):
    """create_study sends a CreateStudyRequest and returns the stubbed Study."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 fields are all optional and the API layer is mocked, so an
    # empty request is sufficient.
    request = request_type()

    # Swap the gRPC stub method for a canned response.
    with mock.patch.object(type(client.transport.create_study), "__call__") as rpc_mock:
        rpc_mock.return_value = gca_study.Study(
            name="name_value",
            display_name="display_name_value",
            state=gca_study.Study.State.ACTIVE,
            inactive_reason="inactive_reason_value",
        )
        response = client.create_study(request)

        # Exactly one RPC was issued, carrying an (empty) CreateStudyRequest.
        assert len(rpc_mock.mock_calls) == 1
        _, rpc_args, _ = rpc_mock.mock_calls[0]
        assert rpc_args[0] == vizier_service.CreateStudyRequest()

    # The canned Study round-trips unchanged.
    assert isinstance(response, gca_study.Study)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == gca_study.Study.State.ACTIVE
    assert response.inactive_reason == "inactive_reason_value"
def test_create_study_empty_call():
    """Calling create_study() with no arguments still sends a default request.

    Coverage failsafe for the request=None / no-flattened-fields path.
    """
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    with mock.patch.object(type(client.transport.create_study), "__call__") as rpc_mock:
        client.create_study()
        rpc_mock.assert_called()
        _, rpc_args, _ = rpc_mock.mock_calls[0]
        assert rpc_args[0] == vizier_service.CreateStudyRequest()
@pytest.mark.asyncio
async def test_create_study_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.CreateStudyRequest
):
    """Async variant: create_study awaits the stub and returns the Study."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # An empty request suffices; the API layer is fully mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.create_study), "__call__") as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gca_study.Study(
                name="name_value",
                display_name="display_name_value",
                state=gca_study.Study.State.ACTIVE,
                inactive_reason="inactive_reason_value",
            )
        )
        response = await client.create_study(request)

        # The stub was invoked with the expected request type.
        assert len(rpc_mock.mock_calls)
        _, rpc_args, _ = rpc_mock.mock_calls[0]
        assert rpc_args[0] == vizier_service.CreateStudyRequest()

    # The canned Study round-trips unchanged.
    assert isinstance(response, gca_study.Study)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == gca_study.Study.State.ACTIVE
    assert response.inactive_reason == "inactive_reason_value"
@pytest.mark.asyncio
async def test_create_study_async_from_dict():
    """A plain dict request works for the async client as well."""
    await test_create_study_async(request_type=dict)
def test_create_study_field_headers():
    """URI-routed request fields must be sent as x-goog-request-params metadata."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so a header must be emitted.
    request = vizier_service.CreateStudyRequest()
    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.create_study), "__call__") as rpc_mock:
        rpc_mock.return_value = gca_study.Study()
        client.create_study(request)

        # One RPC, with our request passed through untouched.
        assert len(rpc_mock.mock_calls) == 1
        _, rpc_args, _ = rpc_mock.mock_calls[0]
        assert rpc_args[0] == request

    # The routing header mirrors request.parent.
    _, _, rpc_kwargs = rpc_mock.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in rpc_kwargs["metadata"]
@pytest.mark.asyncio
async def test_create_study_field_headers_async():
    """Async variant: ``parent`` is forwarded as an x-goog-request-params header."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields are sent as field headers; give one a non-empty value.
    request = vizier_service.CreateStudyRequest()
    request.parent = "parent/value"
    with mock.patch.object(type(client.transport.create_study), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_study.Study())
        await client.create_study(request)
        # The stub was invoked with the original request object.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from ``parent`` rode along in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    expected_header = ("x-goog-request-params", "parent=parent/value")
    assert expected_header in call_kwargs["metadata"]
def test_create_study_flattened():
    """Flattened keyword arguments are packed into a CreateStudyRequest."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.create_study), "__call__") as rpc:
        rpc.return_value = gca_study.Study()
        # Invoke with a truthy value for every flattened field.
        client.create_study(
            parent="parent_value",
            study=gca_study.Study(name="name_value"),
        )
        # The single captured request carries both flattened values.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == "parent_value"
        assert call_args[0].study == gca_study.Study(name="name_value")
def test_create_study_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.create_study(
            vizier_service.CreateStudyRequest(),
            parent="parent_value",
            study=gca_study.Study(name="name_value"),
        )
@pytest.mark.asyncio
async def test_create_study_flattened_async():
    """Async flattened kwargs are packed into a CreateStudyRequest."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_study), "__call__") as call:
        # Designate an appropriate return value for the call.
        # Fix: removed the dead synchronous `call.return_value = gca_study.Study()`
        # assignment that was immediately overwritten by the line below.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_study.Study())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_study(
            parent="parent_value",
            study=gca_study.Study(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].study == gca_study.Study(name="name_value")
@pytest.mark.asyncio
async def test_create_study_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.create_study(
            vizier_service.CreateStudyRequest(),
            parent="parent_value",
            study=gca_study.Study(name="name_value"),
        )
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.GetStudyRequest,
        dict,
    ],
)
def test_get_study(request_type, transport: str = "grpc"):
    """GetStudy: an empty request reaches the stub and the reply is surfaced."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # proto3 fields are all optional and the API is mocked, so an
    # empty request is sufficient.
    request = request_type()
    with mock.patch.object(type(client.transport.get_study), "__call__") as rpc:
        # Canned reply for the stubbed unary call.
        rpc.return_value = study.Study(
            name="name_value",
            display_name="display_name_value",
            state=study.Study.State.ACTIVE,
            inactive_reason="inactive_reason_value",
        )
        response = client.get_study(request)
        # The stub was invoked exactly once with the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.GetStudyRequest()
    # The canned reply round-trips to the caller unchanged.
    assert isinstance(response, study.Study)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == study.Study.State.ACTIVE
    assert response.inactive_reason == "inactive_reason_value"
def test_get_study_empty_call():
    """Coverage failsafe: no request and no flattened fields still issues an RPC."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    with mock.patch.object(type(client.transport.get_study), "__call__") as rpc:
        client.get_study()
        rpc.assert_called()
        # A default-constructed request was synthesized for the call.
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.GetStudyRequest()
@pytest.mark.asyncio
async def test_get_study_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.GetStudyRequest
):
    """Async GetStudy mirrors the sync path: stub hit once, reply surfaced."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request suffices: proto3 optional fields + fully mocked API.
    request = request_type()
    with mock.patch.object(type(client.transport.get_study), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            study.Study(
                name="name_value",
                display_name="display_name_value",
                state=study.Study.State.ACTIVE,
                inactive_reason="inactive_reason_value",
            )
        )
        response = await client.get_study(request)
        # The stub was invoked with the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.GetStudyRequest()
    # The canned reply round-trips to the caller unchanged.
    assert isinstance(response, study.Study)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == study.Study.State.ACTIVE
    assert response.inactive_reason == "inactive_reason_value"
@pytest.mark.asyncio
async def test_get_study_async_from_dict():
    # Coverage: the async get_study surface also accepts a plain dict
    # in place of a GetStudyRequest proto.
    await test_get_study_async(request_type=dict)
def test_get_study_field_headers():
    """The ``name`` field must be echoed as an x-goog-request-params header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields are sent as field headers; give one a non-empty value.
    request = vizier_service.GetStudyRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.get_study), "__call__") as rpc:
        rpc.return_value = study.Study()
        client.get_study(request)
        # Exactly one stub invocation, carrying the original request object.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from ``name`` rode along in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    expected_header = ("x-goog-request-params", "name=name/value")
    assert expected_header in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_study_field_headers_async():
    """Async variant: ``name`` is forwarded as an x-goog-request-params header."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields are sent as field headers; give one a non-empty value.
    request = vizier_service.GetStudyRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.get_study), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Study())
        await client.get_study(request)
        # The stub was invoked with the original request object.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from ``name`` rode along in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    expected_header = ("x-goog-request-params", "name=name/value")
    assert expected_header in call_kwargs["metadata"]
def test_get_study_flattened():
    """Flattened keyword arguments are packed into a GetStudyRequest."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.get_study), "__call__") as rpc:
        rpc.return_value = study.Study()
        # Invoke with a truthy value for every flattened field.
        client.get_study(
            name="name_value",
        )
        # The single captured request carries the flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].name == "name_value"
def test_get_study_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.get_study(
            vizier_service.GetStudyRequest(),
            name="name_value",
        )
@pytest.mark.asyncio
async def test_get_study_flattened_async():
    """Async flattened kwargs are packed into a GetStudyRequest."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_study), "__call__") as call:
        # Designate an appropriate return value for the call.
        # Fix: removed the dead synchronous `call.return_value = study.Study()`
        # assignment that was immediately overwritten by the line below.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Study())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_study(
            name="name_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_study_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.get_study(
            vizier_service.GetStudyRequest(),
            name="name_value",
        )
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.ListStudiesRequest,
        dict,
    ],
)
def test_list_studies(request_type, transport: str = "grpc"):
    """ListStudies: an empty request reaches the stub and a pager is returned."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request suffices: proto3 optional fields + fully mocked API.
    request = request_type()
    with mock.patch.object(type(client.transport.list_studies), "__call__") as rpc:
        # Canned reply for the stubbed unary call.
        rpc.return_value = vizier_service.ListStudiesResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_studies(request)
        # The stub was invoked exactly once with the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.ListStudiesRequest()
    # The response surfaces as a pager exposing the raw next_page_token.
    assert isinstance(response, pagers.ListStudiesPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_studies_empty_call():
    """Coverage failsafe: no request and no flattened fields still issues an RPC."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    with mock.patch.object(type(client.transport.list_studies), "__call__") as rpc:
        client.list_studies()
        rpc.assert_called()
        # A default-constructed request was synthesized for the call.
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.ListStudiesRequest()
@pytest.mark.asyncio
async def test_list_studies_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.ListStudiesRequest
):
    """Async ListStudies mirrors the sync path and yields an async pager."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request suffices: proto3 optional fields + fully mocked API.
    request = request_type()
    with mock.patch.object(type(client.transport.list_studies), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListStudiesResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_studies(request)
        # The stub was invoked with the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.ListStudiesRequest()
    # The response surfaces as an async pager exposing the raw next_page_token.
    assert isinstance(response, pagers.ListStudiesAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_studies_async_from_dict():
    # Coverage: the async list_studies surface also accepts a plain dict
    # in place of a ListStudiesRequest proto.
    await test_list_studies_async(request_type=dict)
def test_list_studies_field_headers():
    """The ``parent`` field must be echoed as an x-goog-request-params header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields are sent as field headers; give one a non-empty value.
    request = vizier_service.ListStudiesRequest()
    request.parent = "parent/value"
    with mock.patch.object(type(client.transport.list_studies), "__call__") as rpc:
        rpc.return_value = vizier_service.ListStudiesResponse()
        client.list_studies(request)
        # Exactly one stub invocation, carrying the original request object.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from ``parent`` rode along in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    expected_header = ("x-goog-request-params", "parent=parent/value")
    assert expected_header in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_list_studies_field_headers_async():
    """Async variant: ``parent`` is forwarded as an x-goog-request-params header."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields are sent as field headers; give one a non-empty value.
    request = vizier_service.ListStudiesRequest()
    request.parent = "parent/value"
    with mock.patch.object(type(client.transport.list_studies), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListStudiesResponse()
        )
        await client.list_studies(request)
        # The stub was invoked with the original request object.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from ``parent`` rode along in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    expected_header = ("x-goog-request-params", "parent=parent/value")
    assert expected_header in call_kwargs["metadata"]
def test_list_studies_flattened():
    """Flattened keyword arguments are packed into a ListStudiesRequest."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.list_studies), "__call__") as rpc:
        rpc.return_value = vizier_service.ListStudiesResponse()
        # Invoke with a truthy value for every flattened field.
        client.list_studies(
            parent="parent_value",
        )
        # The single captured request carries the flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == "parent_value"
def test_list_studies_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.list_studies(
            vizier_service.ListStudiesRequest(),
            parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_studies_flattened_async():
    """Async flattened kwargs are packed into a ListStudiesRequest."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_studies), "__call__") as call:
        # Designate an appropriate return value for the call.
        # Fix: removed the dead synchronous
        # `call.return_value = vizier_service.ListStudiesResponse()` assignment
        # that was immediately overwritten by the line below.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListStudiesResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_studies(
            parent="parent_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_studies_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.list_studies(
            vizier_service.ListStudiesRequest(),
            parent="parent_value",
        )
def test_list_studies_pager(transport_name: str = "grpc"):
    """The sync pager walks every page until the token runs out."""
    client = VizierServiceClient(
        # Fix: instantiate the credentials — the class object itself was being
        # passed, inconsistent with every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_studies), "__call__") as call:
        # Set the response to a series of pages (3 + 0 + 1 + 2 studies),
        # followed by RuntimeError to catch any over-fetch.
        call.side_effect = (
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                    study.Study(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListStudiesResponse(
                studies=[],
                next_page_token="def",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                ],
            ),
            RuntimeError,
        )
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_studies(request={})
        # Routing metadata is retained on the pager for subsequent page fetches.
        assert pager._metadata == metadata
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, study.Study) for i in results)
def test_list_studies_pages(transport_name: str = "grpc"):
    """`pages` yields one raw page per response, with matching tokens."""
    client = VizierServiceClient(
        # Fix: instantiate the credentials — the class object itself was being
        # passed, inconsistent with every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_studies), "__call__") as call:
        # Set the response to a series of pages; RuntimeError guards over-fetch.
        call.side_effect = (
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                    study.Study(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListStudiesResponse(
                studies=[],
                next_page_token="def",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_studies(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_studies_async_pager():
    """The async pager walks every page until the token runs out."""
    client = VizierServiceAsyncClient(
        # Fix: instantiate the credentials — the class object itself was being
        # passed, inconsistent with every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_studies), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages; RuntimeError guards over-fetch.
        call.side_effect = (
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                    study.Study(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListStudiesResponse(
                studies=[],
                next_page_token="def",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_studies(
            request={},
        )
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:  # pragma: no branch
            responses.append(response)
        assert len(responses) == 6
        assert all(isinstance(i, study.Study) for i in responses)
@pytest.mark.asyncio
async def test_list_studies_async_pages():
    """Async `pages` yields one raw page per response, with matching tokens."""
    client = VizierServiceAsyncClient(
        # Fix: instantiate the credentials — the class object itself was being
        # passed, inconsistent with every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_studies), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages; RuntimeError guards over-fetch.
        call.side_effect = (
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                    study.Study(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListStudiesResponse(
                studies=[],
                next_page_token="def",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListStudiesResponse(
                studies=[
                    study.Study(),
                    study.Study(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (
            await client.list_studies(request={})
        ).pages:  # pragma: no branch
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.DeleteStudyRequest,
        dict,
    ],
)
def test_delete_study(request_type, transport: str = "grpc"):
    """DeleteStudy: empty request reaches the stub; empty reply maps to None."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request suffices: proto3 optional fields + fully mocked API.
    request = request_type()
    with mock.patch.object(type(client.transport.delete_study), "__call__") as rpc:
        rpc.return_value = None
        response = client.delete_study(request)
        # The stub was invoked exactly once with the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.DeleteStudyRequest()
    # Delete returns nothing on success.
    assert response is None
def test_delete_study_empty_call():
    """Coverage failsafe: no request and no flattened fields still issues an RPC."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    with mock.patch.object(type(client.transport.delete_study), "__call__") as rpc:
        client.delete_study()
        rpc.assert_called()
        # A default-constructed request was synthesized for the call.
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.DeleteStudyRequest()
@pytest.mark.asyncio
async def test_delete_study_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.DeleteStudyRequest
):
    """Async DeleteStudy mirrors the sync path; empty reply maps to None."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request suffices: proto3 optional fields + fully mocked API.
    request = request_type()
    with mock.patch.object(type(client.transport.delete_study), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_study(request)
        # The stub was invoked with the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.DeleteStudyRequest()
    # Delete returns nothing on success.
    assert response is None
@pytest.mark.asyncio
async def test_delete_study_async_from_dict():
    # Coverage: the async delete_study surface also accepts a plain dict
    # in place of a DeleteStudyRequest proto.
    await test_delete_study_async(request_type=dict)
def test_delete_study_field_headers():
    """The ``name`` field must be echoed as an x-goog-request-params header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields are sent as field headers; give one a non-empty value.
    request = vizier_service.DeleteStudyRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.delete_study), "__call__") as rpc:
        rpc.return_value = None
        client.delete_study(request)
        # Exactly one stub invocation, carrying the original request object.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from ``name`` rode along in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    expected_header = ("x-goog-request-params", "name=name/value")
    assert expected_header in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_study_field_headers_async():
    """Async variant: ``name`` is forwarded as an x-goog-request-params header."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields are sent as field headers; give one a non-empty value.
    request = vizier_service.DeleteStudyRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.delete_study), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_study(request)
        # The stub was invoked with the original request object.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from ``name`` rode along in the metadata.
    _, _, call_kwargs = rpc.mock_calls[0]
    expected_header = ("x-goog-request-params", "name=name/value")
    assert expected_header in call_kwargs["metadata"]
def test_delete_study_flattened():
    """Flattened keyword arguments are packed into a DeleteStudyRequest."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.delete_study), "__call__") as rpc:
        rpc.return_value = None
        # Invoke with a truthy value for every flattened field.
        client.delete_study(
            name="name_value",
        )
        # The single captured request carries the flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].name == "name_value"
def test_delete_study_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.delete_study(
            vizier_service.DeleteStudyRequest(),
            name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_study_flattened_async():
    """Async flattened kwargs are packed into a DeleteStudyRequest."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_study), "__call__") as call:
        # Designate an appropriate return value for the call.
        # Fix: removed the dead synchronous `call.return_value = None`
        # assignment that was immediately overwritten by the line below.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_study(
            name="name_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_study_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.delete_study(
            vizier_service.DeleteStudyRequest(),
            name="name_value",
        )
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.LookupStudyRequest,
        dict,
    ],
)
def test_lookup_study(request_type, transport: str = "grpc"):
    """LookupStudy: an empty request reaches the stub and the reply is surfaced."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request suffices: proto3 optional fields + fully mocked API.
    request = request_type()
    with mock.patch.object(type(client.transport.lookup_study), "__call__") as rpc:
        # Canned reply for the stubbed unary call.
        rpc.return_value = study.Study(
            name="name_value",
            display_name="display_name_value",
            state=study.Study.State.ACTIVE,
            inactive_reason="inactive_reason_value",
        )
        response = client.lookup_study(request)
        # The stub was invoked exactly once with the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.LookupStudyRequest()
    # The canned reply round-trips to the caller unchanged.
    assert isinstance(response, study.Study)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == study.Study.State.ACTIVE
    assert response.inactive_reason == "inactive_reason_value"
def test_lookup_study_empty_call():
    """Coverage failsafe: no request and no flattened fields still issues an RPC."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    with mock.patch.object(type(client.transport.lookup_study), "__call__") as rpc:
        client.lookup_study()
        rpc.assert_called()
        # A default-constructed request was synthesized for the call.
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.LookupStudyRequest()
@pytest.mark.asyncio
async def test_lookup_study_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.LookupStudyRequest
):
    """Async LookupStudy mirrors the sync path: stub hit once, reply surfaced."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request suffices: proto3 optional fields + fully mocked API.
    request = request_type()
    with mock.patch.object(type(client.transport.lookup_study), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            study.Study(
                name="name_value",
                display_name="display_name_value",
                state=study.Study.State.ACTIVE,
                inactive_reason="inactive_reason_value",
            )
        )
        response = await client.lookup_study(request)
        # The stub was invoked with the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == vizier_service.LookupStudyRequest()
    # The canned reply round-trips to the caller unchanged.
    assert isinstance(response, study.Study)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == study.Study.State.ACTIVE
    assert response.inactive_reason == "inactive_reason_value"
@pytest.mark.asyncio
async def test_lookup_study_async_from_dict():
    # Coverage: the async lookup_study surface also accepts a plain dict
    # in place of a LookupStudyRequest proto.
    await test_lookup_study_async(request_type=dict)
def test_lookup_study_field_headers():
    """lookup_study must propagate request.parent as an x-goog-request-params
    routing header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.LookupStudyRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.lookup_study), "__call__") as call:
        call.return_value = study.Study()
        client.lookup_study(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_lookup_study_field_headers_async():
    """Async variant: lookup_study must send request.parent as a routing
    header in the call metadata."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.LookupStudyRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.lookup_study), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Study())
        await client.lookup_study(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
def test_lookup_study_flattened():
    """lookup_study(parent=...) must copy the flattened field onto the
    generated request object."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.lookup_study), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = study.Study()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.lookup_study(
            parent="parent_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
def test_lookup_study_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and flattened keyword fields is
    # ambiguous, so the client is expected to reject the call.
    request = vizier_service.LookupStudyRequest()
    with pytest.raises(ValueError):
        client.lookup_study(request, parent="parent_value")
@pytest.mark.asyncio
async def test_lookup_study_flattened_async():
    """Async lookup_study(parent=...) must copy the flattened field onto the
    generated request object."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.lookup_study), "__call__") as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain study.Study() assignment that was immediately overwritten
        # here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Study())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.lookup_study(
            parent="parent_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_lookup_study_flattened_error_async():
    """Async client: request object plus flattened fields must raise."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing a request together with flattened keyword fields is ambiguous,
    # so the client is expected to reject the call.
    request = vizier_service.LookupStudyRequest()
    with pytest.raises(ValueError):
        await client.lookup_study(request, parent="parent_value")
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.SuggestTrialsRequest,
        dict,
    ],
)
def test_suggest_trials(request_type, transport: str = "grpc"):
    """suggest_trials sends a SuggestTrialsRequest and wraps the returned
    long-running Operation in a future."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.suggest_trials), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.suggest_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.SuggestTrialsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_suggest_trials_empty_call():
    """Coverage failsafe: suggest_trials() with no request object and no
    flattened fields must still invoke the stub with a default request."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is issued.
    with mock.patch.object(type(client.transport.suggest_trials), "__call__") as stub:
        client.suggest_trials()
        stub.assert_called()
        # The client must have synthesized an empty SuggestTrialsRequest.
        _, positional, _ = stub.mock_calls[0]
        assert positional[0] == vizier_service.SuggestTrialsRequest()
@pytest.mark.asyncio
async def test_suggest_trials_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.SuggestTrialsRequest
):
    """Async suggest_trials forwards the request and returns an LRO future."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.suggest_trials), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.suggest_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.SuggestTrialsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_suggest_trials_async_from_dict():
    """Async suggest_trials must also accept a plain dict as the request."""
    await test_suggest_trials_async(request_type=dict)
def test_suggest_trials_field_headers():
    """suggest_trials must propagate request.parent as an
    x-goog-request-params routing header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.SuggestTrialsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.suggest_trials), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.suggest_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_suggest_trials_field_headers_async():
    """Async variant: suggest_trials must send request.parent as a routing
    header in the call metadata."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.SuggestTrialsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.suggest_trials), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.suggest_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.CreateTrialRequest,
        dict,
    ],
)
def test_create_trial(request_type, transport: str = "grpc"):
    """create_trial sends a CreateTrialRequest and surfaces the Trial fields
    from the (faked) gRPC response."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_trial), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = study.Trial(
            name="name_value",
            id="id_value",
            state=study.Trial.State.REQUESTED,
            client_id="client_id_value",
            infeasible_reason="infeasible_reason_value",
            custom_job="custom_job_value",
        )
        response = client.create_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.CreateTrialRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
def test_create_trial_empty_call():
    """Coverage failsafe: create_trial() with no request object and no
    flattened fields must still invoke the stub with a default request."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is issued.
    with mock.patch.object(type(client.transport.create_trial), "__call__") as stub:
        client.create_trial()
        stub.assert_called()
        # The client must have synthesized an empty CreateTrialRequest.
        _, positional, _ = stub.mock_calls[0]
        assert positional[0] == vizier_service.CreateTrialRequest()
@pytest.mark.asyncio
async def test_create_trial_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.CreateTrialRequest
):
    """Async create_trial forwards the request and surfaces the Trial fields
    from the (faked) gRPC response."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_trial), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            study.Trial(
                name="name_value",
                id="id_value",
                state=study.Trial.State.REQUESTED,
                client_id="client_id_value",
                infeasible_reason="infeasible_reason_value",
                custom_job="custom_job_value",
            )
        )
        response = await client.create_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.CreateTrialRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
@pytest.mark.asyncio
async def test_create_trial_async_from_dict():
    """Async create_trial must also accept a plain dict as the request."""
    await test_create_trial_async(request_type=dict)
def test_create_trial_field_headers():
    """create_trial must propagate request.parent as an x-goog-request-params
    routing header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.CreateTrialRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_trial), "__call__") as call:
        call.return_value = study.Trial()
        client.create_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_trial_field_headers_async():
    """Async variant: create_trial must send request.parent as a routing
    header in the call metadata."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.CreateTrialRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_trial), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Trial())
        await client.create_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
def test_create_trial_flattened():
    """create_trial(parent=..., trial=...) must copy both flattened fields
    onto the generated request object."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_trial), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = study.Trial()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_trial(
            parent="parent_value",
            trial=study.Trial(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].trial
        mock_val = study.Trial(name="name_value")
        assert arg == mock_val
def test_create_trial_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and flattened keyword fields is
    # ambiguous, so the client is expected to reject the call.
    request = vizier_service.CreateTrialRequest()
    with pytest.raises(ValueError):
        client.create_trial(
            request,
            parent="parent_value",
            trial=study.Trial(name="name_value"),
        )
@pytest.mark.asyncio
async def test_create_trial_flattened_async():
    """Async create_trial(parent=..., trial=...) must copy both flattened
    fields onto the generated request object."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_trial), "__call__") as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain study.Trial() assignment that was immediately overwritten
        # here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Trial())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_trial(
            parent="parent_value",
            trial=study.Trial(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].trial
        mock_val = study.Trial(name="name_value")
        assert arg == mock_val
@pytest.mark.asyncio
async def test_create_trial_flattened_error_async():
    """Async client: request object plus flattened fields must raise."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing a request together with flattened keyword fields is ambiguous,
    # so the client is expected to reject the call.
    request = vizier_service.CreateTrialRequest()
    with pytest.raises(ValueError):
        await client.create_trial(
            request,
            parent="parent_value",
            trial=study.Trial(name="name_value"),
        )
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.GetTrialRequest,
        dict,
    ],
)
def test_get_trial(request_type, transport: str = "grpc"):
    """get_trial sends a GetTrialRequest and surfaces the Trial fields from
    the (faked) gRPC response."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_trial), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = study.Trial(
            name="name_value",
            id="id_value",
            state=study.Trial.State.REQUESTED,
            client_id="client_id_value",
            infeasible_reason="infeasible_reason_value",
            custom_job="custom_job_value",
        )
        response = client.get_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.GetTrialRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
def test_get_trial_empty_call():
    """Coverage failsafe: get_trial() with no request object and no flattened
    fields must still invoke the stub with a default request."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is issued.
    with mock.patch.object(type(client.transport.get_trial), "__call__") as stub:
        client.get_trial()
        stub.assert_called()
        # The client must have synthesized an empty GetTrialRequest.
        _, positional, _ = stub.mock_calls[0]
        assert positional[0] == vizier_service.GetTrialRequest()
@pytest.mark.asyncio
async def test_get_trial_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.GetTrialRequest
):
    """Async get_trial forwards the request and surfaces the Trial fields
    from the (faked) gRPC response."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_trial), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            study.Trial(
                name="name_value",
                id="id_value",
                state=study.Trial.State.REQUESTED,
                client_id="client_id_value",
                infeasible_reason="infeasible_reason_value",
                custom_job="custom_job_value",
            )
        )
        response = await client.get_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.GetTrialRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
@pytest.mark.asyncio
async def test_get_trial_async_from_dict():
    """Async get_trial must also accept a plain dict as the request."""
    await test_get_trial_async(request_type=dict)
def test_get_trial_field_headers():
    """get_trial must propagate request.name as an x-goog-request-params
    routing header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.GetTrialRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_trial), "__call__") as call:
        call.return_value = study.Trial()
        client.get_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "name=name/value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_trial_field_headers_async():
    """Async variant: get_trial must send request.name as a routing header
    in the call metadata."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.GetTrialRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_trial), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Trial())
        await client.get_trial(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "name=name/value",
    ) in kw["metadata"]
def test_get_trial_flattened():
    """get_trial(name=...) must copy the flattened field onto the generated
    request object."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_trial), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = study.Trial()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_trial(
            name="name_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_get_trial_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and flattened keyword fields is
    # ambiguous, so the client is expected to reject the call.
    request = vizier_service.GetTrialRequest()
    with pytest.raises(ValueError):
        client.get_trial(request, name="name_value")
@pytest.mark.asyncio
async def test_get_trial_flattened_async():
    """Async get_trial(name=...) must copy the flattened field onto the
    generated request object."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_trial), "__call__") as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain study.Trial() assignment that was immediately overwritten
        # here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Trial())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_trial(
            name="name_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_trial_flattened_error_async():
    """Async client: request object plus flattened fields must raise."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing a request together with flattened keyword fields is ambiguous,
    # so the client is expected to reject the call.
    request = vizier_service.GetTrialRequest()
    with pytest.raises(ValueError):
        await client.get_trial(request, name="name_value")
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.ListTrialsRequest,
        dict,
    ],
)
def test_list_trials(request_type, transport: str = "grpc"):
    """list_trials sends a ListTrialsRequest and wraps the response in a
    ListTrialsPager exposing next_page_token."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = vizier_service.ListTrialsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.ListTrialsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListTrialsPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_trials_empty_call():
    """Coverage failsafe: list_trials() with no request object and no
    flattened fields must still invoke the stub with a default request."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is issued.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as stub:
        client.list_trials()
        stub.assert_called()
        # The client must have synthesized an empty ListTrialsRequest.
        _, positional, _ = stub.mock_calls[0]
        assert positional[0] == vizier_service.ListTrialsRequest()
@pytest.mark.asyncio
async def test_list_trials_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.ListTrialsRequest
):
    """Async list_trials forwards the request and wraps the response in a
    ListTrialsAsyncPager."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListTrialsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.ListTrialsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListTrialsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_trials_async_from_dict():
    """Async list_trials must also accept a plain dict as the request."""
    await test_list_trials_async(request_type=dict)
def test_list_trials_field_headers():
    """list_trials must propagate request.parent as an x-goog-request-params
    routing header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.ListTrialsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        call.return_value = vizier_service.ListTrialsResponse()
        client.list_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_trials_field_headers_async():
    """Async variant: list_trials must send request.parent as a routing
    header in the call metadata."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.ListTrialsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListTrialsResponse()
        )
        await client.list_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
def test_list_trials_flattened():
    """list_trials(parent=...) must copy the flattened field onto the
    generated request object."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = vizier_service.ListTrialsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_trials(
            parent="parent_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
def test_list_trials_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and flattened keyword fields is
    # ambiguous, so the client is expected to reject the call.
    request = vizier_service.ListTrialsRequest()
    with pytest.raises(ValueError):
        client.list_trials(request, parent="parent_value")
@pytest.mark.asyncio
async def test_list_trials_flattened_async():
    """Async list_trials(parent=...) must copy the flattened field onto the
    generated request object."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain ListTrialsResponse() assignment that was immediately
        # overwritten here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListTrialsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_trials(
            parent="parent_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_list_trials_flattened_error_async():
    """Async client: request object plus flattened fields must raise."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing a request together with flattened keyword fields is ambiguous,
    # so the client is expected to reject the call.
    request = vizier_service.ListTrialsRequest()
    with pytest.raises(ValueError):
        await client.list_trials(request, parent="parent_value")
def test_list_trials_pager(transport_name: str = "grpc"):
    """The sync pager must iterate all Trials across every page and carry
    the routing-header metadata."""
    client = VizierServiceClient(
        # Fixed: instantiate the credentials instead of passing the class,
        # consistent with every other test in this module.
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                    study.Trial(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListTrialsResponse(
                trials=[],
                next_page_token="def",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                ],
            ),
            RuntimeError,
        )
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_trials(request={})
        assert pager._metadata == metadata
        # Iterating the pager must transparently fetch all four pages.
        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, study.Trial) for i in results)
def test_list_trials_pages(transport_name: str = "grpc"):
    """Each page object exposes the raw next_page_token of its response."""
    client = VizierServiceClient(
        # Fixed: pass a credentials *instance* (was the bare class),
        # consistent with every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_trials), "__call__") as call:
        # Pages of 3 + 0 + 1 + 2 trials; RuntimeError guards over-iteration.
        call.side_effect = (
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                    study.Trial(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListTrialsResponse(
                trials=[],
                next_page_token="def",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_trials(request={}).pages)
        # The final page has no token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_trials_async_pager():
    """The async pager yields every trial across all pages."""
    client = VizierServiceAsyncClient(
        # Fixed: pass a credentials *instance* (was the bare class),
        # consistent with every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_trials), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Pages of 3 + 0 + 1 + 2 trials; RuntimeError guards over-iteration.
        call.side_effect = (
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                    study.Trial(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListTrialsResponse(
                trials=[],
                next_page_token="def",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_trials(
            request={},
        )
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:  # pragma: no branch
            responses.append(response)
        assert len(responses) == 6
        assert all(isinstance(i, study.Trial) for i in responses)
@pytest.mark.asyncio
async def test_list_trials_async_pages():
    """Async page objects expose each response's raw next_page_token."""
    client = VizierServiceAsyncClient(
        # Fixed: pass a credentials *instance* (was the bare class),
        # consistent with every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_trials), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Pages of 3 + 0 + 1 + 2 trials; RuntimeError guards over-iteration.
        call.side_effect = (
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                    study.Trial(),
                ],
                next_page_token="abc",
            ),
            vizier_service.ListTrialsResponse(
                trials=[],
                next_page_token="def",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                ],
                next_page_token="ghi",
            ),
            vizier_service.ListTrialsResponse(
                trials=[
                    study.Trial(),
                    study.Trial(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (
            await client.list_trials(request={})
        ).pages:  # pragma: no branch
            pages.append(page_)
        # The final page has no token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.AddTrialMeasurementRequest,
        dict,
    ],
)
def test_add_trial_measurement(request_type, transport: str = "grpc"):
    """add_trial_measurement forwards one RPC and returns the Trial."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # proto3 treats every field as optional and the transport is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(
        type(client.transport.add_trial_measurement), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = study.Trial(
            name="name_value",
            id="id_value",
            state=study.Trial.State.REQUESTED,
            client_id="client_id_value",
            infeasible_reason="infeasible_reason_value",
            custom_job="custom_job_value",
        )
        response = client.add_trial_measurement(request)
        # Exactly one stub invocation, carrying the canonical request proto.
        assert len(rpc_mock.mock_calls) == 1
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.AddTrialMeasurementRequest()
    # The returned object mirrors the designated response field-for-field.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
def test_add_trial_measurement_empty_call():
    """Coverage failsafe: a call with no request and no kwargs still works."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(
        type(client.transport.add_trial_measurement), "__call__"
    ) as rpc_mock:
        client.add_trial_measurement()
        rpc_mock.assert_called()
        # The client must synthesize a default request proto.
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.AddTrialMeasurementRequest()
@pytest.mark.asyncio
async def test_add_trial_measurement_async(
    transport: str = "grpc_asyncio",
    request_type=vizier_service.AddTrialMeasurementRequest,
):
    """Async variant: awaiting the call yields the designated Trial."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # An empty request suffices: all proto3 fields are optional and the
    # API itself is mocked out.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(
        type(client.transport.add_trial_measurement), "__call__"
    ) as rpc_mock:
        # The async surface expects an awaitable, so wrap the response.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            study.Trial(
                name="name_value",
                id="id_value",
                state=study.Trial.State.REQUESTED,
                client_id="client_id_value",
                infeasible_reason="infeasible_reason_value",
                custom_job="custom_job_value",
            )
        )
        response = await client.add_trial_measurement(request)
        # The stub was invoked with the canonical request proto.
        assert len(rpc_mock.mock_calls)
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.AddTrialMeasurementRequest()
    # The returned object mirrors the designated response field-for-field.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
@pytest.mark.asyncio
async def test_add_trial_measurement_async_from_dict():
    """Re-run the async test with a plain dict as the request type."""
    await test_add_trial_measurement_async(request_type=dict)
def test_add_trial_measurement_field_headers():
    """URI-path fields must be sent as x-goog-request-params metadata."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.AddTrialMeasurementRequest()
    request.trial_name = "trial_name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(
        type(client.transport.add_trial_measurement), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = study.Trial()
        client.add_trial_measurement(request)
        # One stub call, with the request passed through unchanged.
        assert len(rpc_mock.mock_calls) == 1
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from trial_name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "trial_name=trial_name/value",
        ) in kw["metadata"]
@pytest.mark.asyncio
async def test_add_trial_measurement_field_headers_async():
    """Async variant of the routing-header check for trial_name."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.AddTrialMeasurementRequest()
    request.trial_name = "trial_name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(
        type(client.transport.add_trial_measurement), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Trial())
        await client.add_trial_measurement(request)
        # The stub was invoked with the request passed through unchanged.
        assert len(rpc_mock.mock_calls)
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from trial_name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "trial_name=trial_name/value",
        ) in kw["metadata"]
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.CompleteTrialRequest,
        dict,
    ],
)
def test_complete_trial(request_type, transport: str = "grpc"):
    """complete_trial forwards one RPC and returns the Trial."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # proto3 treats every field as optional and the transport is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(type(client.transport.complete_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = study.Trial(
            name="name_value",
            id="id_value",
            state=study.Trial.State.REQUESTED,
            client_id="client_id_value",
            infeasible_reason="infeasible_reason_value",
            custom_job="custom_job_value",
        )
        response = client.complete_trial(request)
        # Exactly one stub invocation, carrying the canonical request proto.
        assert len(rpc_mock.mock_calls) == 1
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.CompleteTrialRequest()
    # The returned object mirrors the designated response field-for-field.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
def test_complete_trial_empty_call():
    """Coverage failsafe: a call with no request and no kwargs still works."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.complete_trial), "__call__") as rpc_mock:
        client.complete_trial()
        rpc_mock.assert_called()
        # The client must synthesize a default request proto.
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.CompleteTrialRequest()
@pytest.mark.asyncio
async def test_complete_trial_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.CompleteTrialRequest
):
    """Async variant: awaiting complete_trial yields the designated Trial."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # An empty request suffices: all proto3 fields are optional and the
    # API itself is mocked out.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(type(client.transport.complete_trial), "__call__") as rpc_mock:
        # The async surface expects an awaitable, so wrap the response.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            study.Trial(
                name="name_value",
                id="id_value",
                state=study.Trial.State.REQUESTED,
                client_id="client_id_value",
                infeasible_reason="infeasible_reason_value",
                custom_job="custom_job_value",
            )
        )
        response = await client.complete_trial(request)
        # The stub was invoked with the canonical request proto.
        assert len(rpc_mock.mock_calls)
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.CompleteTrialRequest()
    # The returned object mirrors the designated response field-for-field.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
@pytest.mark.asyncio
async def test_complete_trial_async_from_dict():
    """Re-run the async test with a plain dict as the request type."""
    await test_complete_trial_async(request_type=dict)
def test_complete_trial_field_headers():
    """URI-path fields must be sent as x-goog-request-params metadata."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.CompleteTrialRequest()
    request.name = "name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.complete_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = study.Trial()
        client.complete_trial(request)
        # One stub call, with the request passed through unchanged.
        assert len(rpc_mock.mock_calls) == 1
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "name=name/value",
        ) in kw["metadata"]
@pytest.mark.asyncio
async def test_complete_trial_field_headers_async():
    """Async variant of the routing-header check for name."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.CompleteTrialRequest()
    request.name = "name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.complete_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Trial())
        await client.complete_trial(request)
        # The stub was invoked with the request passed through unchanged.
        assert len(rpc_mock.mock_calls)
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "name=name/value",
        ) in kw["metadata"]
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.DeleteTrialRequest,
        dict,
    ],
)
def test_delete_trial(request_type, transport: str = "grpc"):
    """delete_trial issues one RPC and yields no response body."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # proto3 treats every field as optional and the transport is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(type(client.transport.delete_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = None
        response = client.delete_trial(request)
        # Exactly one stub invocation, carrying the canonical request proto.
        assert len(rpc_mock.mock_calls) == 1
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.DeleteTrialRequest()
    # Deletion returns nothing.
    assert response is None
def test_delete_trial_empty_call():
    """Coverage failsafe: a call with no request and no kwargs still works."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.delete_trial), "__call__") as rpc_mock:
        client.delete_trial()
        rpc_mock.assert_called()
        # The client must synthesize a default request proto.
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.DeleteTrialRequest()
@pytest.mark.asyncio
async def test_delete_trial_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.DeleteTrialRequest
):
    """Async variant: awaiting delete_trial yields None."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # An empty request suffices: all proto3 fields are optional and the
    # API itself is mocked out.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(type(client.transport.delete_trial), "__call__") as rpc_mock:
        # The async surface expects an awaitable, so wrap the None result.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_trial(request)
        # The stub was invoked with the canonical request proto.
        assert len(rpc_mock.mock_calls)
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.DeleteTrialRequest()
    # Deletion returns nothing.
    assert response is None
@pytest.mark.asyncio
async def test_delete_trial_async_from_dict():
    """Re-run the async test with a plain dict as the request type."""
    await test_delete_trial_async(request_type=dict)
def test_delete_trial_field_headers():
    """URI-path fields must be sent as x-goog-request-params metadata."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.DeleteTrialRequest()
    request.name = "name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.delete_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = None
        client.delete_trial(request)
        # One stub call, with the request passed through unchanged.
        assert len(rpc_mock.mock_calls) == 1
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "name=name/value",
        ) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_trial_field_headers_async():
    """Async variant of the routing-header check for name."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.DeleteTrialRequest()
    request.name = "name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.delete_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_trial(request)
        # The stub was invoked with the request passed through unchanged.
        assert len(rpc_mock.mock_calls)
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "name=name/value",
        ) in kw["metadata"]
def test_delete_trial_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.delete_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = None
        # Invoke with a truthy value for each flattened field.
        client.delete_trial(
            name="name_value",
        )
        # The keyword argument must land on the request object.
        assert len(rpc_mock.mock_calls) == 1
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request.name == "name_value"
def test_delete_trial_flattened_error():
    """Mixing a request object with flattened kwargs must raise."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request proto and flattened fields is ambiguous,
    # so the client rejects the combination with a ValueError.
    request = vizier_service.DeleteTrialRequest()
    with pytest.raises(ValueError):
        client.delete_trial(request, name="name_value")
@pytest.mark.asyncio
async def test_delete_trial_flattened_async():
    """Flattened kwargs are folded into the request proto (async)."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_trial), "__call__") as call:
        # Designate an appropriate awaitable return value for the call.
        # (A redundant `call.return_value = None` dead store that was
        # immediately overwritten here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_trial(
            name="name_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_trial_flattened_error_async():
    """Mixing a request object with flattened kwargs must raise (async)."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request proto and flattened fields is ambiguous,
    # so the client rejects the combination with a ValueError.
    request = vizier_service.DeleteTrialRequest()
    with pytest.raises(ValueError):
        await client.delete_trial(request, name="name_value")
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.CheckTrialEarlyStoppingStateRequest,
        dict,
    ],
)
def test_check_trial_early_stopping_state(request_type, transport: str = "grpc"):
    """The long-running method wraps the stub's Operation in a future."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # proto3 treats every field as optional and the transport is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(
        type(client.transport.check_trial_early_stopping_state), "__call__"
    ) as rpc_mock:
        # LRO stubs return an Operation proto.
        rpc_mock.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.check_trial_early_stopping_state(request)
        # Exactly one stub invocation, carrying the canonical request proto.
        assert len(rpc_mock.mock_calls) == 1
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.CheckTrialEarlyStoppingStateRequest()
    # The client surfaces the operation as a future.
    assert isinstance(response, future.Future)
def test_check_trial_early_stopping_state_empty_call():
    """Coverage failsafe: a call with no request and no kwargs still works."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(
        type(client.transport.check_trial_early_stopping_state), "__call__"
    ) as rpc_mock:
        client.check_trial_early_stopping_state()
        rpc_mock.assert_called()
        # The client must synthesize a default request proto.
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.CheckTrialEarlyStoppingStateRequest()
@pytest.mark.asyncio
async def test_check_trial_early_stopping_state_async(
    transport: str = "grpc_asyncio",
    request_type=vizier_service.CheckTrialEarlyStoppingStateRequest,
):
    """Async variant: the awaited LRO call yields a future."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # An empty request suffices: all proto3 fields are optional and the
    # API itself is mocked out.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(
        type(client.transport.check_trial_early_stopping_state), "__call__"
    ) as rpc_mock:
        # The async surface expects an awaitable, so wrap the Operation.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.check_trial_early_stopping_state(request)
        # The stub was invoked with the canonical request proto.
        assert len(rpc_mock.mock_calls)
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.CheckTrialEarlyStoppingStateRequest()
    # The client surfaces the operation as a future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_check_trial_early_stopping_state_async_from_dict():
    """Re-run the async test with a plain dict as the request type."""
    await test_check_trial_early_stopping_state_async(request_type=dict)
def test_check_trial_early_stopping_state_field_headers():
    """URI-path fields must be sent as x-goog-request-params metadata."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.CheckTrialEarlyStoppingStateRequest()
    request.trial_name = "trial_name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(
        type(client.transport.check_trial_early_stopping_state), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = operations_pb2.Operation(name="operations/op")
        client.check_trial_early_stopping_state(request)
        # One stub call, with the request passed through unchanged.
        assert len(rpc_mock.mock_calls) == 1
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from trial_name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "trial_name=trial_name/value",
        ) in kw["metadata"]
@pytest.mark.asyncio
async def test_check_trial_early_stopping_state_field_headers_async():
    """Async variant of the routing-header check for trial_name."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.CheckTrialEarlyStoppingStateRequest()
    request.trial_name = "trial_name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(
        type(client.transport.check_trial_early_stopping_state), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.check_trial_early_stopping_state(request)
        # The stub was invoked with the request passed through unchanged.
        assert len(rpc_mock.mock_calls)
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from trial_name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "trial_name=trial_name/value",
        ) in kw["metadata"]
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.StopTrialRequest,
        dict,
    ],
)
def test_stop_trial(request_type, transport: str = "grpc"):
    """stop_trial forwards one RPC and returns the Trial."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # proto3 treats every field as optional and the transport is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(type(client.transport.stop_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = study.Trial(
            name="name_value",
            id="id_value",
            state=study.Trial.State.REQUESTED,
            client_id="client_id_value",
            infeasible_reason="infeasible_reason_value",
            custom_job="custom_job_value",
        )
        response = client.stop_trial(request)
        # Exactly one stub invocation, carrying the canonical request proto.
        assert len(rpc_mock.mock_calls) == 1
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.StopTrialRequest()
    # The returned object mirrors the designated response field-for-field.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
def test_stop_trial_empty_call():
    """Coverage failsafe: a call with no request and no kwargs still works."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.stop_trial), "__call__") as rpc_mock:
        client.stop_trial()
        rpc_mock.assert_called()
        # The client must synthesize a default request proto.
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.StopTrialRequest()
@pytest.mark.asyncio
async def test_stop_trial_async(
    transport: str = "grpc_asyncio", request_type=vizier_service.StopTrialRequest
):
    """Async variant: awaiting stop_trial yields the designated Trial."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # An empty request suffices: all proto3 fields are optional and the
    # API itself is mocked out.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(type(client.transport.stop_trial), "__call__") as rpc_mock:
        # The async surface expects an awaitable, so wrap the response.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            study.Trial(
                name="name_value",
                id="id_value",
                state=study.Trial.State.REQUESTED,
                client_id="client_id_value",
                infeasible_reason="infeasible_reason_value",
                custom_job="custom_job_value",
            )
        )
        response = await client.stop_trial(request)
        # The stub was invoked with the canonical request proto.
        assert len(rpc_mock.mock_calls)
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.StopTrialRequest()
    # The returned object mirrors the designated response field-for-field.
    assert isinstance(response, study.Trial)
    assert response.name == "name_value"
    assert response.id == "id_value"
    assert response.state == study.Trial.State.REQUESTED
    assert response.client_id == "client_id_value"
    assert response.infeasible_reason == "infeasible_reason_value"
    assert response.custom_job == "custom_job_value"
@pytest.mark.asyncio
async def test_stop_trial_async_from_dict():
    """Re-run the async test with a plain dict as the request type."""
    await test_stop_trial_async(request_type=dict)
def test_stop_trial_field_headers():
    """URI-path fields must be sent as x-goog-request-params metadata."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.StopTrialRequest()
    request.name = "name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.stop_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = study.Trial()
        client.stop_trial(request)
        # One stub call, with the request passed through unchanged.
        assert len(rpc_mock.mock_calls) == 1
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "name=name/value",
        ) in kw["metadata"]
@pytest.mark.asyncio
async def test_stop_trial_field_headers_async():
    """Async variant of the routing-header check for name."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the routed field a non-empty value so the header has content.
    request = vizier_service.StopTrialRequest()
    request.name = "name/value"
    # Patch the transport stub so no real RPC is made.
    with mock.patch.object(type(client.transport.stop_trial), "__call__") as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(study.Trial())
        await client.stop_trial(request)
        # The stub was invoked with the request passed through unchanged.
        assert len(rpc_mock.mock_calls)
        assert rpc_mock.mock_calls[0].args[0] == request
        # The routing header derived from name must be present.
        kw = rpc_mock.mock_calls[0].kwargs
        assert (
            "x-goog-request-params",
            "name=name/value",
        ) in kw["metadata"]
@pytest.mark.parametrize(
    "request_type",
    [
        vizier_service.ListOptimalTrialsRequest,
        dict,
    ],
)
def test_list_optimal_trials(request_type, transport: str = "grpc"):
    """list_optimal_trials forwards one RPC and returns the response proto."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # proto3 treats every field as optional and the transport is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the transport-level stub so no real RPC goes out.
    with mock.patch.object(
        type(client.transport.list_optimal_trials), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = vizier_service.ListOptimalTrialsResponse()
        response = client.list_optimal_trials(request)
        # Exactly one stub invocation, carrying the canonical request proto.
        assert len(rpc_mock.mock_calls) == 1
        sent_request = rpc_mock.mock_calls[0].args[0]
        assert sent_request == vizier_service.ListOptimalTrialsRequest()
    # The response type matches the service definition.
    assert isinstance(response, vizier_service.ListOptimalTrialsResponse)
def test_list_optimal_trials_empty_call():
    """Calling with no request and no flattened fields still sends a
    default-constructed request (coverage failsafe)."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_optimal_trials), "__call__"
    ) as call:
        client.list_optimal_trials()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.ListOptimalTrialsRequest()
@pytest.mark.asyncio
async def test_list_optimal_trials_async(
    transport: str = "grpc_asyncio",
    request_type=vizier_service.ListOptimalTrialsRequest,
):
    """Async variant: ``list_optimal_trials`` awaits the stub and returns
    the expected response type."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_optimal_trials), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListOptimalTrialsResponse()
        )
        response = await client.list_optimal_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == vizier_service.ListOptimalTrialsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, vizier_service.ListOptimalTrialsResponse)
@pytest.mark.asyncio
async def test_list_optimal_trials_async_from_dict():
    """Exercise the async path with a plain dict request body."""
    await test_list_optimal_trials_async(request_type=dict)
def test_list_optimal_trials_field_headers():
    """``list_optimal_trials`` forwards ``request.parent`` as the
    ``x-goog-request-params`` routing header."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.ListOptimalTrialsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_optimal_trials), "__call__"
    ) as call:
        call.return_value = vizier_service.ListOptimalTrialsResponse()
        client.list_optimal_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_optimal_trials_field_headers_async():
    """Async variant of the routing-header test for ``list_optimal_trials``."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = vizier_service.ListOptimalTrialsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_optimal_trials), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListOptimalTrialsResponse()
        )
        await client.list_optimal_trials(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in kw["metadata"]
def test_list_optimal_trials_flattened():
    """A flattened ``parent=`` kwarg is copied into the outgoing request."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_optimal_trials), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = vizier_service.ListOptimalTrialsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_optimal_trials(
            parent="parent_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
def test_list_optimal_trials_flattened_error():
    """Supplying both a request object and a flattened field must raise."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # The client refuses mixed calling conventions with a ValueError.
    request = vizier_service.ListOptimalTrialsRequest()
    with pytest.raises(ValueError):
        client.list_optimal_trials(request, parent="parent_value")
@pytest.mark.asyncio
async def test_list_optimal_trials_flattened_async():
    """Async variant: a flattened ``parent=`` kwarg is copied into the
    outgoing request.

    Fix: the original assigned ``call.return_value`` twice — first a bare
    response object, immediately overwritten by the awaitable
    ``FakeUnaryUnaryCall`` wrapper. The dead first assignment is removed.
    """
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_optimal_trials), "__call__"
    ) as call:
        # Designate an appropriate return value for the call; the async
        # client expects an awaitable from the stub.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            vizier_service.ListOptimalTrialsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_optimal_trials(
            parent="parent_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_list_optimal_trials_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_optimal_trials(
            vizier_service.ListOptimalTrialsRequest(),
            parent="parent_value",
        )
def test_credentials_transport_error():
    """A pre-built transport is mutually exclusive with credentials,
    credentials_file, api_key, and scopes client options."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.VizierServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VizierServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.VizierServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VizierServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide an api_key and a transport instance.
    transport = transports.VizierServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = VizierServiceClient(
            client_options=options,
            transport=transport,
        )
    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = VizierServiceClient(
            client_options=options, credentials=ga_credentials.AnonymousCredentials()
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.VizierServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VizierServiceClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )
def test_transport_instance():
    """A client built from a custom transport exposes that same instance."""
    custom_transport = transports.VizierServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = VizierServiceClient(transport=custom_transport)
    assert client.transport is custom_transport
def test_transport_get_channel():
    """Both the sync and async gRPC transports expose a usable channel."""
    for transport_cls in (
        transports.VizierServiceGrpcTransport,
        transports.VizierServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VizierServiceGrpcTransport,
        transports.VizierServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials (ADC)."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
@pytest.mark.parametrize(
    "transport_name",
    [
        "grpc",
    ],
)
def test_transport_kind(transport_name):
    """``transport.kind`` matches the name used to resolve the class."""
    transport = VizierServiceClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name
def test_transport_grpc_default():
    """When no transport is named, the client defaults to sync gRPC."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(client.transport, transports.VizierServiceGrpcTransport)
def test_vizier_service_base_transport_error():
    """Credentials and credentials_file together are rejected."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.VizierServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_vizier_service_base_transport():
    """Every RPC method, close(), operations_client, and the remaining
    properties on the abstract base transport raise NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.aiplatform_v1beta1.services.vizier_service.transports.VizierServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.VizierServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "create_study",
        "get_study",
        "list_studies",
        "delete_study",
        "lookup_study",
        "suggest_trials",
        "create_trial",
        "get_trial",
        "list_trials",
        "add_trial_measurement",
        "complete_trial",
        "delete_trial",
        "check_trial_early_stopping_state",
        "stop_trial",
        "list_optimal_trials",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
    # Catch all for all remaining methods and properties
    remainder = [
        "kind",
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()
def test_vizier_service_base_transport_with_credentials_file():
    """A credentials_file is loaded via google.auth with the expected
    scopes and quota project."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.aiplatform_v1beta1.services.vizier_service.transports.VizierServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VizierServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
def test_vizier_service_base_transport_with_adc():
    """With neither credentials nor credentials_file, ADC is consulted."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.aiplatform_v1beta1.services.vizier_service.transports.VizierServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VizierServiceTransport()
        adc.assert_called_once()
def test_vizier_service_auth_adc():
    """The client, given no credentials, calls ADC with the default
    cloud-platform scope."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        VizierServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VizierServiceGrpcTransport,
        transports.VizierServiceGrpcAsyncIOTransport,
    ],
)
def test_vizier_service_transport_auth_adc(transport_class):
    """Transports pass explicit scopes and quota project through to ADC."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.VizierServiceGrpcTransport, grpc_helpers),
        (transports.VizierServiceGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_vizier_service_transport_create_channel(transport_class, grpc_helpers):
    """The transport builds its channel with the expected host, scopes,
    quota project, and unlimited message-size options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "aiplatform.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=["1", "2"],
            default_host="aiplatform.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VizierServiceGrpcTransport,
        transports.VizierServiceGrpcAsyncIOTransport,
    ],
)
def test_vizier_service_grpc_transport_client_cert_source_for_mtls(transport_class):
    """mTLS setup: explicit ssl_channel_credentials win; otherwise the
    client_cert_source_for_mtls callback supplies the cert/key pair."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
@pytest.mark.parametrize(
    "transport_name",
    [
        "grpc",
        "grpc_asyncio",
    ],
)
def test_vizier_service_host_no_port(transport_name):
    """An api_endpoint with no explicit port resolves to the default :443."""
    options = client_options.ClientOptions(api_endpoint="aiplatform.googleapis.com")
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
        transport=transport_name,
    )
    assert client.transport._host == "aiplatform.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_name",
    [
        "grpc",
        "grpc_asyncio",
    ],
)
def test_vizier_service_host_with_port(transport_name):
    """An api_endpoint with an explicit port keeps that port verbatim."""
    options = client_options.ClientOptions(api_endpoint="aiplatform.googleapis.com:8000")
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
        transport=transport_name,
    )
    assert client.transport._host == "aiplatform.googleapis.com:8000"
def test_vizier_service_grpc_transport_channel():
    """A pre-built channel handed to the sync gRPC transport is used as-is.

    Fix: comparisons to None use identity (``is None``), not equality
    (PEP 8 / flake8 E711); behavior is unchanged.
    """
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.VizierServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_vizier_service_grpc_asyncio_transport_channel():
    """A pre-built channel handed to the asyncio gRPC transport is used as-is.

    Fix: comparisons to None use identity (``is None``), not equality
    (PEP 8 / flake8 E711); behavior is unchanged.
    """
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.VizierServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VizierServiceGrpcTransport,
        transports.VizierServiceGrpcAsyncIOTransport,
    ],
)
def test_vizier_service_transport_channel_mtls_with_client_cert_source(transport_class):
    """Deprecated api_mtls_endpoint + client_cert_source path: warns, pulls
    ADC credentials, and builds an mTLS channel from the callback's pair."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VizierServiceGrpcTransport,
        transports.VizierServiceGrpcAsyncIOTransport,
    ],
)
def test_vizier_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint without a cert source: warns and falls
    back to ADC-provided SSL credentials for the mTLS channel."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_vizier_service_grpc_lro_client():
    """The sync transport lazily builds a cached OperationsClient."""
    client = VizierServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.OperationsClient,
    )
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_vizier_service_grpc_lro_async_client():
    """The asyncio transport lazily builds a cached OperationsAsyncClient."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.OperationsAsyncClient,
    )
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_custom_job_path():
    """custom_job_path formats its parts into the canonical resource name."""
    project, location, custom_job = "squid", "clam", "whelk"
    expected = f"projects/{project}/locations/{location}/customJobs/{custom_job}"
    actual = VizierServiceClient.custom_job_path(project, location, custom_job)
    assert expected == actual
def test_parse_custom_job_path():
    """parse_custom_job_path is the inverse of custom_job_path."""
    expected = {"project": "octopus", "location": "oyster", "custom_job": "nudibranch"}
    path = VizierServiceClient.custom_job_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_custom_job_path(path) == expected
def test_study_path():
    """study_path formats its parts into the canonical resource name."""
    project, location, study = "cuttlefish", "mussel", "winkle"
    expected = f"projects/{project}/locations/{location}/studies/{study}"
    actual = VizierServiceClient.study_path(project, location, study)
    assert expected == actual
def test_parse_study_path():
    """parse_study_path is the inverse of study_path."""
    expected = {"project": "nautilus", "location": "scallop", "study": "abalone"}
    path = VizierServiceClient.study_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_study_path(path) == expected
def test_trial_path():
    """trial_path formats its parts into the canonical resource name."""
    project, location, study, trial = "squid", "clam", "whelk", "octopus"
    expected = (
        f"projects/{project}/locations/{location}/studies/{study}/trials/{trial}"
    )
    actual = VizierServiceClient.trial_path(project, location, study, trial)
    assert expected == actual
def test_parse_trial_path():
    """parse_trial_path is the inverse of trial_path."""
    expected = {
        "project": "oyster",
        "location": "nudibranch",
        "study": "cuttlefish",
        "trial": "mussel",
    }
    path = VizierServiceClient.trial_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_trial_path(path) == expected
def test_common_billing_account_path():
    """common_billing_account_path builds the billingAccounts/ name."""
    billing_account = "winkle"
    expected = f"billingAccounts/{billing_account}"
    assert VizierServiceClient.common_billing_account_path(billing_account) == expected
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts common_billing_account_path."""
    expected = {"billing_account": "nautilus"}
    path = VizierServiceClient.common_billing_account_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_common_billing_account_path(path) == expected
def test_common_folder_path():
    """common_folder_path builds the folders/ name."""
    folder = "scallop"
    expected = f"folders/{folder}"
    assert VizierServiceClient.common_folder_path(folder) == expected
def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path."""
    expected = {"folder": "abalone"}
    path = VizierServiceClient.common_folder_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_common_folder_path(path) == expected
def test_common_organization_path():
    """common_organization_path builds the organizations/ name."""
    organization = "squid"
    expected = f"organizations/{organization}"
    assert VizierServiceClient.common_organization_path(organization) == expected
def test_parse_common_organization_path():
    """parse_common_organization_path inverts common_organization_path."""
    expected = {"organization": "clam"}
    path = VizierServiceClient.common_organization_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_common_organization_path(path) == expected
def test_common_project_path():
    """common_project_path builds the projects/ name."""
    project = "whelk"
    expected = f"projects/{project}"
    assert VizierServiceClient.common_project_path(project) == expected
def test_parse_common_project_path():
    """parse_common_project_path inverts common_project_path."""
    expected = {"project": "octopus"}
    path = VizierServiceClient.common_project_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_common_project_path(path) == expected
def test_common_location_path():
    """common_location_path builds the projects/.../locations/... name."""
    project, location = "oyster", "nudibranch"
    expected = f"projects/{project}/locations/{location}"
    assert VizierServiceClient.common_location_path(project, location) == expected
def test_parse_common_location_path():
    """parse_common_location_path inverts common_location_path."""
    expected = {"project": "cuttlefish", "location": "mussel"}
    path = VizierServiceClient.common_location_path(**expected)
    # Round-trip: parsing the constructed path recovers the components.
    assert VizierServiceClient.parse_common_location_path(path) == expected
def test_client_with_default_client_info():
    """client_info is threaded through to the transport's
    _prep_wrapped_messages for both the client and a raw transport."""
    client_info = gapic_v1.client_info.ClientInfo()
    with mock.patch.object(
        transports.VizierServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = VizierServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    with mock.patch.object(
        transports.VizierServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = VizierServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """Exiting the async client context manager closes the gRPC channel."""
    client = VizierServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(getattr(client.transport, "grpc_channel")), "close"
    ) as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """Exiting the sync client context manager closes the gRPC channel.

    Fix: the local mapping was named ``transports``, shadowing the
    module-level ``transports`` package imported at the top of this file;
    renamed to ``transport_channels``. Behavior is unchanged.
    """
    transport_channels = {
        "grpc": "_grpc_channel",
    }
    for transport, close_name in transport_channels.items():
        client = VizierServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """The client context manager delegates close() to its transport.

    Fix: the local list was named ``transports``, shadowing the
    module-level ``transports`` package imported at the top of this file;
    renamed to ``transport_names``. Behavior is unchanged.
    """
    transport_names = [
        "grpc",
    ]
    for transport in transport_names:
        client = VizierServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
@pytest.mark.parametrize(
    "client_class,transport_class",
    [
        (VizierServiceClient, transports.VizierServiceGrpcTransport),
        (VizierServiceAsyncClient, transports.VizierServiceGrpcAsyncIOTransport),
    ],
)
def test_api_key_credentials(client_class, transport_class):
    """An api_key client option is exchanged for API-key credentials and
    handed to the transport constructor.

    Fix: the final line of this function in the source copy was corrupted
    with fused non-Python text (dataset statistics), which made the file a
    syntax error; the trailing ``)`` is restored cleanly.
    """
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
86d9b5893d02bd5ca66d62a712f218a868acc65e | 43 | py | Python | tests/parser/good/plus-minus.py | Nakrez/RePy | 057db55a99eac2c5cb3d622fa1f2e29f6083d8d6 | [
"MIT"
] | 1 | 2020-11-24T05:24:26.000Z | 2020-11-24T05:24:26.000Z | tests/parser/good/plus-minus.py | Nakrez/RePy | 057db55a99eac2c5cb3d622fa1f2e29f6083d8d6 | [
"MIT"
] | null | null | null | tests/parser/good/plus-minus.py | Nakrez/RePy | 057db55a99eac2c5cb3d622fa1f2e29f6083d8d6 | [
"MIT"
] | null | null | null | 1-2+1-2+3--4++++--4+4-5-6+6-3+4--5+-++++-5
| 21.5 | 42 | 0.348837 | 15 | 43 | 1 | 0.4 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.357143 | 0.023256 | 43 | 1 | 43 | 43 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
86ea52bcf1c4225f28cb6e967efd803ccfb25502 | 5,845 | py | Python | parser/testing/basic_subexpression_tests.py | zhangshyue/regex-library | 69a26b580bcc94f95dda3536cd790fb59c81a31b | [
"MIT"
] | null | null | null | parser/testing/basic_subexpression_tests.py | zhangshyue/regex-library | 69a26b580bcc94f95dda3536cd790fb59c81a31b | [
"MIT"
] | null | null | null | parser/testing/basic_subexpression_tests.py | zhangshyue/regex-library | 69a26b580bcc94f95dda3536cd790fb59c81a31b | [
"MIT"
] | null | null | null |
# Builtin imports
import unittest
import logging
# Internal imports
import root_pb2
from testing.test_utils import PatternTest
import logconf
class BasicSubexpressionTests(unittest.TestCase,
                              PatternTest):
    """Expected tokenizations and subexpression splits for basic patterns.

    Each ``patterns`` entry maps a raw regex to the protobuf Root the
    parser should produce: the flat token stream plus its grouping into
    subexpressions.

    Fixes relative to the original:
    * In the ``[\\d\\.]`` subexpression, the ``\\d`` token was typed
      ``TokenType.Character`` while carrying a ``characterclass`` payload;
      it now uses ``TokenType.CharacterClass`` to match the top-level
      token list.
    * In the same subexpression, the ``\\.`` token put an ``EscapeType``
      value into the ``characterclass`` field; it now uses the ``escape``
      field, matching the top-level token list.
    * Stripped non-Python text fused onto the final line of the source copy.

    NOTE(review): the top-level tokens classify ``/`` as a plain
    Character while the subexpressions classify it as an Anchor
    (ForwardSlash). That asymmetry is preserved here — presumably the
    parser re-classifies during subexpression grouping; confirm against
    the parser implementation.
    """

    patterns = {
        "{.*}": PatternTest.gen_root(root_pb2.Expression(
            raw='{.*}',
            tokens=[
                root_pb2.Token(
                    token="{",
                    type=root_pb2.TokenType.Character,
                    character="{"
                ),
                root_pb2.Token(
                    token=".",
                    type=root_pb2.TokenType.CharacterClass,
                    characterclass=root_pb2.CharacterClassType.Dot
                ),
                root_pb2.Token(
                    token="*",
                    type=root_pb2.TokenType.QuantifierModifier,
                    quantifiermodifier=root_pb2.QuantifierModifierType.Star
                ),
                root_pb2.Token(
                    token="}",
                    type=root_pb2.TokenType.Character,
                    character="}"
                ),
            ],
            expressions=[
                root_pb2.Expression(
                    raw="{",
                    tokens=[
                        root_pb2.Token(
                            token="{",
                            type=root_pb2.TokenType.Character,
                            character="{"
                        ),
                    ]
                ),
                root_pb2.Expression(
                    raw=".*",
                    tokens=[
                        root_pb2.Token(
                            token=".",
                            type=root_pb2.TokenType.CharacterClass,
                            characterclass=root_pb2.CharacterClassType.Dot
                        ),
                        root_pb2.Token(
                            token="*",
                            type=root_pb2.TokenType.QuantifierModifier,
                            quantifiermodifier=root_pb2.QuantifierModifierType.Star
                        ),
                    ]
                ),
                root_pb2.Expression(
                    raw="}",
                    tokens=[
                        root_pb2.Token(
                            token="}",
                            type=root_pb2.TokenType.Character,
                            character="}"
                        ),
                    ]
                ),
            ]
        )),
        r"/[\d\.]/": PatternTest.gen_root(root_pb2.Expression(
            raw=r'/[\d\.]/',
            tokens=[
                root_pb2.Token(
                    token=r"/",
                    type=root_pb2.TokenType.Character,
                    character="/"
                ),
                root_pb2.Token(
                    token="[",
                    type=root_pb2.TokenType.CharacterClass,
                    characterclass=root_pb2.CharacterClassType.OpenSet
                ),
                root_pb2.Token(
                    token=r"\d",
                    type=root_pb2.TokenType.CharacterClass,
                    characterclass=root_pb2.CharacterClassType.Digit
                ),
                root_pb2.Token(
                    token=r"\.",
                    type=root_pb2.TokenType.Escape,
                    escape=root_pb2.EscapeType.Reserved
                ),
                root_pb2.Token(
                    token="]",
                    type=root_pb2.TokenType.CharacterClass,
                    characterclass=root_pb2.CharacterClassType.CloseSet
                ),
                root_pb2.Token(
                    token="/",
                    type=root_pb2.TokenType.Character,
                    character="/"
                ),
            ],
            expressions=[
                root_pb2.Expression(
                    raw='/',
                    tokens=[
                        root_pb2.Token(
                            token=r"/",
                            type=root_pb2.TokenType.Anchor,
                            anchor=root_pb2.AnchorType.ForwardSlash
                        ),
                    ]
                ),
                root_pb2.Expression(
                    raw=r'[\d\.]',
                    tokens=[
                        root_pb2.Token(
                            token="[",
                            type=root_pb2.TokenType.CharacterClass,
                            characterclass=root_pb2.CharacterClassType.OpenSet
                        ),
                        # Was type=Character with a characterclass payload;
                        # aligned with the top-level token list above.
                        root_pb2.Token(
                            token=r"\d",
                            type=root_pb2.TokenType.CharacterClass,
                            characterclass=root_pb2.CharacterClassType.Digit
                        ),
                        # Was characterclass=EscapeType.Reserved (wrong field);
                        # aligned with the top-level token list above.
                        root_pb2.Token(
                            token=r"\.",
                            type=root_pb2.TokenType.Escape,
                            escape=root_pb2.EscapeType.Reserved
                        ),
                        root_pb2.Token(
                            token="]",
                            type=root_pb2.TokenType.CharacterClass,
                            characterclass=root_pb2.CharacterClassType.CloseSet
                        ),
                    ]
                ),
                root_pb2.Expression(
                    raw='/',
                    tokens=[
                        root_pb2.Token(
                            token="/",
                            type=root_pb2.TokenType.Anchor,
                            anchor=root_pb2.AnchorType.ForwardSlash
                        ),
                    ]
                )
            ]
        ))
    }
86f32b1bb1784d2b642472550b27c9d80f34f2b3 | 36,299 | py | Python | python/ngsi_v2/ngsi_v2/api/subscriptions_api.py | orchestracities/sdk | 9dd1e618d6c013ab916f3880df84c7882f6beec6 | [
"Apache-2.0"
] | 2 | 2019-12-22T01:01:34.000Z | 2021-07-03T20:30:03.000Z | python/ngsi_v2/ngsi_v2/api/subscriptions_api.py | orchestracities/sdk | 9dd1e618d6c013ab916f3880df84c7882f6beec6 | [
"Apache-2.0"
] | 2 | 2019-06-06T05:45:45.000Z | 2019-06-06T09:03:10.000Z | python/ngsi_v2/ngsi_v2/api/subscriptions_api.py | orchestracities/sdk | 9dd1e618d6c013ab916f3880df84c7882f6beec6 | [
"Apache-2.0"
] | 2 | 2021-07-03T20:30:06.000Z | 2021-11-30T21:55:02.000Z | # coding: utf-8
"""
ngsi_v2
NGSI V2 API RC-2018.07 # noqa: E501
The version of the OpenAPI document: 0.2.2
Contact: info@orchestracities.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ngsi_v2.api_client import ApiClient
from ngsi_v2.exceptions import (
ApiTypeError,
ApiValueError
)
class SubscriptionsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_a_new_subscription(self, subscription_body, **kwargs): # noqa: E501
"""create_a_new_subscription # noqa: E501
Creates a new subscription. The subscription is represented by a JSON object as described at the beginning of this section. Response: * Successful operation uses 201 Created * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_a_new_subscription(subscription_body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param SubscriptionBody subscription_body: (required)
:param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used..
:param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitely.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_a_new_subscription_with_http_info(subscription_body, **kwargs) # noqa: E501
    def create_a_new_subscription_with_http_info(self, subscription_body, **kwargs):  # noqa: E501
        """create_a_new_subscription  # noqa: E501

        Creates a new subscription. The subscription is represented by a JSON object as described at the beginning of this section. Response: * Successful operation uses 201 Created * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_a_new_subscription_with_http_info(subscription_body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param SubscriptionBody subscription_body: (required)
        :param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used.
        :param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitly.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot explicit arguments plus the kwargs dict so all parameters
        # can be validated and forwarded uniformly.
        local_var_params = locals()

        all_params = ['subscription_body', 'fiware_service', 'fiware_service_path']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones
        # into local_var_params for uniform lookup below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_a_new_subscription" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_body' is set
        if ('subscription_body' not in local_var_params or
                local_var_params['subscription_body'] is None):
            raise ApiValueError("Missing the required parameter `subscription_body` when calling `create_a_new_subscription`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}
        # Multi-tenancy headers are only sent when explicitly provided.
        if 'fiware_service' in local_var_params:
            header_params['Fiware-Service'] = local_var_params['fiware_service']  # noqa: E501
        if 'fiware_service_path' in local_var_params:
            header_params['Fiware-ServicePath'] = local_var_params['fiware_service_path']  # noqa: E501

        form_params = []
        local_var_files = {}

        # The subscription document is sent as the JSON request body.
        body_params = None
        if 'subscription_body' in local_var_params:
            body_params = local_var_params['subscription_body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth', 'BearerAuth']  # noqa: E501

        return self.api_client.call_api(
            '/subscriptions', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_subscription(self, subscription_id, **kwargs): # noqa: E501
"""delete_subscription # noqa: E501
Cancels subscription. Response: * Successful operation uses 204 No Content * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_subscription(subscription_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str subscription_id: subscription Id. (required)
:param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used..
:param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitely.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501
    def delete_subscription_with_http_info(self, subscription_id, **kwargs):  # noqa: E501
        """delete_subscription  # noqa: E501

        Cancels subscription. Response: * Successful operation uses 204 No Content * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_subscription_with_http_info(subscription_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str subscription_id: subscription Id. (required)
        :param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used.
        :param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitly.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot explicit arguments plus the kwargs dict so all parameters
        # can be validated and forwarded uniformly.
        local_var_params = locals()

        all_params = ['subscription_id', 'fiware_service', 'fiware_service_path']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones
        # into local_var_params for uniform lookup below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_subscription" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if ('subscription_id' not in local_var_params or
                local_var_params['subscription_id'] is None):
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `delete_subscription`")  # noqa: E501

        collection_formats = {}

        # The subscription id is interpolated into the request path.
        path_params = {}
        if 'subscription_id' in local_var_params:
            path_params['subscriptionId'] = local_var_params['subscription_id']  # noqa: E501

        query_params = []

        header_params = {}
        # Multi-tenancy headers are only sent when explicitly provided.
        if 'fiware_service' in local_var_params:
            header_params['Fiware-Service'] = local_var_params['fiware_service']  # noqa: E501
        if 'fiware_service_path' in local_var_params:
            header_params['Fiware-ServicePath'] = local_var_params['fiware_service_path']  # noqa: E501

        form_params = []
        local_var_files = {}

        # DELETE sends no request body.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth', 'BearerAuth']  # noqa: E501

        return self.api_client.call_api(
            '/subscriptions/{subscriptionId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def retrieve_subscription(self, subscription_id, **kwargs): # noqa: E501
"""retrieve_subscription # noqa: E501
The response is the subscription represented by a JSON object as described at the beginning of this section. Response: * Successful operation uses 200 OK * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.retrieve_subscription(subscription_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str subscription_id: subscription Id. (required)
:param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used..
:param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitely.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Subscription
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.retrieve_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501
    def retrieve_subscription_with_http_info(self, subscription_id, **kwargs):  # noqa: E501
        """retrieve_subscription  # noqa: E501

        The response is the subscription represented by a JSON object as described at the beginning of this section. Response: * Successful operation uses 200 OK * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.retrieve_subscription_with_http_info(subscription_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str subscription_id: subscription Id. (required)
        :param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used.
        :param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitly.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(Subscription, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot explicit arguments plus the kwargs dict so all parameters
        # can be validated and forwarded uniformly.
        local_var_params = locals()

        all_params = ['subscription_id', 'fiware_service', 'fiware_service_path']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones
        # into local_var_params for uniform lookup below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method retrieve_subscription" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if ('subscription_id' not in local_var_params or
                local_var_params['subscription_id'] is None):
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `retrieve_subscription`")  # noqa: E501

        collection_formats = {}

        # The subscription id is interpolated into the request path.
        path_params = {}
        if 'subscription_id' in local_var_params:
            path_params['subscriptionId'] = local_var_params['subscription_id']  # noqa: E501

        query_params = []

        header_params = {}
        # Multi-tenancy headers are only sent when explicitly provided.
        if 'fiware_service' in local_var_params:
            header_params['Fiware-Service'] = local_var_params['fiware_service']  # noqa: E501
        if 'fiware_service_path' in local_var_params:
            header_params['Fiware-ServicePath'] = local_var_params['fiware_service_path']  # noqa: E501

        form_params = []
        local_var_files = {}

        # GET sends no request body.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth', 'BearerAuth']  # noqa: E501

        # Response body is deserialized into a Subscription model instance.
        return self.api_client.call_api(
            '/subscriptions/{subscriptionId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Subscription',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def retrieve_subscriptions(self, **kwargs): # noqa: E501
"""retrieve_subscriptions # noqa: E501
Returns a list of all the subscriptions present in the system. Response: * Successful operation uses 200 OK * Errors use a non-2xx and (optionally) an error payload.See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.retrieve_subscriptions(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used..
:param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitely.
:param int limit: Limit the number of types to be retrieved
:param int offset: Skip a number of records
:param str options: Options dictionary
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[Subscription]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.retrieve_subscriptions_with_http_info(**kwargs) # noqa: E501
    def retrieve_subscriptions_with_http_info(self, **kwargs):  # noqa: E501
        """retrieve_subscriptions  # noqa: E501

        Returns a list of all the subscriptions present in the system. Response: * Successful operation uses 200 OK * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.retrieve_subscriptions_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used.
        :param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitly.
        :param int limit: Limit the number of types to be retrieved
        :param int offset: Skip a number of records
        :param str options: Options dictionary
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(list[Subscription], status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot explicit arguments plus the kwargs dict so all parameters
        # can be validated and forwarded uniformly.
        local_var_params = locals()

        all_params = ['fiware_service', 'fiware_service_path', 'limit', 'offset', 'options']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones
        # into local_var_params for uniform lookup below.  No parameter is
        # required for this endpoint, so there is no missing-parameter check.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method retrieve_subscriptions" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        collection_formats = {}

        path_params = {}

        # Pagination/formatting options become query-string parameters.
        query_params = []
        if 'limit' in local_var_params:
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        if 'offset' in local_var_params:
            query_params.append(('offset', local_var_params['offset']))  # noqa: E501
        if 'options' in local_var_params:
            query_params.append(('options', local_var_params['options']))  # noqa: E501

        header_params = {}
        # Multi-tenancy headers are only sent when explicitly provided.
        if 'fiware_service' in local_var_params:
            header_params['Fiware-Service'] = local_var_params['fiware_service']  # noqa: E501
        if 'fiware_service_path' in local_var_params:
            header_params['Fiware-ServicePath'] = local_var_params['fiware_service_path']  # noqa: E501

        form_params = []
        local_var_files = {}

        # GET sends no request body.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth', 'BearerAuth']  # noqa: E501

        # Response body is deserialized into a list of Subscription models.
        return self.api_client.call_api(
            '/subscriptions', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Subscription]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_subscription(self, subscription_id, subscription_body, **kwargs): # noqa: E501
"""update_subscription # noqa: E501
Only the fields included in the request are updated in the subscription. Response: * Successful operation uses 204 No Content * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_subscription(subscription_id, subscription_body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str subscription_id: subscription Id. (required)
:param SubscriptionBody subscription_body: (required)
:param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used..
:param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitely.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_subscription_with_http_info(subscription_id, subscription_body, **kwargs) # noqa: E501
    def update_subscription_with_http_info(self, subscription_id, subscription_body, **kwargs):  # noqa: E501
        """update_subscription  # noqa: E501

        Only the fields included in the request are updated in the subscription. Response: * Successful operation uses 204 No Content * Errors use a non-2xx and (optionally) an error payload. See subsection on [Error Responses](https://fiware.github.io/specifications/ngsiv2/stable) for more details.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_subscription_with_http_info(subscription_id, subscription_body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str subscription_id: subscription Id. (required)
        :param SubscriptionBody subscription_body: (required)
        :param str fiware_service: When \"-multiservice\" is used, Orion uses the \"Fiware-Service\" HTTP header in the request to identify the service/tenant. If the header is not present in the HTTP request, the default service/tenant is used.
        :param str fiware_service_path: Fiware-ServicePath is an optional header. It is assumed that all the entities created without Fiware-ServicePath (or that don't include service path information in the database) belongs to a root scope \"/\" implicitly.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot explicit arguments plus the kwargs dict so all parameters
        # can be validated and forwarded uniformly.
        local_var_params = locals()

        all_params = ['subscription_id', 'subscription_body', 'fiware_service', 'fiware_service_path']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones
        # into local_var_params for uniform lookup below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_subscription" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if ('subscription_id' not in local_var_params or
                local_var_params['subscription_id'] is None):
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `update_subscription`")  # noqa: E501
        # verify the required parameter 'subscription_body' is set
        if ('subscription_body' not in local_var_params or
                local_var_params['subscription_body'] is None):
            raise ApiValueError("Missing the required parameter `subscription_body` when calling `update_subscription`")  # noqa: E501

        collection_formats = {}

        # The subscription id is interpolated into the request path.
        path_params = {}
        if 'subscription_id' in local_var_params:
            path_params['subscriptionId'] = local_var_params['subscription_id']  # noqa: E501

        query_params = []

        header_params = {}
        # Multi-tenancy headers are only sent when explicitly provided.
        if 'fiware_service' in local_var_params:
            header_params['Fiware-Service'] = local_var_params['fiware_service']  # noqa: E501
        if 'fiware_service_path' in local_var_params:
            header_params['Fiware-ServicePath'] = local_var_params['fiware_service_path']  # noqa: E501

        form_params = []
        local_var_files = {}

        # The partial update document is sent as the JSON request body.
        body_params = None
        if 'subscription_body' in local_var_params:
            body_params = local_var_params['subscription_body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth', 'BearerAuth']  # noqa: E501

        return self.api_client.call_api(
            '/subscriptions/{subscriptionId}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
| 56.805947 | 366 | 0.652525 | 4,324 | 36,299 | 5.272433 | 0.058279 | 0.033687 | 0.052812 | 0.019739 | 0.965786 | 0.958681 | 0.954294 | 0.943416 | 0.943416 | 0.937538 | 0 | 0.012789 | 0.274057 | 36,299 | 638 | 367 | 56.894984 | 0.852378 | 0.545938 | 0 | 0.791519 | 1 | 0 | 0.219595 | 0.041644 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038869 | false | 0 | 0.017668 | 0 | 0.095406 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8134caf3ddf17a61aabfee7389ac5b89b2b0d763 | 31,687 | py | Python | tests/cli/build/test_build.py | ashemedai/hatch | 9ec00d5e027c992efbc16dd777b1f6926368b6bf | [
"MIT"
] | null | null | null | tests/cli/build/test_build.py | ashemedai/hatch | 9ec00d5e027c992efbc16dd777b1f6926368b6bf | [
"MIT"
] | null | null | null | tests/cli/build/test_build.py | ashemedai/hatch | 9ec00d5e027c992efbc16dd777b1f6926368b6bf | [
"MIT"
] | null | null | null | import pytest
from hatch.project.core import Project
from hatchling.builders.constants import BuildEnvVars
@pytest.fixture(autouse=True)
def local_builder(mock_backend_process, mocker):
    # Applied automatically to every test in this module: when the suite is
    # configured to mock the backend process, patch out
    # VirtualEnvironment.build_environment so no real build environment is
    # set up during `hatch build` invocations.
    if mock_backend_process:
        mocker.patch('hatch.env.virtual.VirtualEnvironment.build_environment')
    yield
def test_backend_not_build_system(hatch, temp_dir, helpers):
    """`hatch build` must refuse projects whose backend is not hatchling."""
    # Scaffold a fresh project to mutate.
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    # Point the build backend at something other than `hatchling.build`.
    project = Project(project_path)
    modified_config = dict(project.raw_config)
    modified_config['build-system']['build-backend'] = 'foo'
    project.save_config(modified_config)

    with project_path.as_cwd():
        result = hatch('build')

    assert result.exit_code == 1, result.output
    assert result.output == helpers.dedent(
        """
        Field `build-system.build-backend` must be set to `hatchling.build`
        """
    )
def test_backend_not_build_dependency(hatch, temp_dir, helpers):
    """`hatch build` must refuse projects that don't require hatchling."""
    # Scaffold a fresh project to mutate.
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    # Strip hatchling from the build-system requirements.
    project = Project(project_path)
    modified_config = dict(project.raw_config)
    modified_config['build-system']['requires'] = []
    project.save_config(modified_config)

    with project_path.as_cwd():
        result = hatch('build')

    assert result.exit_code == 1, result.output
    assert result.output == helpers.dedent(
        """
        Field `build-system.requires` must specify `hatchling` as a requirement
        """
    )
def test_no_targets(hatch, temp_dir, helpers):
    """Building with no configured targets fails with guidance output."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Strip the targets table so none are defined.
    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build'].pop('targets')
    project.save_config(config)

    with path.as_cwd():
        result = hatch('build')

    assert result.exit_code == 1, result.output
    expected = helpers.dedent(
        """
        No targets defined in project configuration.
        Add one or more of the following build targets to pyproject.toml:
        [tool.hatch.build.targets.custom]
        [tool.hatch.build.targets.sdist]
        [tool.hatch.build.targets.wheel]
        """
    )
    assert result.output == expected
def test_unknown_targets(hatch, temp_dir, helpers):
    """Requesting an unrecognized target name fails after env setup."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    with path.as_cwd():
        result = hatch('build', '-t', 'foo')

    assert result.exit_code == 1, result.output
    expected = helpers.dedent(
        """
        Setting up build environment
        Unknown build targets: foo
        """
    )
    assert result.output == expected
def test_mutually_exclusive_hook_options(hatch, temp_dir, helpers):
    """--hooks-only and --no-hooks cannot be combined."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    with path.as_cwd():
        result = hatch('build', '--hooks-only', '--no-hooks')

    assert result.exit_code == 1, result.output
    expected = helpers.dedent(
        """
        Setting up build environment
        Cannot use both --hooks-only and --no-hooks together
        """
    )
    assert result.output == expected
def test_default(hatch, temp_dir, helpers):
    """A plain `hatch build` produces both an sdist and a wheel in dist/."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    with path.as_cwd():
        result = hatch('build')

    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2
    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path.relative_to(path)}
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_explicit_targets(hatch, temp_dir, helpers):
    """`hatch build -t wheel` builds only the wheel target."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    with path.as_cwd():
        result = hatch('build', '-t', 'wheel')

    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 1
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_explicit_directory(hatch, temp_dir, helpers):
    """An explicit output directory argument redirects artifacts there."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    # Output lives outside the project, so displayed paths are absolute.
    build_directory = temp_dir / 'dist'

    with path.as_cwd():
        result = hatch('build', str(build_directory))

    assert result.exit_code == 0, result.output
    assert build_directory.is_dir()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2
    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path}
        Setting up build environment
        [wheel]
        {wheel_path}
        """
    )
    assert result.output == expected
def test_explicit_directory_env_var(hatch, temp_dir, helpers):
    """The location env var redirects artifacts just like the CLI argument."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    build_directory = temp_dir / 'dist'

    # Same effect as passing the directory on the command line.
    with path.as_cwd({BuildEnvVars.LOCATION: str(build_directory)}):
        result = hatch('build')

    assert result.exit_code == 0, result.output
    assert build_directory.is_dir()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2
    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path}
        Setting up build environment
        [wheel]
        {wheel_path}
        """
    )
    assert result.output == expected
def test_clean(hatch, temp_dir, helpers):
    """`hatch build -c` removes prior build artifacts but not foreign files."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook creates an in-tree artifact on build and removes it on clean.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def clean(self, versions):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').unlink()

            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    # Two builds at different versions -> four artifacts accumulate.
    with path.as_cwd():
        result = hatch('build')
        assert result.exit_code == 0, result.output
        result = hatch('version', 'minor')
        assert result.exit_code == 0, result.output
        result = hatch('build')
        assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()
    assert (path / 'my_app' / 'lib.so').is_file()
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 4

    # A file hatch did not produce must survive the clean.
    test_file = build_directory / 'test.txt'
    test_file.touch()

    with path.as_cwd():
        result = hatch('version', '9000')
        assert result.exit_code == 0, result.output
        result = hatch('build', '-c')
        assert result.exit_code == 0, result.output

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 3
    assert test_file in artifacts

    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    assert '9000' in str(sdist_path)
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))
    assert '9000' in str(wheel_path)

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path.relative_to(path)}
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_clean_env_var(hatch, temp_dir, helpers):
    """The clean env var triggers the same behavior as `build -c`."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Two builds at different versions -> four artifacts accumulate.
    with path.as_cwd():
        result = hatch('build')
        assert result.exit_code == 0, result.output
        result = hatch('version', 'minor')
        assert result.exit_code == 0, result.output
        result = hatch('build')
        assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 4

    # A file hatch did not produce must survive the clean.
    test_file = build_directory / 'test.txt'
    test_file.touch()

    with path.as_cwd({BuildEnvVars.CLEAN: 'true'}):
        result = hatch('version', '9000')
        assert result.exit_code == 0, result.output
        result = hatch('build')
        assert result.exit_code == 0, result.output

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 3
    assert test_file in artifacts

    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    assert '9000' in str(sdist_path)
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))
    assert '9000' in str(wheel_path)

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path.relative_to(path)}
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_clean_only(hatch, temp_dir, helpers):
    """`build --clean-only` removes artifacts and runs clean hooks, no build."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook creates an in-tree artifact on build and removes it on clean.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def clean(self, versions):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').unlink()

            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('build')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()
    build_artifact = path / 'my_app' / 'lib.so'
    assert build_artifact.is_file()
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2

    with path.as_cwd():
        result = hatch('version', 'minor')
        assert result.exit_code == 0, result.output
        result = hatch('build', '--clean-only')
        assert result.exit_code == 0, result.output

    # Everything is cleaned and nothing new is built.
    artifacts = list(build_directory.iterdir())
    assert not artifacts
    assert not build_artifact.exists()

    expected = helpers.dedent(
        """
        Setting up build environment
        Setting up build environment
        """
    )
    assert result.output == expected
def test_clean_only_hooks_only(hatch, temp_dir, helpers):
    """`--clean-only --hooks-only` runs clean hooks but keeps dist/ artifacts."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook creates an in-tree artifact on build and removes it on clean.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def clean(self, versions):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').unlink()

            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('build')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()
    build_artifact = path / 'my_app' / 'lib.so'
    assert build_artifact.is_file()
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2

    with path.as_cwd():
        result = hatch('version', 'minor')
        assert result.exit_code == 0, result.output
        result = hatch('build', '--clean-only', '--hooks-only')
        assert result.exit_code == 0, result.output

    # Hook output is removed, but distribution artifacts remain intact.
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2
    assert not build_artifact.exists()

    expected = helpers.dedent(
        """
        Setting up build environment
        Setting up build environment
        """
    )
    assert result.output == expected
def test_clean_hooks_after(hatch, temp_dir, helpers):
    """`--clean-hooks-after` removes hook artifacts once the build finishes."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook creates an in-tree artifact on build and removes it on clean.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def clean(self, versions):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').unlink()

            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('build', '--clean-hooks-after')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    # The hook artifact was cleaned after building; dist artifacts remain.
    build_artifact = path / 'my_app' / 'lib.so'
    assert not build_artifact.exists()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2
    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path.relative_to(path)}
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_clean_hooks_after_env_var(hatch, temp_dir, helpers):
    """The clean-hooks-after env var mirrors the CLI flag's behavior."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook creates an in-tree artifact on build and removes it on clean.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def clean(self, versions):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').unlink()

            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd({BuildEnvVars.CLEAN_HOOKS_AFTER: 'true'}):
        result = hatch('build')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    # The hook artifact was cleaned after building; dist artifacts remain.
    build_artifact = path / 'my_app' / 'lib.so'
    assert not build_artifact.exists()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2
    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path.relative_to(path)}
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_clean_only_no_hooks(hatch, temp_dir, helpers):
    """`--clean-only --no-hooks` removes artifacts but skips clean hooks."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook creates an in-tree artifact on build and removes it on clean.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def clean(self, versions):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').unlink()

            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('build')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()
    build_artifact = path / 'my_app' / 'lib.so'
    assert build_artifact.is_file()
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2

    with path.as_cwd():
        result = hatch('version', 'minor')
        assert result.exit_code == 0, result.output
        result = hatch('build', '--clean-only', '--no-hooks')
        assert result.exit_code == 0, result.output

    # Dist artifacts are removed, but the hook's clean() never ran.
    artifacts = list(build_directory.iterdir())
    assert not artifacts
    assert build_artifact.is_file()

    expected = helpers.dedent(
        """
        Setting up build environment
        Setting up build environment
        """
    )
    assert result.output == expected
def test_hooks_only(hatch, temp_dir, helpers):
    """`--hooks-only` runs build hooks without producing distributions."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook that drops an in-tree artifact for wheel builds.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('-v', 'build', '-t', 'wheel', '--hooks-only')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    # No distributions were produced, only the hook's side effect.
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 0
    assert (path / 'my_app' / 'lib.so').is_file()

    expected = helpers.dedent(
        """
        Setting up build environment
        [wheel]
        Building `wheel` version `standard`
        Only ran build hooks for `wheel` version `standard`
        """
    )
    assert result.output == expected
def test_hooks_only_env_var(hatch, temp_dir, helpers):
    """The hooks-only env var mirrors the `--hooks-only` flag."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook that drops an in-tree artifact for wheel builds.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd({BuildEnvVars.HOOKS_ONLY: 'true'}):
        result = hatch('-v', 'build', '-t', 'wheel')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    # No distributions were produced, only the hook's side effect.
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 0
    assert (path / 'my_app' / 'lib.so').is_file()

    expected = helpers.dedent(
        """
        Setting up build environment
        [wheel]
        Building `wheel` version `standard`
        Only ran build hooks for `wheel` version `standard`
        """
    )
    assert result.output == expected
def test_extensions_only(hatch, temp_dir, helpers):
    """`--ext` builds only extension hooks for the wheel target."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook that drops an in-tree artifact for wheel builds.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('-v', 'build', '--ext')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    # No distributions were produced, only the hook's side effect.
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 0
    assert (path / 'my_app' / 'lib.so').is_file()

    expected = helpers.dedent(
        """
        Setting up build environment
        [wheel]
        Building `wheel` version `standard`
        Only ran build hooks for `wheel` version `standard`
        """
    )
    assert result.output == expected
def test_no_hooks(hatch, temp_dir, helpers):
    """`--no-hooks` builds the wheel without running any build hooks."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook that drops an in-tree artifact for wheel builds.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('build', '-t', 'wheel', '--no-hooks')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    # The wheel exists but the hook's side effect never happened.
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 1
    assert not (path / 'my_app' / 'lib.so').exists()

    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))
    expected = helpers.dedent(
        f"""
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_no_hooks_env_var(hatch, temp_dir, helpers):
    """The no-hooks env var mirrors the `--no-hooks` flag."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom hook that drops an in-tree artifact for wheel builds.
    script_source = helpers.dedent(
        """
        import pathlib

        from hatchling.builders.hooks.plugin.interface import BuildHookInterface

        class CustomHook(BuildHookInterface):
            def initialize(self, version, build_data):
                if self.target_name == 'wheel':
                    pathlib.Path('my_app', 'lib.so').touch()
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['hooks'] = {'custom': {'path': build_script.name}}
    project.save_config(config)

    with path.as_cwd({BuildEnvVars.NO_HOOKS: 'true'}):
        result = hatch('build', '-t', 'wheel')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    # The wheel exists but the hook's side effect never happened.
    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 1
    assert not (path / 'my_app' / 'lib.so').exists()

    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))
    expected = helpers.dedent(
        f"""
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
def test_debug_verbosity(hatch, temp_dir, helpers):
    """Verbose mode surfaces per-version build messages."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    with path.as_cwd():
        result = hatch('-v', 'build', '-t', 'wheel:standard')

    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 1
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [wheel]
        Building `wheel` version `standard`
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
@pytest.mark.allow_backend_process
def test_shipped(hatch, temp_dir, helpers):
    """End-to-end build using the real (unmocked) backend process."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'
    with path.as_cwd():
        result = hatch('build')

    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 2
    sdist_path = next(a for a in artifacts if a.name.endswith('.tar.gz'))
    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))

    expected = helpers.dedent(
        f"""
        Setting up build environment
        [sdist]
        {sdist_path.relative_to(path)}
        Setting up build environment
        [wheel]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
@pytest.mark.allow_backend_process
def test_build_dependencies(hatch, temp_dir, helpers):
    """Target-level build dependencies are installed and importable in hooks."""
    with temp_dir.as_cwd():
        result = hatch('new', 'My App')
    assert result.exit_code == 0, result.output

    path = temp_dir / 'my-app'

    # Custom builder imports `binary` (declared as a target dependency) and
    # records its output to a file before delegating to the wheel builder.
    script_source = helpers.dedent(
        """
        import pathlib

        import binary

        from hatchling.builders.wheel import WheelBuilder

        def get_builder():
            return CustomWheelBuilder

        class CustomWheelBuilder(WheelBuilder):
            def build(self, *args, **kwargs):
                pathlib.Path('test.txt').write_text(str(binary.convert_units(1024)))
                yield from super().build(*args, **kwargs)
        """
    )
    build_script = path / 'build.py'
    build_script.write_text(script_source)

    project = Project(path)
    config = dict(project.raw_config)
    config['tool']['hatch']['build']['targets'] = {'custom': {'dependencies': ['binary']}}
    project.save_config(config)

    with path.as_cwd():
        result = hatch('build')
    assert result.exit_code == 0, result.output

    build_directory = path / 'dist'
    assert build_directory.is_dir()

    artifacts = list(build_directory.iterdir())
    assert len(artifacts) == 1

    # Proof that the dependency was importable inside the build environment.
    output_file = path / 'test.txt'
    assert output_file.is_file()
    assert str(output_file.read_text()) == "(1.0, 'KiB')"

    wheel_path = next(a for a in artifacts if a.name.endswith('.whl'))
    expected = helpers.dedent(
        f"""
        Setting up build environment
        [custom]
        {wheel_path.relative_to(path)}
        """
    )
    assert result.output == expected
| 28.291964 | 93 | 0.605485 | 3,721 | 31,687 | 4.986025 | 0.04703 | 0.055624 | 0.053468 | 0.066836 | 0.937045 | 0.927451 | 0.925619 | 0.924163 | 0.923085 | 0.91969 | 0 | 0.005838 | 0.270237 | 31,687 | 1,119 | 94 | 28.317248 | 0.796489 | 0 | 0 | 0.83689 | 0 | 0 | 0.152213 | 0.022675 | 0 | 0 | 0 | 0 | 0.230183 | 1 | 0.03811 | false | 0 | 0.004573 | 0 | 0.042683 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d49a495768b0d1e4f0b54c103b1be6d699bdd4bb | 23 | py | Python | sst_base/detectors/__init__.py | NSLS-II-SST/sst_base | 0c227a4db04000461c7d7f722b6dfbf034504023 | [
"BSD-3-Clause"
] | null | null | null | sst_base/detectors/__init__.py | NSLS-II-SST/sst_base | 0c227a4db04000461c7d7f722b6dfbf034504023 | [
"BSD-3-Clause"
] | null | null | null | sst_base/detectors/__init__.py | NSLS-II-SST/sst_base | 0c227a4db04000461c7d7f722b6dfbf034504023 | [
"BSD-3-Clause"
] | null | null | null | from .i400 import I400
| 11.5 | 22 | 0.782609 | 4 | 23 | 4.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.315789 | 0.173913 | 23 | 1 | 23 | 23 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d4bc6e3b8a80b06d662815c11731170bc77673e7 | 748,551 | py | Python | src/genie/libs/parser/iosxe/tests/test_show_ospf.py | psolarcz/genieparser | 811c197a1dab6a635e6dec145b99194648bf4ff4 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/test_show_ospf.py | psolarcz/genieparser | 811c197a1dab6a635e6dec145b99194648bf4ff4 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/test_show_ospf.py | psolarcz/genieparser | 811c197a1dab6a635e6dec145b99194648bf4ff4 | [
"Apache-2.0"
] | null | null | null |
# Python
import unittest
from unittest.mock import Mock
# ATS
from ats.topology import Device
from ats.topology import loader
# Metaparser
from genie.metaparser.util.exceptions import SchemaEmptyParserError,\
SchemaMissingKeyError
# iosxe show_ospf
from genie.libs.parser.iosxe.show_ospf import (ShowIpOspf,
ShowIpOspfInterface,
ShowIpOspfNeighborDetail,
ShowIpOspfShamLinks,
ShowIpOspfVirtualLinks,
ShowIpOspfDatabase,
ShowIpOspfDatabaseRouter,
ShowIpOspfDatabaseExternal,
ShowIpOspfDatabaseNetwork,
ShowIpOspfDatabaseSummary,
ShowIpOspfDatabaseOpaqueArea,
ShowIpOspfMplsLdpInterface,
ShowIpOspfMplsTrafficEngLink,
ShowIpOspfMaxMetric,
ShowIpOspfTraffic,
ShowIpOspfNeighbor,
ShowIpOspfDatabaseRouterSelfOriginate,
ShowIpOspfInterfaceBrief,
ShowIpOspfSegmentRouting,
ShowIpOspfSegmentRoutingAdjacencySid,
ShowIpOspfSegmentRoutingLocalBlock,
ShowIpOspfSegmentRoutingGlobalBlock,
ShowIpOspfFastRerouteTiLfa,
ShowIpOspfDatabaseOpaqueAreaSelfOriginate,
ShowIpOspfSegmentRoutingProtectedAdjacencies,
ShowIpOspfSegmentRoutingSidDatabase,
ShowIpOspfDatabaseOpaqueAreaAdvRouter)
# =====================================================================
# Unit test for 'show ip ospf {process_id} segment-routing local-block'
# =====================================================================
class test_show_ip_ospf_segment_routing_local_block(unittest.TestCase):
    """Unit tests for 'show ip ospf {process_id} segment-routing local-block'."""

    device = Device(name='aDevice')
    empty_output = {'execute.return_value': ''}

    # Device output with SR-capable routers (explicit process ID).
    # NOTE(review): column spacing reconstructed — parser regexes are assumed
    # whitespace-tolerant; verify against the original fixture.
    golden_output1 = {'execute.return_value': '''
        PE1#show ip ospf 65109 segment-routing local-block

                    OSPF Router with ID (10.4.1.1) (Process ID 65109)

                        OSPF Segment Routing Local Blocks in Area 8

        Router ID        SR Capable   SRLB Base   SRLB Range
        --------------------------------------------------------
        *10.4.1.1        Yes          15000       1000
         10.16.2.2       Yes          15000       1000

        PE1#
    '''}

    golden_parsed_output1 = {
        'instance': {
            '65109': {
                'router_id': '10.4.1.1',
                'areas': {
                    '0.0.0.8': {
                        'router_id': {
                            '10.4.1.1': {
                                'sr_capable': 'Yes',
                                'srlb_base': 15000,
                                'srlb_range': 1000,
                            },
                            '10.16.2.2': {
                                'sr_capable': 'Yes',
                                'srlb_base': 15000,
                                'srlb_range': 1000,
                            },
                        },
                    },
                },
            },
        },
    }

    golden_parsed_output2 = {
        'instance': {
            '88': {
                'router_id': '10.4.113.144',
                'areas': {
                    '0.0.0.8': {
                        'router_id': {
                            '10.16.2.2': {'sr_capable': 'No'},
                            '10.36.3.3': {'sr_capable': 'No'},
                            '10.64.4.4': {'sr_capable': 'No'},
                            '10.4.113.142': {'sr_capable': 'No'},
                            '10.4.113.144': {'sr_capable': 'No'},
                            '10.4.113.99': {'sr_capable': 'No'},
                        },
                    },
                },
            },
        },
    }

    # Device output where no router is SR capable (no process ID argument).
    golden_output2 = {'execute.return_value': '''
        show ip ospf segment-routing local-block

                    OSPF Router with ID (10.4.113.144) (Process ID 88)

                        OSPF Segment Routing Local Blocks in Area 8

        Router ID        SR Capable   SRLB Base   SRLB Range
        --------------------------------------------------------
         10.16.2.2       No
         10.36.3.3       No
         10.64.4.4       No
         10.4.113.142    No
        *10.4.113.144    No
         10.4.113.99     No
    '''}

    def test_show_ip_ospf_segment_routing_local_block_empty(self):
        # Empty device output must raise instead of returning an empty dict.
        self.maxDiff = None
        self.device = Mock(**self.empty_output)
        obj = ShowIpOspfSegmentRoutingLocalBlock(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse(process_id=65109)

    def test_show_ip_ospf_segment_routing_local_block_full1(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output1)
        obj = ShowIpOspfSegmentRoutingLocalBlock(device=self.device)
        parsed_output = obj.parse(process_id=65109)
        self.assertEqual(parsed_output, self.golden_parsed_output1)

    def test_golden2(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output2)
        obj = ShowIpOspfSegmentRoutingLocalBlock(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output2)
# ============================
# Unit test for 'show ip ospf'
# ============================
class test_show_ip_ospf(unittest.TestCase):
'''Unit test for "show ip ospf" '''
# Stub pyATS device; each test replaces device.execute via Mock(**golden_outputN).
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
# Expected parser result for golden_output1: two OSPF processes, "1" and "2"
# ("2" has 'enable': False because the CLI output says "Routing Process is shutdown").
# NOTE(review): the key 'opqaue_lsa' is spelled as-is throughout this file --
# it must match the key emitted by the ShowIpOspf parser; do not "fix" it here.
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'adjacency_stagger':
{'initial_number': 300,
'maximum_number': 300},
'area_transit': True,
'enable': True,
'areas':
{'0.0.0.0':
{'area_id': '0.0.0.0',
'area_type': 'normal',
'ranges':
{'10.4.0.0/16':
{'advertise': True,
'cost': 10,
'prefix': '10.4.0.0/16'}},
'rrr_enabled': True,
'statistics':
{'area_scope_lsa_cksum_sum': '0x07CF20',
'area_scope_lsa_count': 19,
'area_scope_opaque_lsa_cksum_sum': '0x000000',
'area_scope_opaque_lsa_count': 0,
'dcbitless_lsa_count': 5,
'donotage_lsa_count': 0,
'flood_list_length': 0,
'indication_lsa_count': 0,
'interfaces_count': 3,
'loopback_count': 1,
'spf_last_executed': '00:19:54.849',
'spf_runs_count': 41}}},
'auto_cost':
{'bandwidth_unit': 'mbps',
'enable': False,
'reference_bandwidth': 100},
'bfd':
{'enable': True,
'strict_mode': True},
'database_control':
{'max_lsa': 123},
'db_exchange_summary_list_optimization': True,
'elapsed_time': '1d01h',
'event_log':
{'enable': True,
'max_events': 1000,
'mode': 'cyclic'},
'external_flood_list_length': 0,
'graceful_restart':
{'cisco':
{'enable': False,
'helper_enable': True,
'type': 'cisco'},
'ietf':
{'enable': False,
'helper_enable': True,
'type': 'ietf'}},
'lls': True,
'lsa_group_pacing_timer': 240,
'nsr':
{'enable': False},
'nssa': True,
'numbers':
{'dc_bitless': 0,
'do_not_age': 0,
'external_lsa': 1,
'external_lsa_checksum': '0x007F60',
'opaque_as_lsa': 0,
'opaque_as_lsa_checksum': '0x000000'},
'opqaue_lsa': True,
'interface_flood_pacing_timer': 33,
'retransmission_pacing_timer': 66,
'router_id': '10.4.1.1',
'spf_control':
{'incremental_spf': False,
'throttle':
{'lsa':
{'arrival': 100,
'hold': 200,
'maximum': 5000,
'start': 50},
'spf':
{'hold': 200,
'maximum': 5000,
'start': 50}}},
'start_time': '00:23:49.050',
'stub_router':
{'always':
{'always': False,
'external_lsa': False,
'include_stub': False,
'summary_lsa': False}},
'total_areas': 1,
'total_areas_transit_capable': 0,
'total_normal_areas': 1,
'total_nssa_areas': 0,
'total_stub_areas': 0},
'2':
{'adjacency_stagger':
{'initial_number': 300,
'maximum_number': 300},
'area_transit': True,
'enable': False,
'areas':
{'0.0.0.1':
{'area_id': '0.0.0.1',
'area_type': 'normal',
'ranges':
{'10.4.1.0/24':
{'advertise': True,
'prefix': '10.4.1.0/24'}},
'statistics':
{'area_scope_lsa_cksum_sum': '0x053FED',
'area_scope_lsa_count': 11,
'area_scope_opaque_lsa_cksum_sum': '0x000000',
'area_scope_opaque_lsa_count': 0,
'dcbitless_lsa_count': 1,
'donotage_lsa_count': 0,
'flood_list_length': 0,
'indication_lsa_count': 0,
'interfaces_count': 2,
'spf_last_executed': '03:26:37.769',
'spf_runs_count': 97}}},
'auto_cost':
{'bandwidth_unit': 'mbps',
'enable': False,
'reference_bandwidth': 100},
'bfd':
{'enable': True},
'db_exchange_summary_list_optimization': True,
'domain_id_type': '0x0005',
'domain_id_value': '0.0.0.2',
'elapsed_time': '23:34:42.224',
'external_flood_list_length': 0,
'flags':
{'abr': True,
'asbr': True},
'graceful_restart':
{'cisco':
{'enable': False,
'helper_enable': True,
'type': 'cisco'},
'ietf':
{'enable': False,
'helper_enable': True,
'type': 'ietf'}},
'lls': True,
'lsa_group_pacing_timer': 240,
'nsr':
{'enable': True},
'nssa': True,
'numbers':
{'dc_bitless': 0,
'do_not_age': 0,
'external_lsa': 0,
'external_lsa_checksum': '0x000000',
'opaque_as_lsa': 0,
'opaque_as_lsa_checksum': '0x000000'},
'opqaue_lsa': True,
'redistribution':
{'bgp':
{'bgp_id': 100,
'subnets': 'subnets'
}},
'interface_flood_pacing_timer': 33,
'retransmission_pacing_timer': 66,
'router_id': '10.229.11.11',
'spf_control':
{'incremental_spf': False,
'throttle':
{'lsa':
{'arrival': 100,
'hold': 200,
'maximum': 5000,
'start': 50},
'spf':
{'hold': 200,
'maximum': 5000,
'start': 50}}},
'start_time': '02:17:25.010',
'stub_router':
{'always':
{'always': False,
'external_lsa': False,
'include_stub': False,
'summary_lsa': False}},
'total_areas': 1,
'total_areas_transit_capable': 0,
'total_normal_areas': 1,
'total_nssa_areas': 0,
'total_stub_areas': 0}}}}}}}
# Raw "show ip ospf" CLI text returned by the mocked device for full1.
# The string content is parser input -- it must stay byte-for-byte as-is.
golden_output1 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf
Routing Process "ospf 1" with ID 10.4.1.1
Start time: 00:23:49.050, Time elapsed: 1d01h
Supports only single TOS(TOS0) routes
Supports opaque LSA
Supports Link-local Signaling (LLS)
Supports area transit capability
Supports NSSA (compatible with RFC 3101)
Supports Database Exchange Summary List Optimization (RFC 5243)
Event-log enabled, Maximum number of events: 1000, Mode: cyclic
Router is not originating router-LSAs with maximum metric
Initial SPF schedule delay 50 msecs
Minimum hold time between two consecutive SPFs 200 msecs
Maximum wait time between two consecutive SPFs 5000 msecs
Incremental-SPF disabled
Initial LSA throttle delay 50 msecs
Minimum hold time for LSA throttle 200 msecs
Maximum wait time for LSA throttle 5000 msecs
Minimum LSA arrival 100 msecs
LSA group pacing timer 240 secs
Interface flood pacing timer 33 msecs
Retransmission pacing timer 66 msecs
Maximum number of non self-generated LSA allowed 123
EXCHANGE/LOADING adjacency limit: initial 300, process maximum 300
Number of external LSA 1. Checksum Sum 0x007F60
Number of opaque AS LSA 0. Checksum Sum 0x000000
Number of DCbitless external and opaque AS LSA 0
Number of DoNotAge external and opaque AS LSA 0
Number of areas in this router is 1. 1 normal 0 stub 0 nssa
Number of areas transit capable is 0
External flood list length 0
IETF NSF helper support enabled
Cisco NSF helper support enabled
BFD is enabled in strict mode
Reference bandwidth unit is 100 mbps
Area BACKBONE(0.0.0.0)
Number of interfaces in this area is 3 (1 loopback)
Area has RRR enabled
Area has no authentication
SPF algorithm last executed 00:19:54.849 ago
SPF algorithm executed 41 times
Area ranges are
10.4.0.0/16 Active(10 - configured) Advertise
Number of LSA 19. Checksum Sum 0x07CF20
Number of opaque link LSA 0. Checksum Sum 0x000000
Number of DCbitless LSA 5
Number of indication LSA 0
Number of DoNotAge LSA 0
Flood list length 0
Routing Process "ospf 2" with ID 10.229.11.11
Domain ID type 0x0005, value 0.0.0.2
Start time: 02:17:25.010, Time elapsed: 23:34:42.224
Routing Process is shutdown
Supports only single TOS(TOS0) routes
Supports opaque LSA
Supports Link-local Signaling (LLS)
Supports area transit capability
Supports NSSA (compatible with RFC 3101)
Supports Database Exchange Summary List Optimization (RFC 5243)
Connected to MPLS VPN Superbackbone, VRF VRF1
Event-log disabled
It is an area border and autonomous system boundary router
Redistributing External Routes from,
bgp 100, includes subnets in redistribution
Router is not originating router-LSAs with maximum metric
Initial SPF schedule delay 50 msecs
Minimum hold time between two consecutive SPFs 200 msecs
Maximum wait time between two consecutive SPFs 5000 msecs
Incremental-SPF disabled
Initial LSA throttle delay 50 msecs
Minimum hold time for LSA throttle 200 msecs
Maximum wait time for LSA throttle 5000 msecs
Minimum LSA arrival 100 msecs
LSA group pacing timer 240 secs
Interface flood pacing timer 33 msecs
Retransmission pacing timer 66 msecs
EXCHANGE/LOADING adjacency limit: initial 300, process maximum 300
Number of external LSA 0. Checksum Sum 0x000000
Number of opaque AS LSA 0. Checksum Sum 0x000000
Number of DCbitless external and opaque AS LSA 0
Number of DoNotAge external and opaque AS LSA 0
Number of areas in this router is 1. 1 normal 0 stub 0 nssa
Number of areas transit capable is 0
External flood list length 0
Non-Stop Routing enabled
IETF NSF helper support enabled
Cisco NSF helper support enabled
Reference bandwidth unit is 100 mbps
BFD is enabled
Area 1
Number of interfaces in this area is 2
Area has no authentication
SPF algorithm last executed 03:26:37.769 ago
SPF algorithm executed 97 times
Area ranges are
10.4.1.0/24 Passive Advertise
Number of LSA 11. Checksum Sum 0x053FED
Number of opaque link LSA 0. Checksum Sum 0x000000
Number of DCbitless LSA 1
Number of indication LSA 0
Number of DoNotAge LSA 0
Flood list length 0
'''}
# Expected parser result for golden_output2: single process 65109 with a
# stub-router "on startup" condition (see the max-metric section in the CLI text).
golden_parsed_output2 = {
'vrf': {
'default': {
'address_family': {
'ipv4': {
'instance': {
'65109': {
'adjacency_stagger': {
'initial_number': 300,
'maximum_number': 300
},
'area_transit': True,
'areas': {
'0.0.0.8': {
'area_id': '0.0.0.8',
'area_type': 'normal',
'ranges': {},
'statistics': {
'area_scope_lsa_cksum_sum': '0x07FAE2',
'area_scope_lsa_count': 21,
'area_scope_opaque_lsa_cksum_sum': '0x000000',
'area_scope_opaque_lsa_count': 0,
'dcbitless_lsa_count': 0,
'donotage_lsa_count': 0,
'flood_list_length': 0,
'indication_lsa_count': 0,
'interfaces_count': 2,
'loopback_count': 1,
'spf_last_executed': '13:02:02.080',
'spf_runs_count': 8
}
}
},
'auto_cost': {
'bandwidth_unit': 'mbps',
'enable': True,
'reference_bandwidth': 2488
},
'bfd': {
'enable': False
},
'db_exchange_summary_list_optimization': True,
'elapsed_time': '13:07:02.634',
'enable': True,
'event_log': {
'enable': True,
'max_events': 1000,
'mode': 'cyclic'
},
'external_flood_list_length': 0,
'graceful_restart': {
'cisco': {
'enable': False,
'helper_enable': True,
'type': 'cisco'
},
'ietf': {
'enable': False,
'helper_enable': True,
'type': 'ietf'
}
},
'interface_flood_pacing_timer': 33,
'lls': True,
'lsa_group_pacing_timer': 240,
'nsr': {
'enable': False
},
'nssa': True,
'numbers': {
'dc_bitless': 0,
'do_not_age': 0,
'external_lsa': 2,
'external_lsa_checksum': '0x00F934',
'opaque_as_lsa': 0,
'opaque_as_lsa_checksum': '0x000000'
},
'opqaue_lsa': True,
'retransmission_pacing_timer': 66,
'router_id': '10.169.197.254',
'spf_control': {
'incremental_spf': False,
'throttle': {
'lsa': {
'arrival': 100,
'hold': 200,
'maximum': 5000,
'start': 50},
'spf': {
'hold': 3000,
'maximum': 3000,
'start': 500
}
}
},
'start_time': '00:02:39.151',
'stub_router': {
'on_startup': {
'include_stub': True,
'on_startup': 300,
'state': 'inactive'
}
},
'total_areas': 1,
'total_areas_transit_capable': 0,
'total_normal_areas': 1,
'total_nssa_areas': 0,
'total_stub_areas': 0,
}
}
}
}
}
}
}
# Raw "show ip ospf" CLI text returned by the mocked device for full2.
golden_output2 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf
Load for five secs: 1%/0%; one minute: 1%; five minutes: 1%
Time source is NTP, 23:17:46.919 EST Fri May 3 2019
Routing Process "ospf 65109" with ID 10.169.197.254
Start time: 00:02:39.151, Time elapsed: 13:07:02.634
Supports only single TOS(TOS0) routes
Supports opaque LSA
Supports Link-local Signaling (LLS)
Supports area transit capability
Supports NSSA (compatible with RFC 3101)
Supports Database Exchange Summary List Optimization (RFC 5243)
Event-log enabled, Maximum number of events: 1000, Mode: cyclic
Originating router-LSAs with maximum metric
Condition: on startup for 300 seconds, State: inactive
Advertise stub links with maximum metric in router-LSAs
Unset reason: timer expired, Originated for 300 seconds
Unset time: 00:07:39.152, Time elapsed: 13:02:02.633
Initial SPF schedule delay 500 msecs
Minimum hold time between two consecutive SPFs 3000 msecs
Maximum wait time between two consecutive SPFs 3000 msecs
Incremental-SPF disabled
Initial LSA throttle delay 50 msecs
Minimum hold time for LSA throttle 200 msecs
Maximum wait time for LSA throttle 5000 msecs
Minimum LSA arrival 100 msecs
LSA group pacing timer 240 secs
Interface flood pacing timer 33 msecs
Retransmission pacing timer 66 msecs
EXCHANGE/LOADING adjacency limit: initial 300, process maximum 300
Number of external LSA 2. Checksum Sum 0x00F934
Number of opaque AS LSA 0. Checksum Sum 0x000000
Number of DCbitless external and opaque AS LSA 0
Number of DoNotAge external and opaque AS LSA 0
Number of areas in this router is 1. 1 normal 0 stub 0 nssa
Number of areas transit capable is 0
External flood list length 0
IETF NSF helper support enabled
Cisco NSF helper support enabled
Reference bandwidth unit is 2488 mbps
Area 8
Number of interfaces in this area is 2 (1 loopback)
Area has no authentication
SPF algorithm last executed 13:02:02.080 ago
SPF algorithm executed 8 times
Area ranges are
Number of LSA 21. Checksum Sum 0x07FAE2
Number of opaque link LSA 0. Checksum Sum 0x000000
Number of DCbitless LSA 0
Number of indication LSA 0
Number of DoNotAge LSA 0
Flood list length 0
'''}
def test_show_ip_ospf_full1(self):
"""golden_output1 must parse into golden_parsed_output1."""
self.maxDiff = None
self.device = Mock(**self.golden_output1)
obj = ShowIpOspf(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_full2(self):
"""golden_output2 must parse into golden_parsed_output2."""
self.maxDiff = None
self.device = Mock(**self.golden_output2)
obj = ShowIpOspf(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output2)
def test_show_ip_ospf_empty(self):
"""Empty device output must raise SchemaEmptyParserError."""
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspf(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
class test_show_ip_ospf_interface_brief(unittest.TestCase):
    '''Unit test for "show ip ospf interface brief" '''

    # Stub pyATS device; tests replace device.execute via Mock(**...).
    device = Device(name='aDevice')
    empty_output = {'execute.return_value': ''}

    # Expected parser result for golden_output_brief below.
    golden_parsed_output_brief = {
        'instance': {
            '65109': {
                'areas': {
                    '0.0.0.8': {
                        'interfaces': {
                            'Loopback0': {
                                'ip_address': '10.169.197.254/32',
                                'cost': 1,
                                'state': 'LOOP',
                                'nbrs_full': 0,
                                'nbrs_count': 0,
                            },
                            'GigabitEthernet4': {
                                'ip_address': '10.169.197.98/30',
                                'cost': 1000,
                                'state': 'P2P',
                                'nbrs_full': 1,
                                'nbrs_count': 1,
                            },
                            'GigabitEthernet2': {
                                'ip_address': '10.169.197.94/30',
                                'cost': 1000,
                                'state': 'BDR',
                                'nbrs_full': 1,
                                'nbrs_count': 1,
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw CLI text fed to the parser; parser input, keep byte-for-byte as-is.
    golden_output_brief = {'execute.return_value': '''
show ip ospf interface brief
Load for five secs: 2%/0%; one minute: 2%; five minutes: 1%
Time source is NTP, 01:20:44.789 EST Wed Jul 17 2019
Interface PID Area IP Address/Mask Cost State Nbrs F/C
Lo0 65109 8 10.169.197.254/32 1 LOOP 0/0
Gi4 65109 8 10.169.197.98/30 1000 P2P 1/1
Gi2 65109 8 10.169.197.94/30 1000 BDR 1/1
'''}

    def test_show_ip_ospf_interface_brief(self):
        """Golden output must parse into golden_parsed_output_brief."""
        # maxDiff added for consistency with sibling tests: show the full
        # dict diff on assertion failure.
        self.maxDiff = None
        self.device = Mock(**self.golden_output_brief)
        obj = ShowIpOspfInterfaceBrief(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_brief)

    def test_show_ip_ospf_interface_brief_empty(self):
        """Empty device output must raise SchemaEmptyParserError.

        Added because ``empty_output`` was defined but never exercised,
        unlike in the sibling test classes.
        """
        self.maxDiff = None
        self.device = Mock(**self.empty_output)
        obj = ShowIpOspfInterfaceBrief(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()
# ======================================
# Unit test for 'show ip ospf interface'
# ======================================
class test_show_ip_ospf_interface(unittest.TestCase):
'''Unit test for "show ip ospf interface" '''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'vrf':
{'VRF1':
{'address_family':
{'ipv4':
{'instance':
{'2':
{'areas':
{'0.0.0.1':
{'interfaces':
{'GigabitEthernet3':
{'attached': 'interface enable',
'bdr_ip_addr': '10.186.5.5',
'bdr_router_id': '10.115.55.55',
'bfd':
{'enable': False},
'cost': 1,
'dead_interval': 40,
'demand_circuit': False,
'dr_ip_addr': '10.186.5.1',
'dr_router_id': '10.229.11.11',
'enable': True,
'flood_queue_length': 0,
'ipfrr_candidate': True,
'ipfrr_protected': True,
'graceful_restart':
{'cisco':
{'helper': True,
'type': 'cisco'},
'ietf':
{'helper': True,
'type': 'ietf'}},
'lls': True,
'oob_resync_timeout': 40,
'hello_interval': 10,
'hello_timer': '00:00:08',
'if_cfg': True,
'index': '1/1/1',
'interface_id': 9,
'interface_type': 'broadcast',
'ip_address': '10.186.5.1/24',
'last_flood_scan_length': 0,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'max_flood_scan_length': 7,
'max_flood_scan_time_msec': 1,
'name': 'GigabitEthernet3',
'neighbors':
{'10.115.55.55':
{'bdr_router_id': '10.115.55.55'}},
'next': '0x0(0)/0x0(0)/0x0(0)',
'passive': False,
'priority': 1,
'retransmit_interval': 5,
'router_id': '10.229.11.11',
'state': 'dr',
'statistics':
{'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'ti_lfa_protected': False,
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40}},
'sham_links':
{'10.229.11.11 10.151.22.22':
{'attached': 'not attached',
'bfd':
{'enable': False},
'cost': 111,
'dead_interval': 40,
'demand_circuit': True,
'enable': True,
'flood_queue_length': 0,
'graceful_restart':
{'cisco':
{'helper': True,
'type': 'cisco'},
'ietf':
{'helper': True,
'type': 'ietf'}},
'lls': True,
'oob_resync_timeout': 40,
'hello_interval': 10,
'hello_timer': '00:00:00',
'index': '1/2/2',
'interface_id': 14,
'interface_type': 'sham-link',
'ip_address': '0.0.0.0/0',
'last_flood_scan_length': 1,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'max_flood_scan_length': 5,
'max_flood_scan_time_msec': 1,
'name': 'SL1',
'next': '0x0(0)/0x0(0)/0x0(0)',
'passive': False,
'retransmit_interval': 5,
'router_id': '10.229.11.11',
'state': 'point-to-point',
'statistics':
{'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'ti_lfa_protected': False,
'ttl_security':
{'enable': True,
'hops': 3},
'topology':
{0:
{'cost': 111,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40}}}}}}}}},
'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'interfaces':
{'GigabitEthernet1':
{'attached': 'interface enable',
'authentication':
{'auth_trailer_key':
{'crypto_algorithm': 'md5',
'youngest_key_id': 2}},
'bdr_ip_addr': '10.1.4.1',
'bdr_router_id': '10.4.1.1',
'bfd':
{'enable': False},
'cost': 1,
'dead_interval': 40,
'demand_circuit': False,
'dr_ip_addr': '10.1.4.4',
'dr_router_id': '10.64.4.4',
'enable': True,
'flood_queue_length': 0,
'ipfrr_candidate': True,
'ipfrr_protected': True,
'graceful_restart':
{'cisco':
{'helper': True,
'type': 'cisco'},
'ietf':
{'helper': True,
'type': 'ietf'}},
'lls': True,
'oob_resync_timeout': 40,
'hello_interval': 10,
'hello_timer': '00:00:08',
'if_cfg': True,
'index': '1/2/2',
'interface_id': 7,
'interface_type': 'broadcast',
'ip_address': '10.1.4.1/24',
'last_flood_scan_length': 3,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'max_flood_scan_length': 3,
'max_flood_scan_time_msec': 1,
'name': 'GigabitEthernet1',
'neighbors':
{'10.64.4.4':
{'dr_router_id': '10.64.4.4'}},
'next': '0x0(0)/0x0(0)/0x0(0)',
'passive': False,
'priority': 1,
'retransmit_interval': 5,
'router_id': '10.4.1.1',
'state': 'bdr',
'statistics':
{'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'ti_lfa_protected': False,
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40},
'GigabitEthernet2':
{'attached': 'interface enable',
'authentication':
{'auth_trailer_key':
{'crypto_algorithm': 'simple'}},
'bdr_ip_addr': '10.1.2.2',
'bdr_router_id': '10.16.2.2',
'bfd':
{'enable': False},
'cost': 1,
'dead_interval': 40,
'demand_circuit': False,
'dr_ip_addr': '10.1.2.1',
'dr_router_id': '10.4.1.1',
'enable': True,
'flood_queue_length': 0,
'ipfrr_candidate': True,
'ipfrr_protected': True,
'graceful_restart':
{'cisco':
{'helper': True,
'type': 'cisco'},
'ietf':
{'helper': True,
'type': 'ietf'}},
'lls': True,
'oob_resync_timeout': 40,
'hello_interval': 10,
'hello_timer': '00:00:05',
'if_cfg': True,
'index': '1/3/3',
'interface_id': 8,
'interface_type': 'broadcast',
'ip_address': '10.1.2.1/24',
'last_flood_scan_length': 1,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'max_flood_scan_length': 3,
'max_flood_scan_time_msec': 1,
'name': 'GigabitEthernet2',
'neighbors':
{'10.16.2.2':
{'bdr_router_id': '10.16.2.2'}},
'next': '0x0(0)/0x0(0)/0x0(0)',
'passive': False,
'prefix_suppression': True,
'priority': 1,
'retransmit_interval': 5,
'router_id': '10.4.1.1',
'state': 'dr',
'statistics':
{'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'ti_lfa_protected': False,
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40},
'Loopback0':
{'attached': 'interface enable',
'bfd':
{'enable': False},
'cost': 1,
'demand_circuit': False,
'enable': True,
'if_cfg': True,
'interface_id': 11,
'interface_type': 'loopback',
'ip_address': '10.4.1.1/32',
'line_protocol': True,
'name': 'Loopback0',
'router_id': '10.4.1.1',
'stub_host': True,
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}}}}}}}}}}}}}
golden_parsed_output2 = {
'vrf':
{'VRF1':
{'address_family':
{'ipv4':
{'instance':
{'2':
{'areas':
{'0.0.0.1':
{'interfaces':
{'Loopback1':
{'attached': 'interface enable',
'bfd':
{'enable': False},
'cost': 1,
'demand_circuit': False,
'enable': True,
'if_cfg': True,
'interface_type': 'loopback',
'ip_address': '10.94.44.44/32',
'line_protocol': True,
'name': 'Loopback1',
'router_id': '10.64.4.4',
'stub_host': True,
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}}}},
'virtual_links':
{'0.0.0.1 10.64.4.4':
{'attached': 'not attached',
'bfd':
{'enable': False},
'cost': 1,
'dead_interval': 44,
'demand_circuit': True,
'enable': True,
'flood_queue_length': 0,
'graceful_restart':
{'cisco':
{'helper': True,
'type': 'cisco'},
'ietf':
{'helper': True,
'type': 'ietf'}},
'hello_interval': 4,
'hello_timer': '00:00:02',
'index': '2/6',
'interface_type': 'virtual-link',
'ip_address': '10.19.4.4/24',
'last_flood_scan_length': 2,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'lls': True,
'max_flood_scan_length': 8,
'max_flood_scan_time_msec': 0,
'name': 'VL1',
'next': '0x0(0)/0x0(0)',
'oob_resync_timeout': 44,
'passive': False,
'retransmit_interval': 5,
'router_id': '10.64.4.4',
'state': 'point-to-point',
'statistics':
{'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40}}}}}}}}},
'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.1':
{'interfaces':
{'GigabitEthernet0/0':
{'attached': 'interface enable',
'bdr_ip_addr': '10.229.4.2',
'bdr_router_id': '10.16.2.2',
'bfd':
{'enable': False},
'cost': 1,
'dead_interval': 40,
'demand_circuit': False,
'dr_ip_addr': '10.229.4.4',
'dr_router_id': '10.64.4.4',
'enable': True,
'flood_queue_length': 0,
'graceful_restart':
{'cisco':
{'helper': True,
'type': 'cisco'},
'ietf':
{'helper': True,
'type': 'ietf'}},
'hello_interval': 10,
'hello_timer': '00:00:02',
'if_cfg': True,
'index': '2/3',
'interface_type': 'broadcast',
'ip_address': '10.229.4.4/24',
'last_flood_scan_length': 1,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'lls': True,
'max_flood_scan_length': 10,
'max_flood_scan_time_msec': 10,
'name': 'GigabitEthernet0/0',
'neighbors':
{'10.16.2.2':
{'bdr_router_id': '10.16.2.2'}},
'next': '0x0(0)/0x0(0)',
'oob_resync_timeout': 40,
'passive': False,
'priority': 1,
'retransmit_interval': 5,
'router_id': '10.64.4.4',
'state': 'dr',
'statistics':
{'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40},
'GigabitEthernet0/1':
{'attached': 'interface enable',
'bdr_ip_addr': '10.19.4.3',
'bdr_router_id': '10.36.3.3',
'bfd':
{'enable': False},
'cost': 1,
'dead_interval': 40,
'demand_circuit': False,
'dr_ip_addr': '10.19.4.4',
'dr_router_id': '10.64.4.4',
'enable': True,
'flood_queue_length': 0,
'graceful_restart':
{'cisco':
{'helper': True,
'type': 'cisco'},
'ietf':
{'helper': True,
'type': 'ietf'}},
'hello_interval': 10,
'hello_timer': '00:00:02',
'if_cfg': True,
'index': '3/4',
'interface_type': 'broadcast',
'ip_address': '10.19.4.4/24',
'last_flood_scan_length': 0,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'lls': True,
'max_flood_scan_length': 11,
'max_flood_scan_time_msec': 1,
'name': 'GigabitEthernet0/1',
'neighbors':
{'10.36.3.3':
{'bdr_router_id': '10.36.3.3'}},
'next': '0x0(0)/0x0(0)',
'oob_resync_timeout': 40,
'passive': False,
'priority': 1,
'retransmit_interval': 5,
'router_id': '10.64.4.4',
'state': 'dr',
'statistics':
{'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40},
'Loopback0':
{'attached': 'interface enable',
'bfd':
{'enable': False},
'cost': 1,
'demand_circuit': False,
'enable': True,
'if_cfg': True,
'interface_type': 'loopback',
'ip_address': '10.64.4.4/32',
'line_protocol': True,
'name': 'Loopback0',
'router_id': '10.64.4.4',
'stub_host': True,
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}}}}}}}}}}}}}
golden_parsed_output3 = {
'vrf': {
'default': {
'address_family': {
'ipv4': {
'instance': {
'65109': {
'areas': {
'0.0.0.8': {
'interfaces': {
'GigabitEthernet2': {
'attached': 'network statement',
'bdr_ip_addr': '10.169.197.94',
'bdr_router_id': '10.169.197.254',
'bfd': {'enable': False},
'cost': 1000,
'dead_interval': 40,
'demand_circuit': False,
'dr_ip_addr': '10.169.197.93',
'dr_router_id': '10.169.197.252',
'enable': True,
'flood_queue_length': 0,
'graceful_restart': {
'cisco': {
'helper': True,
'type': 'cisco'},
'ietf': {
'helper': True,
'type': 'ietf'}},
'hello_interval': 10,
'hello_timer': '00:00:06',
'index': '1/1/1',
'interface_id': 8,
'interface_type': 'broadcast',
'ip_address': '10.169.197.94/30',
'ipfrr_candidate': True,
'ipfrr_protected': True,
'last_flood_scan_length': 3,
'last_flood_scan_time_msec': 0,
'line_protocol': True,
'lls': True,
'max_flood_scan_length': 10,
'max_flood_scan_time_msec': 1,
'name': 'GigabitEthernet2',
'neighbors': {'10.169.197.252': {'dr_router_id': '10.169.197.252'}},
'next': '0x0(0)/0x0(0)/0x0(0)',
'oob_resync_timeout': 40,
'passive': False,
'priority': 1,
'retransmit_interval': 5,
'router_id': '10.169.197.254',
'state': 'bdr',
'statistics': {
'adj_nbr_count': 1,
'nbr_count': 1,
'num_nbrs_suppress_hello': 0},
'ti_lfa_protected': False,
'topology': {
0: {
'cost': 1000,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'transmit_delay': 1,
'wait_interval': 40}}}}}}}}}}}
golden_parsed_output4 = {
'vrf': {
'default': {
'address_family': {
'ipv4': {
'instance': {
'8888': {
'areas': {
'0.0.0.8': {
'interfaces': {
'GigabitEthernet2': {
'router_id': '10.4.1.1',
'interface_type': 'point-to-point',
'cost': 1,
'demand_circuit': False,
'bfd': {
'enable': False,
},
'name': 'GigabitEthernet2',
'ip_address': '10.0.0.6/30',
'interface_id': 8,
'attached': 'network statement',
'enable': True,
'line_protocol': True,
'topology': {
0: {
'cost': 1,
'name': 'Base',
'disabled': False,
'shutdown': False,
},
},
'transmit_delay': 1,
'state': 'point-to-point',
'hello_interval': 10,
'dead_interval': 40,
'wait_interval': 40,
'retransmit_interval': 5,
'oob_resync_timeout': 40,
'passive': False,
'hello_timer': '00:00:00',
'lls': True,
'graceful_restart': {
'cisco': {
'type': 'cisco',
'helper': True,
},
'ietf': {
'type': 'ietf',
'helper': True,
},
},
'ipfrr_protected': True,
'ipfrr_candidate': True,
'ti_lfa_protected': False,
'index': '1/1/1',
'flood_queue_length': 0,
'next': '0x0(0)/0x0(0)/0x0(0)',
'last_flood_scan_length': 1,
'max_flood_scan_length': 14,
'last_flood_scan_time_msec': 1,
'max_flood_scan_time_msec': 8,
'statistics': {
'nbr_count': 1,
'adj_nbr_count': 1,
'num_nbrs_suppress_hello': 0,
},
'teapp': {
'topology_id': '0x0',
'SRTE': {
'affinity': {
'length': 32,
'bits': '0x00000010',
},
'extended_affinity': {
'length': 32,
'bits': '0x00000010',
},
},
},
'sr_policy_manager': {
'te_opaque_lsa': 'Source of link information OSPF',
},
'sr_mpls_enabled': True,
},
},
},
},
},
},
},
},
},
},
}
def test_show_ip_ospf_interface_full1(self):
"""Canned outputs for 'show ip ospf interface' and its follow-up
sham-link / running-config lookups must parse into
golden_parsed_output1."""
self.maxDiff = None
# parse() issues several CLI commands; side_effect routes each command
# string to its canned output in self.outputs.
def mapper(key):
return self.outputs[key]
# Raw output for 'show ip ospf interface' (parser input; the string
# content must stay byte-for-byte as-is).
raw1 = '''\
R1_ospf_xe#show ip ospf interface
Loopback0 is up, line protocol is up
Internet Address 10.4.1.1/32, Interface ID 11, Area 0
Attached via Interface Enable
Process ID 1, Router ID 10.4.1.1, Network Type LOOPBACK, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Loopback interface is treated as a stub Host
GigabitEthernet2 is up, line protocol is up (connected)
Internet Address 10.1.2.1/24, Interface ID 8, Area 0
Attached via Interface Enable
Process ID 1, Router ID 10.4.1.1, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.4.1.1, Interface address 10.1.2.1
Backup Designated router (ID) 10.16.2.2, Interface address 10.1.2.2
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:05
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Prefix-suppression is enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/3/3, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 3
Last flood scan time is 0 msec, maximum is 1 msec
Simple password authentication enabled
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
GigabitEthernet1 is up, line protocol is up
Internet Address 10.1.4.1/24, Interface ID 7, Area 0
Attached via Interface Enable
Process ID 1, Router ID 10.4.1.1, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State BDR, Priority 1
Designated Router (ID) 10.64.4.4, Interface address 10.1.4.4
Backup Designated router (ID) 10.4.1.1, Interface address 10.1.4.1
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:08
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/2/2, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 3, maximum is 3
Last flood scan time is 0 msec, maximum is 1 msec
Cryptographic authentication enabled
Youngest key id is 2
Rollover in progress, 1 neighbor(s) using the old key(s):
key id 1 algorithm MD5
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.64.4.4 (Designated Router)
Suppress hello for 0 neighbor(s)
OSPF_SL1 is up, line protocol is up
Internet Address 0.0.0.0/0, Interface ID 14, Area 1
Attached via Not Attached
Process ID 2, Router ID 10.229.11.11, Network Type SHAM_LINK, Cost: 111
Topology-MTID Cost Disabled Shutdown Topology Name
0 111 no no Base
Configured as demand circuit
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 1)
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Strict TTL checking enabled, up to 3 hops allowed
Can not be protected by per-prefix Loop-Free FastReroute
Can not be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/2/2, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 5
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.151.22.22
Suppress hello for 0 neighbor(s)
GigabitEthernet3 is up, line protocol is up
Internet Address 10.186.5.1/24, Interface ID 9, Area 1
Attached via Interface Enable
Process ID 2, Router ID 10.229.11.11, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.229.11.11, Interface address 10.186.5.1
Backup Designated router (ID) 10.115.55.55, Interface address 10.186.5.5
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:08
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/1/1, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 0, maximum is 7
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.115.55.55 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
'''
# Follow-up command outputs the parser requests for sham-link details.
raw2 = '''\
R1_ospf_xe#show ip ospf sham-links | i OSPF_SL1
Sham Link OSPF_SL1 to address 10.151.22.22 is up
'''
raw3 = '''\
R1_ospf_xe#show running-config | i sham-link | i 10.151.22.22
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
'''
raw4 = '''\
R1_ospf_xe#show running-config | section router ospf 1
router ospf 1
mpls traffic-eng router-id Loopback0
mpls traffic-eng area 0
'''
raw5 = '''\
R1_ospf_xe#show running-config | section router ospf 2
router ospf 2 vrf VRF1
area 1 virtual-link 10.100.5.5
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
redistribute bgp
'''
# Map each command string to its canned output for mapper() above.
self.outputs = {}
self.outputs['show ip ospf interface'] = raw1
self.outputs['show ip ospf sham-links | i OSPF_SL1'] = raw2
self.outputs['show running-config | i sham-link | i 10.151.22.22'] = raw3
self.outputs['show running-config | section router ospf 1'] = raw4
self.outputs['show running-config | section router ospf 2'] = raw5
self.device.execute = Mock()
self.device.execute.side_effect = mapper
obj = ShowIpOspfInterface(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_interface_full2(self):
# Golden test 2: parse 'show ip ospf interface' output that includes a
# virtual link (OSPF_VL1) plus loopback and broadcast interfaces, and
# verify the result against self.golden_parsed_output2.
self.maxDiff = None
# side_effect mapper: routes each CLI command the parser executes to the
# canned output registered in self.outputs below.
def mapper(key):
return self.outputs[key]
# Primary device output for 'show ip ospf interface' (virtual-link case).
# NOTE(review): string content is parser input and whitespace-sensitive —
# the original leading indentation appears stripped here; do not reflow.
raw1 = '''\
R4_ospf_iosv#show ip ospf interface (including virtual-link)
OSPF_VL1 is up, line protocol is up
Internet Address 10.19.4.4/24, Area 1, Attached via Not Attached
Process ID 2, Router ID 10.64.4.4, Network Type VIRTUAL_LINK, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Configured as demand circuit
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 7)
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 4, Dead 44, Wait 40, Retransmit 5
oob-resync timeout 44
Hello due in 00:00:02
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 2/6, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 2, maximum is 8
Last flood scan time is 0 msec, maximum is 0 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.36.3.3
Suppress hello for 0 neighbor(s)
Loopback0 is up, line protocol is up
Internet Address 10.64.4.4/32, Area 1, Attached via Interface Enable
Process ID 1, Router ID 10.64.4.4, Network Type LOOPBACK, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Loopback interface is treated as a stub Host
GigabitEthernet0/1 is up, line protocol is up
Internet Address 10.19.4.4/24, Area 1, Attached via Interface Enable
Process ID 1, Router ID 10.64.4.4, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.64.4.4, Interface address 10.19.4.4
Backup Designated router (ID) 10.36.3.3, Interface address 10.19.4.3
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:02
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 3/4, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 0, maximum is 11
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.36.3.3 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
GigabitEthernet0/0 is up, line protocol is up
Internet Address 10.229.4.4/24, Area 1, Attached via Interface Enable
Process ID 1, Router ID 10.64.4.4, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.64.4.4, Interface address 10.229.4.4
Backup Designated router (ID) 10.16.2.2, Interface address 10.229.4.2
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:02
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 2/3, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 10
Last flood scan time is 0 msec, maximum is 10 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
Loopback1 is up, line protocol is up
Internet Address 10.94.44.44/32, Area 1, Attached via Interface Enable
Process ID 2, Router ID 10.64.4.4, Network Type LOOPBACK, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Loopback interface is treated as a stub Host
'''
# Follow-up command outputs the parser executes to resolve the
# virtual-link name/configuration and the per-process running-config.
raw2 = '''\
R1_ospf_xe#show ip ospf virtual-links | i OSPF_VL1
Virtual Link OSPF_VL1 to router 10.100.5.5 is down
'''
raw3 = '''\
R1_ospf_xe#show running-config | i virtual-link | i 10.100.5.5
area 1 virtual-link 10.100.5.5
'''
raw4 = '''\
R1_ospf_xe#show running-config | section router ospf 1
router ospf 1
mpls traffic-eng router-id Loopback0
mpls traffic-eng area 0
'''
raw5 = '''\
R1_ospf_xe#show running-config | section router ospf 2
router ospf 2 vrf VRF1
area 1 virtual-link 10.100.5.5
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
redistribute bgp
'''
# Register every command -> output pair the mocked device must answer.
self.outputs = {}
self.outputs['show ip ospf interface'] = raw1
self.outputs['show ip ospf virtual-links | i OSPF_VL1'] = raw2
self.outputs['show running-config | i virtual-link | i 10.100.5.5'] = raw3
self.outputs['show running-config | section router ospf 1'] = raw4
self.outputs['show running-config | section router ospf 2'] = raw5
# Mock device.execute so each call is served by mapper().
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Run the parser and compare against the expected golden structure.
obj = ShowIpOspfInterface(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output2)
def test_show_ip_ospf_interface_full3(self):
# Golden test 3: parse output for a single interface
# ('show ip ospf interface GigabitEthernet2') including the
# 'Load for five secs'/'Time source' banner lines, and verify against
# self.golden_parsed_output3.
self.maxDiff = None
# Device output for the interface-specific show command.
# NOTE(review): string content is parser input and whitespace-sensitive —
# the original leading indentation appears stripped here; do not reflow.
raw1='''\
show ip ospf interface GigabitEthernet2
Load for five secs: 2%/0%; one minute: 2%; five minutes: 2%
Time source is NTP, 04:44:14.272 EST Sat Jun 15 2019
GigabitEthernet2 is up, line protocol is up
Internet Address 10.169.197.94/30, Interface ID 8, Area 8
Attached via Network Statement
Process ID 65109, Router ID 10.169.197.254, Network Type BROADCAST, Cost: 1000
Topology-MTID Cost Disabled Shutdown Topology Name
0 1000 no no Base
Transmit Delay is 1 sec, State BDR, Priority 1
Designated Router (ID) 10.169.197.252, Interface address 10.169.197.93
Backup Designated router (ID) 10.169.197.254, Interface address 10.169.197.94
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:06
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/1/1, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 3, maximum is 10
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.169.197.252 (Designated Router)
Suppress hello for 0 neighbor(s)
'''
# Running-config section for OSPF process 65109 (used by the parser to
# correlate process-level configuration).
raw2='''\
show running-config | section router ospf 65109
router ospf 65109
router-id 10.169.197.254
max-metric router-lsa on-startup 300
auto-cost reference-bandwidth 2488
timers throttle spf 500 3000 3000
network 10.1.8.0 0.0.0.255 area 8
network 10.169.197.4 0.0.0.3 area 8
network 10.169.197.88 0.0.0.3 area 8
network 10.169.197.92 0.0.0.3 area 8
network 10.169.197.96 0.0.0.3 area 8
network 10.169.197.254 0.0.0.0 area 8
mpls ldp sync
action 50 cli command "router ospf 65109"
'''
# side_effect mapper: routes each executed CLI command to its canned output.
def mapper(key):
return self.outputs[key]
self.outputs = {}
self.outputs['show ip ospf interface GigabitEthernet2'] = raw1
self.outputs['show running-config | section router ospf 65109'] = raw2
# Mock device.execute so each call is served by mapper().
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Parse with an explicit interface argument and compare to the golden dict.
obj = ShowIpOspfInterface(device=self.device)
parsed_output = obj.parse(interface='GigabitEthernet2')
self.assertEqual(parsed_output, self.golden_parsed_output3)
def test_show_ip_ospf_interface_full4(self):
# Golden test 4: single-interface output featuring Segment Routing,
# TEAPP/SRTE affinity and SR Policy Manager sections; verified against
# self.golden_parsed_output4.
self.maxDiff = None
# Device output for the interface-specific show command.
# NOTE(review): string content is parser input and whitespace-sensitive —
# the original leading indentation appears stripped here; do not reflow.
raw1='''\
show ip ospf interface GigabitEthernet2
GigabitEthernet2 is up, line protocol is up
Internet Address 10.0.0.6/30, Interface ID 8, Area 8
Attached via Network Statement
Process ID 8888, Router ID 10.4.1.1, Network Type POINT_TO_POINT, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Segment Routing enabled for MPLS forwarding
Index 1/1/1, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 14
Last flood scan time is 1 msec, maximum is 8 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.229.11.11
Suppress hello for 0 neighbor(s)
TEAPP:
Topology Id:0x0
TEAPP:SRTE
Affinity: length 32, bits 0x00000010
Extended affinity: length 32, bits 0x00000010
SR Policy Manager:
TE Opaque LSA: Source of link information OSPF
'''
# Running-config section for OSPF process 8888 (segment-routing enabled).
raw2='''\
PE1#show running-config | section router ospf 8888
router ospf 8888
router-id 10.4.1.1
segment-routing area 8 mpls
segment-routing mpls
network 0.0.0.0 255.255.255.255 area 8
'''
# side_effect mapper: routes each executed CLI command to its canned output.
def mapper(key):
return self.outputs[key]
self.outputs = {}
self.outputs['show ip ospf interface GigabitEthernet2'] = raw1
self.outputs['show running-config | section router ospf 8888'] = raw2
# Mock device.execute so each call is served by mapper().
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Parse with an explicit interface argument and compare to the golden dict.
obj = ShowIpOspfInterface(device=self.device)
parsed_output = obj.parse(interface='GigabitEthernet2')
self.assertEqual(parsed_output, self.golden_parsed_output4)
def test_show_ip_ospf_interface_empty(self):
    """Parsing empty device output must raise SchemaEmptyParserError."""
    self.maxDiff = None
    # Device whose execute() returns an empty string for every command.
    self.device = Mock(**self.empty_output)
    parser = ShowIpOspfInterface(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
# ============================================
# Unit test for 'show ip ospf neighbor detail'
# ============================================
class test_show_ip_ospf_neighbor_detail(unittest.TestCase):
'''Unit test for "show ip ospf neighbor detail" '''
# Shared fixtures for the 'show ip ospf neighbor detail' unit tests.
device = Device(name='aDevice')
# Simulates a device returning no output; used by the empty-output test.
empty_output = {'execute.return_value': ''}
# Expected parse result for golden test 1: two VRFs ('VRF1' with a
# sham-link neighbor, 'default' with two broadcast neighbors).
golden_parsed_output1 = {
'vrf':
{'VRF1':
{'address_family':
{'ipv4':
{'instance':
{'2':
{'areas':
{'0.0.0.1':
{'interfaces':
{'GigabitEthernet3':
{'neighbors':
{'10.115.55.55':
{'address': '10.186.5.5',
'bdr_ip_addr': '10.186.5.5',
'dead_timer': '00:00:34',
'dr_ip_addr': '10.186.5.1',
'first': '0x0(0)/0x0(0)/0x0(0)',
'index': '1/1/1,',
'interface': 'GigabitEthernet3',
'neighbor_router_id': '10.115.55.55',
'uptime': '15:47:14',
'next': '0x0(0)/0x0(0)/0x0(0)',
'priority': 1,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 6,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 1,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 6}}}}},
'sham_links':
{'10.229.11.11 10.151.22.22':
{'neighbors':
{'10.151.22.22':
{'address': '10.151.22.22',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:35',
'dr_ip_addr': '0.0.0.0',
'first': '0x0(0)/0x0(0)/0x0(0)',
'hello_options': '0x2',
'dbd_options': '0x42',
'index': '1/2/2,',
'interface': 'OSPF_SL1',
'neighbor_router_id': '10.151.22.22',
'uptime': '07:41:59',
'next': '0x0(0)/0x0(0)/0x0(0)',
'priority': 0,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 1,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 1,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 2}}}}}}}}}}}},
'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'interfaces':
{'GigabitEthernet1':
{'neighbors':
{'10.64.4.4':
{'address': '10.1.4.4',
'bdr_ip_addr': '10.1.4.1',
'dead_timer': '00:00:35',
'dr_ip_addr': '10.1.4.4',
'first': '0x0(0)/0x0(0)/0x0(0)',
'index': '1/1/1,',
'interface': 'GigabitEthernet1',
'neighbor_router_id': '10.64.4.4',
'uptime': '1d01h',
'next': '0x0(0)/0x0(0)/0x0(0)',
'priority': 1,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 1,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 1}}}},
'GigabitEthernet2':
{'neighbors':
{'10.16.2.2':
{'address': '10.1.2.2',
'bdr_ip_addr': '10.1.2.2',
'dead_timer': '00:00:33',
'dr_ip_addr': '10.1.2.1',
'first': '0x0(0)/0x0(0)/0x0(0)',
'hello_options': '0x2',
'dbd_options': '0x42',
'index': '1/2/2,',
'interface': 'GigabitEthernet2',
'interface_id': 'unknown',
'neighbor_router_id': '10.16.2.2',
'uptime': '08:04:20',
'next': '0x0(0)/0x0(0)/0x0(0)',
'priority': 1,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 0,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 0}}}}}}}}}}}}}}
# Expected parse result for golden test 2: default VRF with two broadcast
# neighbors plus a virtual-link neighbor (OSPF_VL1).
golden_parsed_output2 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.1':
{'interfaces':
{'GigabitEthernet0/0':
{'neighbors':
{'10.16.2.2':
{'address': '10.229.4.2',
'bdr_ip_addr': '10.229.4.2',
'dead_timer': '00:00:34',
'dr_ip_addr': '10.229.4.4',
'first': '0x0(0)/0x0(0)',
'index': '1/1,',
'interface': 'GigabitEthernet0/0',
'neighbor_router_id': '10.16.2.2',
'uptime': '05:07:40',
'next': '0x0(0)/0x0(0)',
'priority': 1,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 1,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 1,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 1}}}},
'GigabitEthernet0/1':
{'neighbors':
{'10.36.3.3':
{'address': '10.19.4.3',
'bdr_ip_addr': '10.19.4.3',
'dead_timer': '00:00:33',
'dr_ip_addr': '10.19.4.4',
'first': '0x0(0)/0x0(0)',
'hello_options': '0x2',
'dbd_options': '0x42',
'index': '2/2,',
'interface': 'GigabitEthernet0/1',
'neighbor_router_id': '10.36.3.3',
'uptime': '16:31:06',
'next': '0x0(0)/0x0(0)',
'priority': 1,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 1,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 1,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 2}}}}},
'virtual_links':
{'0.0.0.1 10.64.4.4,':
{'neighbors':
{'10.36.3.3':
{'address': '10.229.3.3',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:41',
'dr_ip_addr': '0.0.0.0',
'first': '0x0(0)/0x0(0)',
'hello_options': '0x2',
'dbd_options': '0x42',
'index': '1/3,',
'interface': 'OSPF_VL1',
'neighbor_router_id': '10.36.3.3',
'uptime': '05:07:21',
'next': '0x0(0)/0x0(0)',
'priority': 0,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 1,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 1,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 12,
'nbr_retrans_qlen': 0,
'total_retransmission': 3}}}}}}}}}}}}}}
# Expected parse result for golden test 3: two OSPF instances
# ('1668' area 0.0.0.0 and '1666' area 0.0.6.130) on TenGig interfaces.
golden_parsed_output3 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1668':
{'areas':
{'0.0.0.0':
{'interfaces':
{'TenGigabitEthernet3/1/1':
{'neighbors':
{'10.196.55.33':
{'address': '10.196.55.33',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:03',
'dr_ip_addr': '10.196.55.38',
'first': '0x0(0)/0x625F62BC(13775196)',
'hello_options': '0x2',
'index': '2/2,',
'interface': 'TenGigabitEthernet3/1/1',
'neighbor_router_id': '10.196.55.33',
'next': '0x0(0)/0x625F62BC(13775196)',
'priority': 0,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 2,
'last_retrans_max_scan_time_msec': 4,
'last_retrans_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 12,
'nbr_retrans_qlen': 3,
'total_retransmission': 5},
'uptime': '3d21h'}}},
'TenGigabitEthernet3/1/2':
{'neighbors':
{'10.196.55.41':
{'address': '10.196.55.41',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:03',
'dr_ip_addr': '10.196.55.46',
'first': '0x0(0)/0x0(0)',
'hello_options': '0x2',
'index': '1/1,',
'interface': 'TenGigabitEthernet3/1/2',
'neighbor_router_id': '10.196.55.41',
'next': '0x0(0)/0x0(0)',
'priority': 0,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 1,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 22,
'nbr_retrans_qlen': 0,
'total_retransmission': 1},
'uptime': '3d00h'}}},
'TenGigabitEthernet3/1/5':
{'neighbors':
{'10.196.55.49':
{'address': '10.196.55.49',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:03',
'dr_ip_addr': '10.196.55.54',
'first': '0x0(0)/0x625F6304(13775194)',
'hello_options': '0x2',
'index': '3/3,',
'interface': 'TenGigabitEthernet3/1/5',
'neighbor_router_id': '10.196.55.49',
'next': '0x0(0)/0x625F6304(13775194)',
'priority': 0,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 3,
'last_retrans_max_scan_time_msec': 4,
'last_retrans_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 12,
'nbr_retrans_qlen': 5,
'total_retransmission': 6},
'uptime': '3d00h'}}}}}}},
'1666':
{'areas':
{'0.0.6.130':
{'interfaces':
{'TenGigabitEthernet3/1/3':
{'neighbors':
{'10.196.55.21':
{'address': '10.196.55.21',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:03',
'dr_ip_addr': '10.196.55.26',
'first': '0x0(0)/0x0(0)',
'hello_options': '0x2',
'index': '1/1,',
'interface': 'TenGigabitEthernet3/1/3',
'neighbor_router_id': '10.196.55.21',
'next': '0x0(0)/0x0(0)',
'priority': 0,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 0,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 12,
'nbr_retrans_qlen': 0,
'total_retransmission': 0},
'uptime': '3d00h'}}},
'TenGigabitEthernet3/1/4':
{'neighbors':
{'10.196.55.93':
{'address': '10.196.55.93',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:03',
'dr_ip_addr': '10.196.55.98',
'first': '0x0(0)/0x0(0)',
'hello_options': '0x2',
'index': '2/2,',
'interface': 'TenGigabitEthernet3/1/4',
'neighbor_router_id': '10.196.55.93',
'next': '0x0(0)/0x0(0)',
'priority': 0,
'state': 'full',
'statistics':
{'last_retrans_max_scan_length': 0,
'last_retrans_max_scan_time_msec': 0,
'last_retrans_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'nbr_event_count': 18,
'nbr_retrans_qlen': 0,
'total_retransmission': 0},
'uptime': '3d00h'}}}}}}}}}}}}}
def test_show_ip_ospf_neighbor_detail_full1(self):
# Golden test 1: parse 'show ip ospf neighbor detail' covering broadcast
# neighbors and a sham-link neighbor (OSPF_SL1), with per-interface and
# per-process follow-up commands; verified against golden_parsed_output1.
self.maxDiff = None
# side_effect mapper: routes each executed CLI command to its canned output.
def mapper(key):
return self.outputs[key]
# Primary 'show ip ospf neighbor detail' output (four neighbors).
# NOTE(review): string content is parser input and whitespace-sensitive —
# the original leading indentation appears stripped here; do not reflow.
raw1 = '''\
R1_ospf_xe#show ip ospf neighbor detail
Neighbor 10.16.2.2, interface address 10.1.2.2, interface-id unknown
In the area 0 via interface GigabitEthernet2
Neighbor priority is 1, State is FULL, 6 state changes
DR is 10.1.2.1 BDR is 10.1.2.2
Options is 0x2 in Hello (E-bit)
Options is 0x42 in DBD (E-bit, O-bit)
Dead timer due in 00:00:33
Neighbor is up for 08:04:20
Index 1/2/2, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.64.4.4, interface address 10.1.4.4
In the area 0 via interface GigabitEthernet1
Neighbor priority is 1, State is FULL, 6 state changes
DR is 10.1.4.4 BDR is 10.1.4.1
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:35
Neighbor is up for 1d01h
Index 1/1/1, retransmission queue length 0, number of retransmission 1
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.151.22.22, interface address 10.151.22.22
In the area 1 via interface OSPF_SL1
Neighbor priority is 0, State is FULL, 6 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
Options is 0x2 in Hello (E-bit)
Options is 0x42 in DBD (E-bit, O-bit)
Dead timer due in 00:00:35
Neighbor is up for 07:41:59
Index 1/2/2, retransmission queue length 0, number of retransmission 2
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 1, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.115.55.55, interface address 10.186.5.5
In the area 1 via interface GigabitEthernet3
Neighbor priority is 1, State is FULL, 6 state changes
DR is 10.186.5.1 BDR is 10.186.5.5
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:34
Neighbor is up for 15:47:14
Index 1/1/1, retransmission queue length 0, number of retransmission 6
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 1, maximum is 6
Last retransmission scan time is 0 msec, maximum is 0 msec
'''
# Per-interface 'show ip ospf interface | section <intf>' outputs the
# parser executes to resolve area/process/VRF for each neighbor.
raw2_1 = '''\
R1_ospf_xe#show ip ospf interface | i GigabitEthernet2
GigabitEthernet2 is up, line protocol is up
Internet Address 10.1.2.1/24, Interface ID 8, Area 0
Attached via Interface Enable
Process ID 1, Router ID 10.4.1.1, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.4.1.1, Interface address 10.1.2.1
Backup Designated router (ID) 10.16.2.2, Interface address 10.1.2.2
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:05
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/3/3, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 3
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
'''
raw2_2 = '''\
R1_ospf_xe#show ip ospf interface | i GigabitEthernet1
GigabitEthernet1 is up, line protocol is up
Internet Address 10.1.4.1/24, Interface ID 7, Area 0
Attached via Interface Enable
Process ID 1, Router ID 10.4.1.1, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State BDR, Priority 1
Designated Router (ID) 10.64.4.4, Interface address 10.1.4.4
Backup Designated router (ID) 10.4.1.1, Interface address 10.1.4.1
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:08
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/2/2, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 3, maximum is 3
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.64.4.4 (Designated Router)
Suppress hello for 0 neighbor(s)
'''
raw2_3 = '''
R1_ospf_xe#show ip ospf interface | i OSPF_SL1
OSPF_SL1 is up, line protocol is up
Internet Address 0.0.0.0/0, Interface ID 14, Area 1
Attached via Not Attached
Process ID 2, Router ID 10.229.11.11, Network Type SHAM_LINK, Cost: 111
Topology-MTID Cost Disabled Shutdown Topology Name
0 111 no no Base
Configured as demand circuit
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 1)
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Strict TTL checking enabled, up to 3 hops allowed
Can not be protected by per-prefix Loop-Free FastReroute
Can not be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/2/2, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 5
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.151.22.22
Suppress hello for 0 neighbor(s)
'''
raw2_4 = '''
R1_ospf_xe#show ip ospf interface | i GigabitEthernet3
GigabitEthernet3 is up, line protocol is up
Internet Address 10.186.5.1/24, Interface ID 9, Area 1
Attached via Interface Enable
Process ID 2, Router ID 10.229.11.11, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.229.11.11, Interface address 10.186.5.1
Backup Designated router (ID) 10.115.55.55, Interface address 10.186.5.5
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:08
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/1/1, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 0, maximum is 7
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.115.55.55 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
'''
# Sham-link lookup and related running-config sections.
raw3_1 = '''\
R1_ospf_xe#show ip ospf sham-links | i OSPF_SL1
Sham Link OSPF_SL1 to address 10.151.22.22 is up
'''
raw3_2 = '''\
R1_ospf_xe#show running-config | i sham-link | i 10.151.22.22
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
'''
raw4_1 = '''\
R1_ospf_xe#show running-config | section router ospf 1
router ospf 1
mpls traffic-eng router-id Loopback0
mpls traffic-eng area 0
'''
raw4_2 = '''\
R1_ospf_xe#show running-config | section router ospf 2
router ospf 2 vrf VRF1
area 1 virtual-link 10.100.5.5
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
redistribute bgp
'''
# Register every command -> output pair the mocked device must answer.
self.outputs = {}
self.outputs['show ip ospf neighbor detail'] = raw1
self.outputs['show ip ospf interface | section GigabitEthernet2'] = raw2_1
self.outputs['show ip ospf interface | section GigabitEthernet1'] = raw2_2
self.outputs['show ip ospf interface | section OSPF_SL1'] = raw2_3
self.outputs['show ip ospf interface | section GigabitEthernet3'] = raw2_4
self.outputs['show ip ospf sham-links | i OSPF_SL1'] = raw3_1
self.outputs['show running-config | i sham-link | i 10.151.22.22'] = raw3_2
self.outputs['show running-config | section router ospf 1'] = raw4_1
self.outputs['show running-config | section router ospf 2'] = raw4_2
# Mock device.execute so each call is served by mapper().
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Run the parser and compare against the expected golden structure.
obj = ShowIpOspfNeighborDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_neighbor_detail_full2(self):
# Golden test 2: 'show ip ospf neighbor detail' including a virtual-link
# neighbor (OSPF_VL1) plus two broadcast neighbors; verified against
# golden_parsed_output2.
self.maxDiff = None
# side_effect mapper: routes each executed CLI command to its canned output.
def mapper(key):
return self.outputs[key]
# Primary 'show ip ospf neighbor detail' output (three neighbors).
# NOTE(review): string content is parser input and whitespace-sensitive —
# the original leading indentation appears stripped here; do not reflow.
raw1 = '''\
R4_ospf_iosv#show ip ospf neighbor detail (including virtual-link)
Neighbor 10.36.3.3, interface address 10.229.3.3
In the area 0 via interface OSPF_VL1
Neighbor priority is 0, State is FULL, 12 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
Options is 0x2 in Hello (E-bit)
Options is 0x42 in DBD (E-bit, O-bit)
Dead timer due in 00:00:41
Neighbor is up for 05:07:21
Index 1/3, retransmission queue length 0, number of retransmission 3
First 0x0(0)/0x0(0) Next 0x0(0)/0x0(0)
Last retransmission scan length is 1, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.36.3.3, interface address 10.19.4.3
In the area 1 via interface GigabitEthernet0/1
Neighbor priority is 1, State is FULL, 6 state changes
DR is 10.19.4.4 BDR is 10.19.4.3
Options is 0x2 in Hello (E-bit)
Options is 0x42 in DBD (E-bit, O-bit)
Dead timer due in 00:00:33
Neighbor is up for 16:31:06
Index 2/2, retransmission queue length 0, number of retransmission 2
First 0x0(0)/0x0(0) Next 0x0(0)/0x0(0)
Last retransmission scan length is 1, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.16.2.2, interface address 10.229.4.2
In the area 1 via interface GigabitEthernet0/0
Neighbor priority is 1, State is FULL, 6 state changes
DR is 10.229.4.4 BDR is 10.229.4.2
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:34
Neighbor is up for 05:07:40
Index 1/1, retransmission queue length 0, number of retransmission 1
First 0x0(0)/0x0(0) Next 0x0(0)/0x0(0)
Last retransmission scan length is 1, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
'''
# Per-interface follow-up outputs used to resolve area/process for each
# neighbor, then virtual-link and running-config lookups.
raw2_1 = '''\
R4_ospf_iosv#show ip ospf interface | section OSPF_VL1
OSPF_VL1 is up, line protocol is up
Internet Address 10.19.4.4/24, Area 0, Attached via Not Attached
Process ID 1, Router ID 10.64.4.4, Network Type VIRTUAL_LINK, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Configured as demand circuit
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 7)
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 4, Dead 44, Wait 40, Retransmit 5
oob-resync timeout 44
Hello due in 00:00:02
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 2/6, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 2, maximum is 8
Last flood scan time is 0 msec, maximum is 0 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.36.3.3
Suppress hello for 0 neighbor(s)
'''
raw2_2 = '''\
R4_ospf_iosv#show ip ospf interface | section GigabitEthernet0/1
GigabitEthernet0/1 is up, line protocol is up
Internet Address 10.19.4.4/24, Area 1, Attached via Interface Enable
Process ID 1, Router ID 10.64.4.4, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.64.4.4, Interface address 10.19.4.4
Backup Designated router (ID) 10.36.3.3, Interface address 10.19.4.3
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:02
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 3/4, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 0, maximum is 11
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.36.3.3 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
'''
raw2_3 = '''\
R4_ospf_iosv#show ip ospf interface | section GigabitEthernet0/0
GigabitEthernet0/0 is up, line protocol is up
Internet Address 10.229.4.4/24, Area 1, Attached via Interface Enable
Process ID 1, Router ID 10.64.4.4, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.64.4.4, Interface address 10.229.4.4
Backup Designated router (ID) 10.16.2.2, Interface address 10.229.4.2
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:02
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 2/3, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 10
Last flood scan time is 0 msec, maximum is 10 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2 (Backup Designated Router)
Suppress hello for 0 neighbor(s)
'''
raw3_1 = '''\
R1_ospf_xe#show ip ospf virtual-links | i OSPF_VL1
Virtual Link OSPF_VL1 to router 10.100.5.5 is down
'''
raw3_2 = '''\
R1_ospf_xe#show running-config | i virtual-link | i 10.100.5.5
area 1 virtual-link 10.100.5.5
'''
raw4_1 = '''\
R1_ospf_xe#show running-config | section router ospf 1
router ospf 1
mpls traffic-eng router-id Loopback0
mpls traffic-eng area 0
'''
raw4_2 = '''\
R1_ospf_xe#show running-config | section router ospf 2
router ospf 2 vrf VRF1
area 1 virtual-link 10.100.5.5
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
redistribute bgp
'''
# Register every command -> output pair the mocked device must answer.
self.outputs = {}
self.outputs['show ip ospf neighbor detail'] = raw1
self.outputs['show ip ospf interface | section OSPF_VL1'] = raw2_1
self.outputs['show ip ospf interface | section GigabitEthernet0/1'] = raw2_2
self.outputs['show ip ospf interface | section GigabitEthernet0/0'] = raw2_3
self.outputs['show ip ospf virtual-links | i OSPF_VL1'] = raw3_1
self.outputs['show running-config | i virtual-link | i 10.100.5.5'] = raw3_2
self.outputs['show running-config | section router ospf 1'] = raw4_1
self.outputs['show running-config | section router ospf 2'] = raw4_2
# Mock device.execute so each call is served by mapper().
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Run the parser and compare against the expected golden structure.
obj = ShowIpOspfNeighborDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output2)
def test_show_ip_ospf_neighbor_detail_full3(self):
# Golden test: parse 'show ip ospf neighbor detail' from a VSS device
# running two OSPF processes (1666 and 1668) and compare the result
# against self.golden_parsed_output3 (defined earlier in this class).
self.maxDiff = None
# The parser issues several follow-up CLI commands; serve each one from
# the self.outputs table via Mock's side_effect.
def mapper(key):
return self.outputs[key]
# Primary command output: five FULL neighbors across areas 0 and 1666.
raw1 = '''\
nhq-choke-VSS#sh ip ospf neighbor detail
Neighbor 10.196.55.49, interface address 10.196.55.49
In the area 0 via interface TenGigabitEthernet3/1/5
Neighbor priority is 0, State is FULL, 12 state changes
DR is 10.196.55.54 BDR is 0.0.0.0
Options is 0x2 in Hello (E-bit)
Options is 0x2 in DBD (E-bit)
Dead timer due in 00:00:03
Neighbor is up for 3d00h
Index 3/3, retransmission queue length 5, number of retransmission 6
First 0x0(0)/0x625F6304(13775194) Next 0x0(0)/0x625F6304(13775194)
Last retransmission scan length is 0, maximum is 3
Last retransmission scan time is 0 msec, maximum is 4 msec
Link State retransmission due in 2344 msec
Neighbor 10.196.55.41, interface address 10.196.55.41
In the area 0 via interface TenGigabitEthernet3/1/2
Neighbor priority is 0, State is FULL, 22 state changes
DR is 10.196.55.46 BDR is 0.0.0.0
Options is 0x2 in Hello (E-bit)
Options is 0x2 in DBD (E-bit)
Dead timer due in 00:00:03
Neighbor is up for 3d00h
Index 1/1, retransmission queue length 0, number of retransmission 1
First 0x0(0)/0x0(0) Next 0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.196.55.33, interface address 10.196.55.33
In the area 0 via interface TenGigabitEthernet3/1/1
Neighbor priority is 0, State is FULL, 12 state changes
DR is 10.196.55.38 BDR is 0.0.0.0
Options is 0x2 in Hello (E-bit)
Options is 0x2 in DBD (E-bit)
Dead timer due in 00:00:03
Neighbor is up for 3d21h
Index 2/2, retransmission queue length 3, number of retransmission 5
First 0x0(0)/0x625F62BC(13775196) Next 0x0(0)/0x625F62BC(13775196)
Last retransmission scan length is 0, maximum is 2
Last retransmission scan time is 0 msec, maximum is 4 msec
Link State retransmission due in 4356 msec
Neighbor 10.196.55.93, interface address 10.196.55.93
In the area 1666 via interface TenGigabitEthernet3/1/4
Neighbor priority is 0, State is FULL, 18 state changes
DR is 10.196.55.98 BDR is 0.0.0.0
Options is 0x2 in Hello (E-bit)
Options is 0x2 in DBD (E-bit)
Dead timer due in 00:00:03
Neighbor is up for 3d00h
Index 2/2, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0) Next 0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.196.55.21, interface address 10.196.55.21
In the area 1666 via interface TenGigabitEthernet3/1/3
Neighbor priority is 0, State is FULL, 12 state changes
DR is 10.196.55.26 BDR is 0.0.0.0
Options is 0x2 in Hello (E-bit)
Options is 0x2 in DBD (E-bit)
Dead timer due in 00:00:03
Neighbor is up for 3d00h
Index 1/1, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0) Next 0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
'''
# Per-interface 'show ip ospf interface | section <intf>' outputs for
# the five TenGigabitEthernet interfaces referenced by raw1.
raw2_1 = '''\
nhq-choke-VSS#show ip ospf interface | section TenGigabitEthernet3/1/1
TenGigabitEthernet3/1/1 is up, line protocol is up (connected)
Internet Address 10.196.55.38/29, Area 0, Attached via Interface Enable
Process ID 1668, Router ID 10.21.52.10, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.21.52.10, Interface address 10.196.55.38
No backup designated router on this network
Timer intervals configured, Hello 1, Dead 4, Wait 4, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 1/1, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 2, maximum is 40
Last flood scan time is 0 msec, maximum is 4 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.196.55.33
Suppress hello for 0 neighbor(s)
Cryptographic authentication enabled
Youngest key id is 1
'''
raw2_2 = '''\
nhq-choke-VSS#show ip ospf interface | section TenGigabitEthernet3/1/2
TenGigabitEthernet3/1/2 is up, line protocol is up (connected)
Internet Address 10.196.55.46/29, Area 0, Attached via Interface Enable
Process ID 1668, Router ID 10.21.52.10, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.21.52.10, Interface address 10.196.55.46
No backup designated router on this network
Timer intervals configured, Hello 1, Dead 4, Wait 4, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 2/2, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 2, maximum is 40
Last flood scan time is 0 msec, maximum is 4 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.196.55.41
Suppress hello for 0 neighbor(s)
Cryptographic authentication enabled
Youngest key id is 1
'''
raw2_3 = '''\
nhq-choke-VSS#show ip ospf interface | section TenGigabitEthernet3/1/3
TenGigabitEthernet3/1/3 is up, line protocol is up (connected)
Internet Address 10.196.55.26/29, Area 1666, Attached via Interface Enable
Process ID 1666, Router ID 10.15.21.9, Network Type BROADCAST, Cost: 1000
Topology-MTID Cost Disabled Shutdown Topology Name
0 1000 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.15.21.9, Interface address 10.196.55.26
No backup designated router on this network
Timer intervals configured, Hello 1, Dead 4, Wait 4, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 2/2, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 3
Last flood scan time is 0 msec, maximum is 4 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.196.55.21
Suppress hello for 0 neighbor(s)
'''
raw2_4 = '''\
nhq-choke-VSS#show ip ospf interface | section TenGigabitEthernet3/1/4
TenGigabitEthernet3/1/4 is up, line protocol is up (connected)
Internet Address 10.196.55.98/29, Area 1666, Attached via Interface Enable
Process ID 1666, Router ID 10.15.21.9, Network Type BROADCAST, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.15.21.9, Interface address 10.196.55.98
No backup designated router on this network
Timer intervals configured, Hello 1, Dead 4, Wait 4, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 3/3, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 3
Last flood scan time is 0 msec, maximum is 4 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.196.55.93
Suppress hello for 0 neighbor(s)
'''
raw2_5 = '''\
nhq-choke-VSS#show ip ospf interface | section TenGigabitEthernet3/1/5
TenGigabitEthernet3/1/5 is up, line protocol is up (connected)
Internet Address 10.196.55.54/29, Area 0, Attached via Interface Enable
Process ID 1668, Router ID 10.21.52.10, Network Type BROADCAST, Cost: 100
Topology-MTID Cost Disabled Shutdown Topology Name
0 100 no no Base
Enabled by interface config, including secondary ip addresses
Transmit Delay is 1 sec, State DR, Priority 1
Designated Router (ID) 10.21.52.10, Interface address 10.196.55.54
No backup designated router on this network
Timer intervals configured, Hello 1, Dead 4, Wait 4, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:00
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Index 3/3, flood queue length 0
Next 0x0(0)/0x0(0)
Last flood scan length is 2, maximum is 40
Last flood scan time is 0 msec, maximum is 4 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.196.55.49
Suppress hello for 0 neighbor(s)
Cryptographic authentication enabled
Youngest key id is 1
'''
# Running-config sections for the two OSPF processes.
raw3_1 = '''\
R1_ospf_xe#show running-config | section router ospf 1666
router ospf 1666
'''
raw3_2 = '''\
R1_ospf_xe#show running-config | section router ospf 1668
router ospf 1668
router-id 10.21.52.10
'''
# Command -> canned-output table consumed by mapper above.
self.outputs = {}
self.outputs['show ip ospf neighbor detail'] = raw1
self.outputs['show ip ospf interface | section TenGigabitEthernet3/1/1'] = raw2_1
self.outputs['show ip ospf interface | section TenGigabitEthernet3/1/2'] = raw2_2
self.outputs['show ip ospf interface | section TenGigabitEthernet3/1/3'] = raw2_3
self.outputs['show ip ospf interface | section TenGigabitEthernet3/1/4'] = raw2_4
self.outputs['show ip ospf interface | section TenGigabitEthernet3/1/5'] = raw2_5
self.outputs['show running-config | section router ospf 1666'] = raw3_1
self.outputs['show running-config | section router ospf 1668'] = raw3_2
# Replace device.execute so every CLI call is answered from the table.
self.device.execute = Mock()
self.device.execute.side_effect = mapper
obj = ShowIpOspfNeighborDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output3)
# Raw 'show ip ospf neighbor detail' output featuring 'interface-id' and
# 'SR adj label' lines (segment-routing adjacency labels).
# NOTE(review): test_golden4 below feeds its own raw strings through a
# command->output mapper rather than using this attribute, so
# golden_output4 looks unused -- confirm before relying on it.
golden_output4 = {'execute.return_value': '''
show ip ospf neighbor detail
Neighbor 10.16.2.2, interface address 192.168.154.2, interface-id 24
In the area 8 via interface GigabitEthernet0/1/2
Neighbor priority is 0, State is FULL, 6 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
SR adj label 16
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:38
Neighbor is up for 3d16h
Index 1/3/3, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.16.2.2, interface address 192.168.4.2, interface-id 23
In the area 8 via interface GigabitEthernet0/1/1
Neighbor priority is 0, State is FULL, 6 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
SR adj label 17
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:35
Neighbor is up for 1w0d
Index 1/4/4, retransmission queue length 0, number of retransmission 2
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 1, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
'''}
# Expected parse of the test_golden4 fixture: one OSPF process (65109) in
# the default VRF, area 0.0.0.8, neighbor 10.16.2.2 seen on
# GigabitEthernet2-5; only the GigabitEthernet5 entry carries
# 'sr_adj_label' (the other neighbors have no 'SR adj label' line).
golden_parsed_output4 = {
'vrf': {
'default': {
'address_family': {
'ipv4': {
'instance': {
'65109': {
'areas': {
'0.0.0.8': {
'interfaces': {
'GigabitEthernet5': {
'neighbors': {
'10.16.2.2': {
'neighbor_router_id': '10.16.2.2',
'interface': 'GigabitEthernet5',
'address': '10.225.0.15',
'interface_id': '11',
'priority': 0,
'state': 'full',
'statistics': {
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 0,
'last_retrans_scan_length': 0,
'last_retrans_max_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'last_retrans_max_scan_time_msec': 0,
},
'dr_ip_addr': '0.0.0.0',
'bdr_ip_addr': '0.0.0.0',
'sr_adj_label': '16',
'dead_timer': '00:00:31',
'uptime': '6d07h',
'index': '1/4/4,',
'first': '0x0(0)/0x0(0)/0x0(0)',
'next': '0x0(0)/0x0(0)/0x0(0)',
},
},
},
'GigabitEthernet4': {
'neighbors': {
'10.16.2.2': {
'neighbor_router_id': '10.16.2.2',
'interface': 'GigabitEthernet4',
'address': '10.225.0.16',
'interface_id': '10',
'priority': 0,
'state': 'full',
'statistics': {
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 0,
'last_retrans_scan_length': 0,
'last_retrans_max_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'last_retrans_max_scan_time_msec': 0,
},
'dr_ip_addr': '0.0.0.0',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:32',
'uptime': '6d07h',
'index': '1/3/3,',
'first': '0x0(0)/0x0(0)/0x0(0)',
'next': '0x0(0)/0x0(0)/0x0(0)',
},
},
},
'GigabitEthernet3': {
'neighbors': {
'10.16.2.2': {
'neighbor_router_id': '10.16.2.2',
'interface': 'GigabitEthernet3',
'address': '10.225.0.17',
'interface_id': '9',
'priority': 0,
'state': 'full',
'statistics': {
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 0,
'last_retrans_scan_length': 0,
'last_retrans_max_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'last_retrans_max_scan_time_msec': 0,
},
'dr_ip_addr': '0.0.0.0',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:34',
'uptime': '6d07h',
'index': '1/2/2,',
'first': '0x0(0)/0x0(0)/0x0(0)',
'next': '0x0(0)/0x0(0)/0x0(0)',
},
},
},
'GigabitEthernet2': {
'neighbors': {
'10.16.2.2': {
'neighbor_router_id': '10.16.2.2',
'interface': 'GigabitEthernet2',
'address': '10.225.0.18',
'interface_id': '8',
'priority': 0,
'state': 'full',
'statistics': {
'nbr_event_count': 6,
'nbr_retrans_qlen': 0,
'total_retransmission': 0,
'last_retrans_scan_length': 0,
'last_retrans_max_scan_length': 0,
'last_retrans_scan_time_msec': 0,
'last_retrans_max_scan_time_msec': 0,
},
'dr_ip_addr': '0.0.0.0',
'bdr_ip_addr': '0.0.0.0',
'dead_timer': '00:00:35',
'uptime': '6d07h',
'index': '1/1/1,',
'first': '0x0(0)/0x0(0)/0x0(0)',
'next': '0x0(0)/0x0(0)/0x0(0)',
},
},
},
},
},
},
},
},
},
},
},
},
}
def test_golden4(self):
# Golden test: neighbor-detail output with interface-id and SR adjacency
# labels; result compared against self.golden_parsed_output4.
self.maxDiff = None
# Serve each follow-up CLI command from the self.outputs table.
def mapper(key):
return self.outputs[key]
# Primary command output: neighbor 10.16.2.2 on four interfaces; the
# GigabitEthernet5 block is the only one with an 'SR adj label' line.
raw1 = '''
Neighbor 10.16.2.2, interface address 10.225.0.15, interface-id 11
In the area 8 via interface GigabitEthernet5
Neighbor priority is 0, State is FULL, 6 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
SR adj label 16
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:31
Neighbor is up for 6d07h
Index 1/4/4, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.16.2.2, interface address 10.225.0.16, interface-id 10
In the area 8 via interface GigabitEthernet4
Neighbor priority is 0, State is FULL, 6 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:32
Neighbor is up for 6d07h
Index 1/3/3, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.16.2.2, interface address 10.225.0.17, interface-id 9
In the area 8 via interface GigabitEthernet3
Neighbor priority is 0, State is FULL, 6 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:34
Neighbor is up for 6d07h
Index 1/2/2, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
Neighbor 10.16.2.2, interface address 10.225.0.18, interface-id 8
In the area 8 via interface GigabitEthernet2
Neighbor priority is 0, State is FULL, 6 state changes
DR is 0.0.0.0 BDR is 0.0.0.0
Options is 0x12 in Hello (E-bit, L-bit)
Options is 0x52 in DBD (E-bit, L-bit, O-bit)
LLS Options is 0x1 (LR)
Dead timer due in 00:00:35
Neighbor is up for 6d07h
Index 1/1/1, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
'''
# Per-interface outputs for the four GigabitEthernet interfaces.
raw2_1 = '''
show ip ospf interface | section GigabitEthernet5
GigabitEthernet5 is up, line protocol is up
Internet Address 10.225.0.28/30, Interface ID 11, Area 8
Attached via Network Statement
Process ID 65109, Router ID 10.4.1.1, Network Type POINT_TO_POINT, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:01
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/4/4, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 10
Last flood scan time is 0 msec, maximum is 9 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2
Suppress hello for 0 neighbor(s)
'''
raw2_2 = '''
show ip ospf interface | section GigabitEthernet4
GigabitEthernet4 is up, line protocol is up
Internet Address 10.225.0.29/30, Interface ID 10, Area 8
Attached via Network Statement
Process ID 65109, Router ID 10.4.1.1, Network Type POINT_TO_POINT, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:07
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/3/3, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 10
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2
Suppress hello for 0 neighbor(s)
'''
raw2_3 = '''
show ip ospf interface | section GigabitEthernet3
GigabitEthernet3 is up, line protocol is up
Internet Address 10.225.0.30/30, Interface ID 9, Area 8
Attached via Network Statement
Process ID 65109, Router ID 10.4.1.1, Network Type POINT_TO_POINT, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:05
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/2/2, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 10
Last flood scan time is 1 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2
Suppress hello for 0 neighbor(s)
'''
raw2_4 = '''
show ip ospf interface | section GigabitEthernet2
GigabitEthernet2 is up, line protocol is up
Internet Address 10.225.0.31/30, Interface ID 8, Area 8
Attached via Network Statement
Process ID 65109, Router ID 10.4.1.1, Network Type POINT_TO_POINT, Cost: 1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Transmit Delay is 1 sec, State POINT_TO_POINT
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
Hello due in 00:00:06
Supports Link-local Signaling (LLS)
Cisco NSF helper support enabled
IETF NSF helper support enabled
Can be protected by per-prefix Loop-Free FastReroute
Can be used for per-prefix Loop-Free FastReroute repair paths
Not Protected by per-prefix TI-LFA
Index 1/1/1, flood queue length 0
Next 0x0(0)/0x0(0)/0x0(0)
Last flood scan length is 1, maximum is 10
Last flood scan time is 0 msec, maximum is 1 msec
Neighbor Count is 1, Adjacent neighbor count is 1
Adjacent with neighbor 10.16.2.2
Suppress hello for 0 neighbor(s)
'''
# Running-config section for the single OSPF process.
raw3_1 = '''
show running-config | section router ospf 65109
router ospf 65109
router-id 10.4.1.1
network 0.0.0.0 255.255.255.255 area 8
'''
# Command -> canned-output table consumed by mapper above.
self.outputs = {}
self.outputs['show ip ospf neighbor detail'] = raw1
self.outputs['show ip ospf interface | section GigabitEthernet5'] = raw2_1
self.outputs['show ip ospf interface | section GigabitEthernet4'] = raw2_2
self.outputs['show ip ospf interface | section GigabitEthernet3'] = raw2_3
self.outputs['show ip ospf interface | section GigabitEthernet2'] = raw2_4
self.outputs['show running-config | section router ospf 65109'] = raw3_1
# Replace device.execute so every CLI call is answered from the table.
self.device.execute = Mock()
self.device.execute.side_effect = mapper
obj = ShowIpOspfNeighborDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output4)
def test_show_ip_ospf_neighbor_detail_empty(self):
    """Parsing empty device output must raise SchemaEmptyParserError."""
    self.maxDiff = None
    # Device whose execute() returns '' for every command.
    self.device = Mock(**self.empty_output)
    parser = ShowIpOspfNeighborDetail(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
# =======================================
# Unit test for 'show ip ospf sham-links'
# =======================================
class test_show_ip_ospf_sham_links(unittest.TestCase):
'''Unit test for "show ip ospf sham-links"'''
# Device under test; empty_output is the canned '' reply for the
# empty-output test case.
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
# Expected parse of the sham-link fixture: one sham link
# 10.229.11.11 -> 10.151.22.22 in VRF1, process 2, transit area 0.0.0.1,
# with TTL security (3 hops) and cost 111.
golden_parsed_output1 = {
'vrf':
{'VRF1':
{'address_family':
{'ipv4':
{'instance':
{'2':
{'areas':
{'0.0.0.1':
{'sham_links':
{'10.229.11.11 10.151.22.22':
{'adjacency_state': 'full',
'cost': 111,
'dcbitless_lsa_count': 1,
'dead_interval': 40,
'demand_circuit': True,
'donotage_lsa': 'not allowed',
'first': '0x0(0)/0x0(0)/0x0(0)',
'hello_interval': 10,
'hello_timer': '00:00:00',
'index': '1/2/2',
'last_retransmission_max_length': 1,
'last_retransmission_max_scan': 0,
'last_retransmission_scan_length': 1,
'last_retransmission_scan_time': 0,
'link_state': 'up',
'local_id': '10.229.11.11',
'name': 'SL0',
'next': '0x0(0)/0x0(0)/0x0(0)',
'remote_id': '10.151.22.22',
'retrans_qlen': 0,
'state': 'point_to_point,',
'ttl_security':
{'enable': True,
'hops': 3},
'total_retransmission': 2,
'transit_area_id': '0.0.0.1',
'wait_interval': 40}}}}}}}}}}}
def test_show_ip_ospf_sham_links_full1(self):
# Golden test: parse 'show ip ospf sham-links' plus its follow-up
# commands and compare against golden_parsed_output1.
self.maxDiff = None
# Serve each follow-up CLI command from the self.outputs table.
def mapper(key):
return self.outputs[key]
# Primary command output for sham link OSPF_SL0.
raw1 = '''\
R1_ospf_xe#show ip ospf sham-links
Sham Link OSPF_SL0 to address 10.151.22.22 is up
Area 1 source address 10.229.11.11
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 1). Cost of using 111 State POINT_TO_POINT,
Timer intervals configured, Hello 10, Dead 40, Wait 40,
Strict TTL checking enabled, up to 3 hops allowed
Hello due in 00:00:00
Adjacency State FULL
Index 1/2/2, retransmission queue length 0, number of retransmission 2
First 0x0(0)/0x0(0)/0x0(0) Next 0x0(0)/0x0(0)/0x0(0)
Last retransmission scan length is 1, maximum is 1
Last retransmission scan time is 0 msec, maximum is 0 msec
'''
# Interface detail for the sham-link pseudo-interface.
raw2 = '''\
R1_ospf_xe#show ip ospf interface | section OSPF_SL0
OSPF_SL0 is down, line protocol is down
Internet Address 0.0.0.0/0, Interface ID 15, Area 1
Attached via Not Attached
Process ID 2, Router ID 10.229.11.11, Network Type SHAM_LINK, Cost: 111
Topology-MTID Cost Disabled Shutdown Topology Name
0 111 no no Base
Configured as demand circuit
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 1)
Transmit Delay is 1 sec, State DOWN
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
'''
# Running-config section for OSPF process 2 (VRF1).
raw3 = '''\
R1_ospf_xe#show running-config | section router ospf 2
router ospf 2 vrf VRF1
area 1 virtual-link 10.100.5.5
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
redistribute bgp
'''
# Command -> canned-output table consumed by mapper above.
self.outputs = {}
self.outputs['show ip ospf sham-links'] = raw1
self.outputs['show ip ospf interface | section OSPF_SL0'] = raw2
self.outputs['show running-config | section router ospf 2'] = raw3
# Replace device.execute so every CLI call is answered from the table.
self.device.execute = Mock()
self.device.execute.side_effect = mapper
obj = ShowIpOspfShamLinks(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_sham_links_empty(self):
# Parsing empty device output must raise SchemaEmptyParserError.
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfShamLinks(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# ==========================================
# Unit test for 'show ip ospf virtual-links'
# ==========================================
class test_show_ip_ospf_virtual_links(unittest.TestCase):
'''Unit test for "show ip ospf virtual-links"'''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'vrf':
{'VRF1':
{'address_family':
{'ipv4':
{'instance':
{'2':
{'areas':
{'0.0.0.1':
{'virtual_links':
{'0.0.0.1 10.36.3.3':
{'adjacency_state': 'full',
'dcbitless_lsa_count': 7,
'dead_interval': 40,
'demand_circuit': True,
'donotage_lsa': 'not allowed',
'first': '0x0(0)/0x0(0)',
'hello_interval': 10,
'hello_timer': '00:00:08',
'index': '1/3',
'interface': 'GigabitEthernet0/1',
'last_retransmission_max_length': 0,
'last_retransmission_max_scan': 0,
'last_retransmission_scan_length': 0,
'last_retransmission_scan_time': 0,
'link_state': 'up',
'name': 'VL0',
'next': '0x0(0)/0x0(0)',
'retrans_qlen': 0,
'retransmit_interval': 5,
'router_id': '10.36.3.3',
'state': 'point-to-point,',
'topology':
{0:
{'cost': 1,
'disabled': False,
'name': 'Base',
'shutdown': False}},
'total_retransmission': 0,
'transit_area_id': '0.0.0.1',
'transmit_delay': 1,
'wait_interval': 40}}}}}}}}}}}
def test_show_ip_ospf_virtual_links_full1(self):
self.maxDiff = None
def mapper(key):
return self.outputs[key]
raw1 = '''\
R4_ospf_iosv#show ip ospf virtual-links
Virtual Link OSPF_VL0 to router 10.36.3.3 is up
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 7).
Transit area 1, via interface GigabitEthernet0/1
Topology-MTID Cost Disabled Shutdown Topology Name
0 1 no no Base
Transmit Delay is 1 sec, State POINT_TO_POINT,
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
Hello due in 00:00:08
Adjacency State FULL
Index 1/3, retransmission queue length 0, number of retransmission 0
First 0x0(0)/0x0(0) Next 0x0(0)/0x0(0)
Last retransmission scan length is 0, maximum is 0
Last retransmission scan time is 0 msec, maximum is 0 msec
'''
raw2 = '''\
R1_ospf_xe#show ip ospf interface | section OSPF_VL0
OSPF_VL0 is down, line protocol is down
Internet Address 0.0.0.0/0, Interface ID 16, Area 0
Attached via Not Attached
Process ID 2, Router ID 10.229.11.11, Network Type VIRTUAL_LINK, Cost: 65535
Topology-MTID Cost Disabled Shutdown Topology Name
0 65535 no no Base
Configured as demand circuit
Run as demand circuit
DoNotAge LSA not allowed (Number of DCbitless LSA is 1)
Transmit Delay is 1 sec, State DOWN
Timer intervals configured, Hello 10, Dead 40, Wait 40, Retransmit 5
oob-resync timeout 40
'''
raw3 = '''\
R1_ospf_xe#show running-config | section router ospf 2
router ospf 2 vrf VRF1
area 1 virtual-link 10.100.5.5
area 1 sham-link 10.229.11.11 10.151.22.22 cost 111 ttl-security hops 3
redistribute bgp
'''
self.outputs = {}
self.outputs['show ip ospf virtual-links'] = raw1
self.outputs['show ip ospf interface | section OSPF_VL0'] = raw2
self.outputs['show running-config | section router ospf 2'] = raw3
self.device.execute = Mock()
self.device.execute.side_effect = mapper
obj = ShowIpOspfVirtualLinks(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_sham_links_empty(self):
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfVirtualLinks(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# =====================================
# Unit test for 'show ip ospf database'
# =====================================
class test_show_ip_ospf_database(unittest.TestCase):
'''Unit test for "show ip ospf database" '''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.9':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.4.1.1',
'ospfv2':
{'header':
{'adv_router': '10.4.1.1',
'age': 167,
'checksum': '0x00D8C6',
'link_count': 1,
'lsa_id': '10.4.1.1',
'seq_num': '0x8000000D'}}}}}}}}}},
'65109':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'172.31.197.252':
{'adv_router': '172.31.197.252',
'lsa_id': '172.31.197.252',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 1802,
'checksum': '0x007F23',
'link_count': 5,
'lsa_id': '172.31.197.252',
'seq_num': '0x80000161'}}},
'172.31.197.253':
{'adv_router': '172.31.197.253',
'lsa_id': '172.31.197.253',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 1784,
'checksum': '0x00CC34',
'link_count': 8,
'lsa_id': '172.31.197.253',
'seq_num': '0x80000103'}}},
'172.31.197.254':
{'adv_router': '172.31.197.254',
'lsa_id': '172.31.197.254',
'ospfv2':
{'header':
{'adv_router': '172.31.197.254',
'age': 1675,
'checksum': '0x007A61',
'link_count': 3,
'lsa_id': '172.31.197.254',
'seq_num': '0x8000000C'}}},
'192.168.255.0':
{'adv_router': '192.168.255.0',
'lsa_id': '192.168.255.0',
'ospfv2':
{'header':
{'adv_router': '192.168.255.0',
'age': 226,
'checksum': '0x006975',
'link_count': 501,
'lsa_id': '192.168.255.0',
'seq_num': '0x80000069'}}},
'192.168.165.119':
{'adv_router': '192.168.165.119',
'lsa_id': '192.168.165.119',
'ospfv2':
{'header':
{'adv_router': '192.168.165.119',
'age': 1089,
'checksum': '0x0080E0',
'link_count': 2,
'lsa_id': '192.168.165.119',
'seq_num': '0x80000029'}}},
'192.168.165.120':
{'adv_router': '192.168.165.120',
'lsa_id': '192.168.165.120',
'ospfv2':
{'header':
{'adv_router': '192.168.165.120',
'age': 1482,
'checksum': '0x0063CB',
'link_count': 2,
'lsa_id': '192.168.165.120',
'seq_num': '0x80000033'}}},
'192.168.165.220':
{'adv_router': '192.168.165.220',
'lsa_id': '192.168.165.220',
'ospfv2':
{'header':
{'adv_router': '192.168.165.220',
'age': 525,
'checksum': '0x004E8E',
'link_count': 3,
'lsa_id': '192.168.165.220',
'seq_num': '0x800000DE'}}},
'10.22.102.64':
{'adv_router': '10.22.102.64',
'lsa_id': '10.22.102.64',
'ospfv2':
{'header':
{'adv_router': '10.22.102.64',
'age': 2794,
'checksum': '0x002254',
'link_count': 3,
'lsa_id': '10.22.102.64',
'seq_num': '0x80000043'}}}}},
2:
{'lsa_type': 2,
'lsas':
{'172.31.197.102':
{'adv_router': '192.168.165.220',
'lsa_id': '172.31.197.102',
'ospfv2':
{'header':
{'adv_router': '192.168.165.220',
'age': 525,
'checksum': '0x0094CD',
'lsa_id': '172.31.197.102',
'seq_num': '0x80000058'}}},
'172.31.197.93':
{'adv_router': '172.31.197.252',
'lsa_id': '172.31.197.93',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 1802,
'checksum': '0x002D67',
'lsa_id': '172.31.197.93',
'seq_num': '0x80000008'}}},
'172.31.197.97':
{'adv_router': '172.31.197.253',
'lsa_id': '172.31.197.97',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 1356,
'checksum': '0x000D83',
'lsa_id': '172.31.197.97',
'seq_num': '0x80000006'}}},
'192.168.255.0':
{'adv_router': '172.31.197.253',
'lsa_id': '192.168.255.0',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 2213,
'checksum': '0x00D374',
'lsa_id': '192.168.255.0',
'seq_num': '0x80000BA8'}}},
'10.1.1.2':
{'adv_router': '172.31.197.253',
'lsa_id': '10.1.1.2',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 70,
'checksum': '0x0015EF',
'lsa_id': '10.1.1.2',
'seq_num': '0x8000003F'}}},
'192.168.165.49':
{'adv_router': '172.31.197.253',
'lsa_id': '192.168.165.49',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 499,
'checksum': '0x005CBC',
'lsa_id': '192.168.165.49',
'seq_num': '0x8000001A'}}},
'192.168.165.57':
{'adv_router': '172.31.197.253',
'lsa_id': '192.168.165.57',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 927,
'checksum': '0x0008FE',
'lsa_id': '192.168.165.57',
'seq_num': '0x80000023'}}},
'10.22.102.49':
{'adv_router': '172.31.197.252',
'lsa_id': '10.22.102.49',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 289,
'checksum': '0x007AD0',
'lsa_id': '10.22.102.49',
'seq_num': '0x8000005B'}}},
'10.22.102.57':
{'adv_router': '172.31.197.253',
'lsa_id': '10.22.102.57',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 2641,
'checksum': '0x0062F8',
'lsa_id': '10.22.102.57',
'seq_num': '0x80000041'}}}}}}}}}}}}}}}}
# Raw 'show ip ospf database' CLI capture that golden_parsed_output1 is the
# expected parse of (two OSPF processes: 65109 with Router/Net LSAs in Area 0,
# and process 1 with a single Router LSA in Area 9).
golden_output1 = {'execute.return_value': '''
Router#show ip ospf database
Load for five secs: 71%/0%; one minute: 11%; five minutes: 9%
Time source is NTP, 20:29:26.348 EST Fri Nov 11 2016
OSPF Router with ID (172.31.197.254) (Process ID 65109)
Router Link States (Area 0)
Link ID ADV Router Age Seq# Checksum Link count
10.22.102.64 10.22.102.64 2794 0x80000043 0x002254 3
172.31.197.252 172.31.197.252 1802 0x80000161 0x007F23 5
172.31.197.253 172.31.197.253 1784 0x80000103 0x00CC34 8
172.31.197.254 172.31.197.254 1675 0x8000000C 0x007A61 3
192.168.255.0 192.168.255.0 226 0x80000069 0x006975 501
192.168.165.119 192.168.165.119 1089 0x80000029 0x0080E0 2
192.168.165.120 192.168.165.120 1482 0x80000033 0x0063CB 2
192.168.165.220 192.168.165.220 525 0x800000DE 0x004E8E 3
Net Link States (Area 0)
Link ID ADV Router Age Seq# Checksum
10.1.1.2 172.31.197.253 70 0x8000003F 0x0015EF
10.22.102.49 172.31.197.252 289 0x8000005B 0x007AD0
10.22.102.57 172.31.197.253 2641 0x80000041 0x0062F8
172.31.197.93 172.31.197.252 1802 0x80000008 0x002D67
172.31.197.97 172.31.197.253 1356 0x80000006 0x000D83
172.31.197.102 192.168.165.220 525 0x80000058 0x0094CD
192.168.255.0 172.31.197.253 2213 0x80000BA8 0x00D374
192.168.165.49 172.31.197.253 499 0x8000001A 0x005CBC
192.168.165.57 172.31.197.253 927 0x80000023 0x0008FE
OSPF Router with ID (10.4.1.1) (Process ID 1)
Router Link States (Area 9)
Link ID ADV Router Age Seq# Checksum Link count
10.4.1.1 10.4.1.1 167 0x8000000D 0x00D8C6 1
Router#
'''}
golden_parsed_output2 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'65109':
{'areas':
{'0.0.0.8':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'192.168.165.220':
{'adv_router': '192.168.165.220',
'lsa_id': '192.168.165.220',
'ospfv2':
{'header':
{'adv_router': '192.168.165.220',
'age': 113,
'checksum': '0x007C93',
'link_count': 2,
'lsa_id': '192.168.165.220',
'seq_num': '0x800006E3'}}},
'192.168.255.0':
{'adv_router': '192.168.255.0',
'lsa_id': '192.168.255.0',
'ospfv2':
{'header':
{'adv_router': '192.168.255.0',
'age': 1407,
'checksum': '0x00ADD6',
'link_count': 501,
'lsa_id': '192.168.255.0',
'seq_num': '0x800007BC'}}},
'10.22.102.64':
{'adv_router': '10.22.102.64',
'lsa_id': '10.22.102.64',
'ospfv2':
{'header':
{'adv_router': '10.22.102.64',
'age': 2220,
'checksum': '0x008BD8',
'link_count': 3,
'lsa_id': '10.22.102.64',
'seq_num': '0x800003EC'}}},
'172.31.197.252':
{'adv_router': '172.31.197.252',
'lsa_id': '172.31.197.252',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 1272,
'checksum': '0x00B9E5',
'link_count': 6,
'lsa_id': '172.31.197.252',
'seq_num': '0x80000DBD'}}},
'172.31.197.253':
{'adv_router': '172.31.197.253',
'lsa_id': '172.31.197.253',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 663,
'checksum': '0x00FFD8',
'link_count': 4,
'lsa_id': '172.31.197.253',
'seq_num': '0x8000009D'}}},
'172.31.197.254':
{'adv_router': '172.31.197.254',
'lsa_id': '172.31.197.254',
'ospfv2':
{'header':
{'adv_router': '172.31.197.254',
'age': 1900,
'checksum': '0x00D029',
'link_count': 3,
'lsa_id': '172.31.197.254',
'seq_num': '0x800000D9'}}}}},
2:
{'lsa_type': 2,
'lsas':
{'192.168.255.0':
{'adv_router': '172.31.197.252',
'lsa_id': '192.168.255.0',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 26,
'checksum': '0x009E8D',
'lsa_id': '192.168.255.0',
'seq_num': '0x800000D1'}}},
'10.22.102.50':
{'adv_router': '10.22.102.64',
'lsa_id': '10.22.102.50',
'ospfv2':
{'header':
{'adv_router': '10.22.102.64',
'age': 220,
'checksum': '0x003A0A',
'lsa_id': '10.22.102.50',
'seq_num': '0x800000AD'}}},
'10.22.102.58':
{'adv_router': '10.22.102.64',
'lsa_id': '10.22.102.58',
'ospfv2':
{'header':
{'adv_router': '10.22.102.64',
'age': 1220,
'checksum': '0x00E2CD',
'lsa_id': '10.22.102.58',
'seq_num': '0x80000038'}}},
'172.31.197.102':
{'adv_router': '192.168.165.220',
'lsa_id': '172.31.197.102',
'ospfv2':
{'header':
{'adv_router': '192.168.165.220',
'age': 113,
'checksum': '0x009ACA',
'lsa_id': '172.31.197.102',
'seq_num': '0x80000055'}}},
'172.31.197.94':
{'adv_router': '172.31.197.254',
'lsa_id': '172.31.197.94',
'ospfv2':
{'header':
{'adv_router': '172.31.197.254',
'age': 911,
'checksum': '0x007ACC',
'lsa_id': '172.31.197.94',
'seq_num': '0x80000052'}}},
'172.31.197.97':
{'adv_router': '172.31.197.253',
'lsa_id': '172.31.197.97',
'ospfv2':
{'header':
{'adv_router': '172.31.197.253',
'age': 663,
'checksum': '0x00AAB4',
'lsa_id': '172.31.197.97',
'seq_num': '0x80000037'}}}}},
3: {'lsa_type': 3,
'lsas':
{'192.168.165.119':
{'adv_router': '172.31.197.252',
'lsa_id': '192.168.165.119',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 1030,
'checksum': '0x007847',
'lsa_id': '192.168.165.119',
'seq_num': '0x800000D4'}}},
'192.168.165.120':
{'adv_router': '172.31.197.252',
'lsa_id': '192.168.165.120',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 26,
'checksum': '0x005160',
'lsa_id': '192.168.165.120',
'seq_num': '0x800003DE'}}},
'192.168.165.48':
{'adv_router': '172.31.197.252',
'lsa_id': '192.168.165.48',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 26,
'checksum': '0x0006F6',
'lsa_id': '192.168.165.48',
'seq_num': '0x800003DF'}}},
'192.168.165.56':
{'adv_router': '172.31.197.252',
'lsa_id': '192.168.165.56',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 1779,
'checksum': '0x00D42E',
'lsa_id': '192.168.165.56',
'seq_num': '0x800000D4'}}}}},
4: {'lsa_type': 4,
'lsas':
{'192.168.165.119':
{'adv_router': '172.31.197.252',
'lsa_id': '192.168.165.119',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 1030,
'checksum': '0x00605F',
'lsa_id': '192.168.165.119',
'seq_num': '0x800000D4'}}},
'192.168.165.120':
{'adv_router': '172.31.197.252',
'lsa_id': '192.168.165.120',
'ospfv2':
{'header':
{'adv_router': '172.31.197.252',
'age': 26,
'checksum': '0x003978',
'lsa_id': '192.168.165.120',
'seq_num': '0x800003DE'}}}}}}}}}}}}}}}}
# Raw 'show ip ospf database' CLI capture that golden_parsed_output2 is the
# expected parse of (process 65109, Area 8, including Summary Net and
# Summary ASB LSA sections — i.e. LSA types 1 through 4).
golden_output2 = {'execute.return_value': '''
Router#show ip ospf database
Load for five secs: 1%/0%; one minute: 4%; five minutes: 6%
Time source is NTP, 15:40:22.269 EST Sun Nov 6 2016
OSPF Router with ID (172.31.197.254) (Process ID 65109)
Router Link States (Area 8)
Link ID ADV Router Age Seq# Checksum Link count
10.22.102.64 10.22.102.64 2220 0x800003EC 0x008BD8 3
172.31.197.252 172.31.197.252 1272 0x80000DBD 0x00B9E5 6
172.31.197.253 172.31.197.253 663 0x8000009D 0x00FFD8 4
172.31.197.254 172.31.197.254 1900 0x800000D9 0x00D029 3
192.168.255.0 192.168.255.0 1407 0x800007BC 0x00ADD6 501
192.168.165.220 192.168.165.220 113 0x800006E3 0x007C93 2
Net Link States (Area 8)
Link ID ADV Router Age Seq# Checksum
10.22.102.50 10.22.102.64 220 0x800000AD 0x003A0A
10.22.102.58 10.22.102.64 1220 0x80000038 0x00E2CD
172.31.197.94 172.31.197.254 911 0x80000052 0x007ACC
172.31.197.97 172.31.197.253 663 0x80000037 0x00AAB4
172.31.197.102 192.168.165.220 113 0x80000055 0x009ACA
192.168.255.0 172.31.197.252 26 0x800000D1 0x009E8D
Summary Net Link States (Area 8)
Link ID ADV Router Age Seq# Checksum
192.168.165.48 172.31.197.252 26 0x800003DF 0x0006F6
192.168.165.56 172.31.197.252 1779 0x800000D4 0x00D42E
192.168.165.119 172.31.197.252 1030 0x800000D4 0x007847
192.168.165.120 172.31.197.252 26 0x800003DE 0x005160
Summary ASB Link States (Area 8)
Link ID ADV Router Age Seq# Checksum
192.168.165.119 172.31.197.252 1030 0x800000D4 0x00605F
192.168.165.120 172.31.197.252 26 0x800003DE 0x003978
Router#
'''}
golden_parsed_output3 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'65109':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'192.168.101.2':
{'adv_router': '192.168.101.2',
'lsa_id': '192.168.101.2',
'ospfv2':
{'header':
{'adv_router': '192.168.101.2',
'age': 1548,
'checksum': '0x007D6B',
'link_count': 501,
'lsa_id': '192.168.101.2',
'seq_num': '0x8000005F'}}},
'192.168.135.119':
{'adv_router': '192.168.135.119',
'lsa_id': '192.168.135.119',
'ospfv2':
{'header':
{'adv_router': '192.168.135.119',
'age': 533,
'checksum': '0x0090D8',
'link_count': 2,
'lsa_id': '192.168.135.119',
'seq_num': '0x80000021'}}},
'192.168.135.120':
{'adv_router': '192.168.135.120',
'lsa_id': '192.168.135.120',
'ospfv2':
{'header':
{'adv_router': '192.168.135.120',
'age': 919,
'checksum': '0x0073C3',
'link_count': 2,
'lsa_id': '192.168.135.120',
'seq_num': '0x8000002B'}}},
'192.168.135.220':
{'adv_router': '192.168.135.220',
'lsa_id': '192.168.135.220',
'ospfv2':
{'header':
{'adv_router': '192.168.135.220',
'age': 2014,
'checksum': '0x006085',
'link_count': 3,
'lsa_id': '192.168.135.220',
'seq_num': '0x800000D5'}}},
'10.22.102.64':
{'adv_router': '10.22.102.64',
'lsa_id': '10.22.102.64',
'ospfv2':
{'header':
{'adv_router': '10.22.102.64',
'age': 1111,
'checksum': '0x002C4F',
'link_count': 3,
'lsa_id': '10.22.102.64',
'seq_num': '0x8000003E'}}},
'192.168.178.142':
{'adv_router': '192.168.178.142',
'lsa_id': '192.168.178.142',
'ospfv2':
{'header':
{'adv_router': '192.168.178.142',
'age': 768,
'checksum': '0x00C5E7',
'link_count': 5,
'lsa_id': '192.168.178.142',
'seq_num': '0x80000155'}}},
'192.168.198.253':
{'adv_router': '192.168.198.253',
'lsa_id': '192.168.198.253',
'ospfv2':
{'header':
{'adv_router': '192.168.198.253',
'age': 657,
'checksum': '0x00E328',
'link_count': 8,
'lsa_id': '192.168.198.253',
'seq_num': '0x800000F8'}}},
'192.168.198.254':
{'adv_router': '192.168.198.254',
'lsa_id': '192.168.198.254',
'ospfv2':
{'header':
{'adv_router': '192.168.198.254',
'age': 656,
'checksum': '0x007A2A',
'link_count': 3,
'lsa_id': '192.168.198.254',
'seq_num': '0x8000002F'}}}}},
2:
{'lsa_type': 2,
'lsas':
{'192.168.101.2':
{'adv_router': '192.168.198.253',
'lsa_id': '192.168.101.2',
'ospfv2':
{'header':
{'adv_router': '192.168.198.253',
'age': 2501,
'checksum': '0x00DF6E',
'lsa_id': '192.168.101.2',
'seq_num': '0x80000BA2'}}},
'192.168.135.49':
{'adv_router': '192.168.198.253',
'lsa_id': '192.168.135.49',
'ospfv2':
{'header':
{'adv_router': '192.168.198.253',
'age': 1006,
'checksum': '0x0068B6',
'lsa_id': '192.168.135.49',
'seq_num': '0x80000014'}}},
'192.168.135.57':
{'adv_router': '192.168.198.253',
'lsa_id': '192.168.135.57',
'ospfv2':
{'header':
{'adv_router': '192.168.198.253',
'age': 2072,
'checksum': '0x0014F8',
'lsa_id': '192.168.135.57',
'seq_num': '0x8000001D'}}},
'10.22.102.49':
{'adv_router': '192.168.178.142',
'lsa_id': '10.22.102.49',
'ospfv2':
{'header':
{'adv_router': '192.168.178.142',
'age': 1763,
'checksum': '0x008CC7',
'lsa_id': '10.22.102.49',
'seq_num': '0x80000052'}}},
'10.22.102.57':
{'adv_router': '192.168.198.253',
'lsa_id': '10.22.102.57',
'ospfv2':
{'header':
{'adv_router': '192.168.198.253',
'age': 89,
'checksum': '0x006CF3',
'lsa_id': '10.22.102.57',
'seq_num': '0x8000003C'}}},
'10.1.1.2':
{'adv_router': '192.168.198.253',
'lsa_id': '10.1.1.2',
'ospfv2':
{'header':
{'adv_router': '192.168.198.253',
'age': 547,
'checksum': '0x0021E9',
'lsa_id': '10.1.1.2',
'seq_num': '0x80000039'}}},
'192.168.198.102':
{'adv_router': '192.168.135.220',
'lsa_id': '192.168.198.102',
'ospfv2':
{'header':
{'adv_router': '192.168.135.220',
'age': 2014,
'checksum': '0x00A6C4',
'lsa_id': '192.168.198.102',
'seq_num': '0x8000004F'}}},
'192.168.198.94':
{'adv_router': '192.168.198.254',
'lsa_id': '192.168.198.94',
'ospfv2':
{'header':
{'adv_router': '192.168.198.254',
'age': 639,
'checksum': '0x001B7C',
'lsa_id': '192.168.198.94',
'seq_num': '0x80000002'}}},
'192.168.198.97':
{'adv_router': '192.168.198.253',
'lsa_id': '192.168.198.97',
'ospfv2':
{'header':
{'adv_router': '192.168.198.253',
'age': 657,
'checksum': '0x00177E',
'lsa_id': '192.168.198.97',
'seq_num': '0x80000001'}}}}}}}}}}}}}}}}
# Raw 'show ip ospf database' CLI capture that golden_parsed_output3 is the
# expected parse of (process 65109, Area 0, Router and Net LSAs only; note
# the non-'Router#' hostname prompt '1006#').
golden_output3 = {'execute.return_value': '''
1006#show ip ospf database
Load for five secs: 0%/0%; one minute: 0%; five minutes: 0%
Time source is NTP, 15:51:24.610 EST Fri Nov 11 2016
OSPF Router with ID (192.168.178.142) (Process ID 65109)
Router Link States (Area 0)
Link ID ADV Router Age Seq# Checksum Link count
10.22.102.64 10.22.102.64 1111 0x8000003E 0x002C4F 3
192.168.178.142 192.168.178.142 768 0x80000155 0x00C5E7 5
192.168.198.253 192.168.198.253 657 0x800000F8 0x00E328 8
192.168.198.254 192.168.198.254 656 0x8000002F 0x007A2A 3
192.168.101.2 192.168.101.2 1548 0x8000005F 0x007D6B 501
192.168.135.119 192.168.135.119 533 0x80000021 0x0090D8 2
192.168.135.120 192.168.135.120 919 0x8000002B 0x0073C3 2
192.168.135.220 192.168.135.220 2014 0x800000D5 0x006085 3
Net Link States (Area 0)
Link ID ADV Router Age Seq# Checksum
10.1.1.2 192.168.198.253 547 0x80000039 0x0021E9
10.22.102.49 192.168.178.142 1763 0x80000052 0x008CC7
10.22.102.57 192.168.198.253 89 0x8000003C 0x006CF3
192.168.198.94 192.168.198.254 639 0x80000002 0x001B7C
192.168.198.97 192.168.198.253 657 0x80000001 0x00177E
192.168.198.102 192.168.135.220 2014 0x8000004F 0x00A6C4
192.168.101.2 192.168.198.253 2501 0x80000BA2 0x00DF6E
192.168.135.49 192.168.198.253 1006 0x80000014 0x0068B6
192.168.135.57 192.168.198.253 2072 0x8000001D 0x0014F8
1006#
'''}
def test_show_ip_ospf_database_full1(self):
    """Golden output 1 must parse into golden_parsed_output1."""
    self.maxDiff = None
    device = Mock(**self.golden_output1)
    self.device = device
    parser = ShowIpOspfDatabase(device=device)
    self.assertEqual(parser.parse(), self.golden_parsed_output1)
def test_show_ip_ospf_database_full2(self):
    """Golden output 2 must parse into golden_parsed_output2."""
    self.maxDiff = None
    device = Mock(**self.golden_output2)
    self.device = device
    parser = ShowIpOspfDatabase(device=device)
    self.assertEqual(parser.parse(), self.golden_parsed_output2)
def test_show_ip_ospf_database_full3(self):
    """Golden output 3 must parse into golden_parsed_output3."""
    self.maxDiff = None
    device = Mock(**self.golden_output3)
    self.device = device
    parser = ShowIpOspfDatabase(device=device)
    self.assertEqual(parser.parse(), self.golden_parsed_output3)
def test_show_ip_ospf_database_empty(self):
    """Empty device output must raise SchemaEmptyParserError."""
    self.maxDiff = None
    device = Mock(**self.empty_output)
    self.device = device
    parser = ShowIpOspfDatabase(device=device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
# ============================================
# Unit test for 'show ip ospf database router'
# ============================================
class test_show_ip_ospf_database_router(unittest.TestCase):
'''Unit test for "show ip ospf database router" '''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'10.4.1.1 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.4.1.1',
'ospfv2':
{'body':
{'router':
{'links':
{'10.4.1.1':
{'link_data': '255.255.255.255',
'link_id': '10.4.1.1',
'num_mtid_metrics': 2,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0},
32:
{'metric': 1,
'mt_id': 32},
33:
{'metric': 1,
'mt_id': 33}},
'type': 'stub network'},
'10.1.2.1':
{'link_data': '10.1.2.1',
'link_id': '10.1.2.1',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.1.4.4':
{'link_data': '10.1.4.1',
'link_id': '10.1.4.4',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'}},
'num_of_links': 3}},
'header':
{'adv_router': '10.4.1.1',
'age': 742,
'checksum': '0x6228',
'length': 60,
'lsa_id': '10.4.1.1',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000003D',
'type': 1}}},
'10.16.2.2 10.16.2.2':
{'adv_router': '10.16.2.2',
'lsa_id': '10.16.2.2',
'ospfv2':
{'body':
{'router':
{'links':
{'10.1.2.1':
{'link_data': '10.1.2.2',
'link_id': '10.1.2.1',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.2.3.3':
{'link_data': '10.2.3.2',
'link_id': '10.2.3.3',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.2.4.4':
{'link_data': '10.2.4.2',
'link_id': '10.2.4.4',
'num_mtid_metrics': 0,
'topologies':
{0: {'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.16.2.2':
{'link_data': '255.255.255.255',
'link_id': '10.16.2.2',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'stub network'}},
'num_of_links': 4}},
'header':
{'adv_router': '10.16.2.2',
'age': 1520,
'checksum': '0x672A',
'length': 72,
'lsa_id': '10.16.2.2',
'option': 'None',
'option_desc': 'No TOS-capability, No DC',
'seq_num': '80000013',
'type': 1}}},
'10.36.3.3 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.36.3.3',
'ospfv2':
{'body':
{'router':
{'links':
{'10.2.3.3':
{'link_data': '10.2.3.3',
'link_id': '10.2.3.3',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.3.4.4':
{'link_data': '10.3.4.3',
'link_id': '10.3.4.4',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.36.3.3':
{'link_data': '255.255.255.255',
'link_id': '10.36.3.3',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'stub network'}},
'num_of_links': 3}},
'header':
{'adv_router': '10.36.3.3',
'age': 235,
'checksum': '0x75F8',
'length': 60,
'lsa_id': '10.36.3.3',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000033',
'type': 1}}},
'10.64.4.4 10.64.4.4':
{'adv_router': '10.64.4.4',
'lsa_id': '10.64.4.4',
'ospfv2':
{'body':
{'router':
{'links':
{'10.1.4.4':
{'link_data': '10.1.4.4',
'link_id': '10.1.4.4',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.2.4.4':
{'link_data': '10.2.4.4',
'link_id': '10.2.4.4',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.3.4.4':
{'link_data': '10.3.4.4',
'link_id': '10.3.4.4',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.64.4.4':
{'link_data': '255.255.255.255',
'link_id': '10.64.4.4',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'stub network'}},
'num_of_links': 4}},
'header':
{'adv_router': '10.64.4.4',
'age': 1486,
'as_boundary_router': True,
'checksum': '0xA57C',
'length': 72,
'lsa_id': '10.64.4.4',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC',
'seq_num': '80000036',
'type': 1}}}}}}}}}},
'2':
{'areas':
{'0.0.0.1':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'10.229.11.11 10.229.11.11':
{'adv_router': '10.229.11.11',
'lsa_id': '10.229.11.11',
'ospfv2':
{'body':
{'router':
{'links':
{'10.186.5.1':
{'link_data': '10.186.5.1',
'link_id': '10.186.5.1',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.151.22.22':
{'link_data': '0.0.0.14',
'link_id': '10.151.22.22',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 111,
'mt_id': 0,
'tos': 0}},
'type': 'another router (point-to-point)'}},
'num_of_links': 2}},
'header':
{'adv_router': '10.229.11.11',
'age': 651,
'area_border_router': True,
'as_boundary_router': True,
'checksum': '0x9CE3',
'length': 48,
'lsa_id': '10.229.11.11',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000003E',
'type': 1}}},
'10.151.22.22 10.151.22.22':
{'adv_router': '10.151.22.22',
'lsa_id': '10.151.22.22',
'ospfv2':
{'body':
{'router':
{'links':
{'10.229.11.11':
{'link_data': '0.0.0.6',
'link_id': '10.229.11.11',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'another router (point-to-point)'},
'10.229.6.6':
{'link_data': '10.229.6.2',
'link_id': '10.229.6.6',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 40,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'}},
'num_of_links': 2}},
'header':
{'adv_router': '10.151.22.22',
'age': 480,
'area_border_router': True,
'as_boundary_router': True,
'checksum': '0xC41A',
'length': 48,
'lsa_id': '10.151.22.22',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'No '
'DC',
'seq_num': '80000019',
'type': 1}}},
'10.36.3.3 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.36.3.3',
'ospfv2':
{'body':
{'router':
{'links':
{'10.19.7.7':
{'link_data': '10.19.7.3',
'link_id': '10.19.7.7',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'}},
'num_of_links': 1}},
'header':
{'adv_router': '10.36.3.3',
'age': 1128,
'area_border_router': True,
'as_boundary_router': True,
'checksum': '0x5845',
'length': 36,
'lsa_id': '10.36.3.3',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000035',
'type': 1}}},
'10.115.55.55 10.115.55.55':
{'adv_router': '10.115.55.55',
'lsa_id': '10.115.55.55',
'ospfv2':
{'body':
{'router':
{'links':
{'10.186.5.1':
{'link_data': '10.186.5.5',
'link_id': '10.186.5.1',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.115.6.6':
{'link_data': '10.115.6.5',
'link_id': '10.115.6.6',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 30,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.115.55.55':
{'link_data': '255.255.255.255',
'link_id': '10.115.55.55',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'stub network'}},
'num_of_links': 3}},
'header':
{'adv_router': '10.115.55.55',
'age': 318,
'checksum': '0xE7BC',
'length': 60,
'lsa_id': '10.115.55.55',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000037',
'type': 1}}},
'10.84.66.66 10.84.66.66':
{'adv_router': '10.84.66.66',
'lsa_id': '10.84.66.66',
'ospfv2':
{'body':
{'router':
{'links':
{'10.229.6.6':
{'link_data': '10.229.6.6',
'link_id': '10.229.6.6',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.115.6.6':
{'link_data': '10.115.6.6',
'link_id': '10.115.6.6',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 30,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.166.7.6':
{'link_data': '10.166.7.6',
'link_id': '10.166.7.6',
'num_mtid_metrics': 0,
'topologies': {0: {'metric': 30,
'mt_id': 0,
'tos': 0}},
'type': 'transit '
'network'},
'10.84.66.66': {'link_data': '255.255.255.255',
'link_id': '10.84.66.66',
'num_mtid_metrics': 0,
'topologies': {0: {'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'stub network'}},
'num_of_links': 4}},
'header':
{'adv_router': '10.84.66.66',
'age': 520,
'checksum': '0x1282',
'length': 72,
'lsa_id': '10.84.66.66',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC',
'seq_num': '8000003C',
'type': 1}}},
'10.1.77.77 10.1.77.77':
{'adv_router': '10.1.77.77',
'lsa_id': '10.1.77.77',
'ospfv2':
{'body':
{'router':
{'links':
{'10.19.7.7':
{'link_data': '10.19.7.7',
'link_id': '10.19.7.7',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.166.7.6':
{'link_data': '10.166.7.7',
'link_id': '10.166.7.6',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 30,
'mt_id': 0,
'tos': 0}},
'type': 'transit network'},
'10.1.77.77':
{'link_data': '255.255.255.255',
'link_id': '10.1.77.77',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'stub network'}},
'num_of_links': 3}},
'header':
{'adv_router': '10.1.77.77',
'age': 288,
'checksum': '0x1379',
'length': 60,
'lsa_id': '10.1.77.77',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000030',
'type': 1}}}}}}}}}},
'3':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'10.115.11.11 10.115.11.11':
{'adv_router': '10.115.11.11',
'lsa_id': '10.115.11.11',
'ospfv2':
{'body':
{'router':
{'links':
{'10.115.11.11':
{'link_data': '255.255.255.255',
'link_id': '10.115.11.11',
'num_mtid_metrics': 0,
'topologies':
{0:
{'metric': 1,
'mt_id': 0,
'tos': 0}},
'type': 'stub network'}},
'num_of_links': 1}},
'header':
{'adv_router': '10.115.11.11',
'age': 50,
'as_boundary_router': True,
'checksum': '0x881A',
'length': 36,
'lsa_id': '10.115.11.11',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000001',
'type': 1}}}}}}}},
'0.0.0.11':
{'database':
{'lsa_types':
{1:
{'lsa_type': 1,
'lsas':
{'10.115.11.11 10.115.11.11':
{'adv_router': '10.115.11.11',
'lsa_id': '10.115.11.11',
'ospfv2':
{'body':
{'router':
{'num_of_links': 0}},
'header':
{'adv_router': '10.115.11.11',
'age': 8,
'as_boundary_router': True,
'checksum': '0x1D1B',
'length': 24,
'lsa_id': '10.115.11.11',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000001',
'type': 1}}}}}}}}}}}}}}}}
golden_output1 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf database router
OSPF Router with ID (10.4.1.1) (Process ID 1)
Router Link States (Area 0)
LS age: 742
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.4.1.1
Advertising Router: 10.4.1.1
LS Seq Number: 8000003D
Checksum: 0x6228
Length: 60
Number of Links: 3
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.4.1.1
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 2
TOS 0 Metrics: 1
MTID 32 Metrics: 1
MTID 33 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.1.2.1
(Link Data) Router Interface address: 10.1.2.1
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.1.4.4
(Link Data) Router Interface address: 10.1.4.1
Number of MTID metrics: 0
TOS 0 Metrics: 1
LS age: 1520
Options: (No TOS-capability, No DC)
LS Type: Router Links
Link State ID: 10.16.2.2
Advertising Router: 10.16.2.2
LS Seq Number: 80000013
Checksum: 0x672A
Length: 72
Number of Links: 4
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.16.2.2
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.2.3.3
(Link Data) Router Interface address: 10.2.3.2
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.2.4.4
(Link Data) Router Interface address: 10.2.4.2
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.1.2.1
(Link Data) Router Interface address: 10.1.2.2
Number of MTID metrics: 0
TOS 0 Metrics: 1
LS age: 235
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.36.3.3
Advertising Router: 10.36.3.3
LS Seq Number: 80000033
Checksum: 0x75F8
Length: 60
Number of Links: 3
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.36.3.3
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.3.4.4
(Link Data) Router Interface address: 10.3.4.3
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.2.3.3
(Link Data) Router Interface address: 10.2.3.3
Number of MTID metrics: 0
TOS 0 Metrics: 1
LS age: 1486
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.64.4.4
Advertising Router: 10.64.4.4
LS Seq Number: 80000036
Checksum: 0xA57C
Length: 72
AS Boundary Router
Number of Links: 4
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.64.4.4
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.2.4.4
(Link Data) Router Interface address: 10.2.4.4
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.3.4.4
(Link Data) Router Interface address: 10.3.4.4
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.1.4.4
(Link Data) Router Interface address: 10.1.4.4
Number of MTID metrics: 0
TOS 0 Metrics: 1
OSPF Router with ID (10.229.11.11) (Process ID 2)
Router Link States (Area 1)
LS age: 1128
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.36.3.3
Advertising Router: 10.36.3.3
LS Seq Number: 80000035
Checksum: 0x5845
Length: 36
Area Border Router
AS Boundary Router
Number of Links: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.19.7.7
(Link Data) Router Interface address: 10.19.7.3
Number of MTID metrics: 0
TOS 0 Metrics: 1
LS age: 651
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.229.11.11
Advertising Router: 10.229.11.11
LS Seq Number: 8000003E
Checksum: 0x9CE3
Length: 48
Area Border Router
AS Boundary Router
Number of Links: 2
Link connected to: another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.151.22.22
(Link Data) Router Interface address: 0.0.0.14
Number of MTID metrics: 0
TOS 0 Metrics: 111
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.186.5.1
(Link Data) Router Interface address: 10.186.5.1
Number of MTID metrics: 0
TOS 0 Metrics: 1
LS age: 480
Options: (No TOS-capability, No DC)
LS Type: Router Links
Link State ID: 10.151.22.22
Advertising Router: 10.151.22.22
LS Seq Number: 80000019
Checksum: 0xC41A
Length: 48
Area Border Router
AS Boundary Router
Number of Links: 2
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.229.6.6
(Link Data) Router Interface address: 10.229.6.2
Number of MTID metrics: 0
TOS 0 Metrics: 40
Link connected to: another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.229.11.11
(Link Data) Router Interface address: 0.0.0.6
Number of MTID metrics: 0
TOS 0 Metrics: 1
LS age: 318
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.115.55.55
Advertising Router: 10.115.55.55
LS Seq Number: 80000037
Checksum: 0xE7BC
Length: 60
Number of Links: 3
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.115.55.55
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.115.6.6
(Link Data) Router Interface address: 10.115.6.5
Number of MTID metrics: 0
TOS 0 Metrics: 30
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.186.5.1
(Link Data) Router Interface address: 10.186.5.5
Number of MTID metrics: 0
TOS 0 Metrics: 1
LS age: 520
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.84.66.66
Advertising Router: 10.84.66.66
LS Seq Number: 8000003C
Checksum: 0x1282
Length: 72
Number of Links: 4
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.84.66.66
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.166.7.6
(Link Data) Router Interface address: 10.166.7.6
Number of MTID metrics: 0
TOS 0 Metrics: 30
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.229.6.6
(Link Data) Router Interface address: 10.229.6.6
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.115.6.6
(Link Data) Router Interface address: 10.115.6.6
Number of MTID metrics: 0
TOS 0 Metrics: 30
LS age: 288
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.1.77.77
Advertising Router: 10.1.77.77
LS Seq Number: 80000030
Checksum: 0x1379
Length: 60
Number of Links: 3
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.1.77.77
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.166.7.6
(Link Data) Router Interface address: 10.166.7.7
Number of MTID metrics: 0
TOS 0 Metrics: 30
Link connected to: a Transit Network
(Link ID) Designated Router address: 10.19.7.7
(Link Data) Router Interface address: 10.19.7.7
Number of MTID metrics: 0
TOS 0 Metrics: 1
OSPF Router with ID (10.115.11.11) (Process ID 3)
Router Link States (Area 0)
LS age: 50
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.115.11.11
Advertising Router: 10.115.11.11
LS Seq Number: 80000001
Checksum: 0x881A
Length: 36
AS Boundary Router
Number of Links: 1
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.115.11.11
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Router Link States (Area 11)
LS age: 8
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.115.11.11
Advertising Router: 10.115.11.11
LS Seq Number: 80000001
Checksum: 0x1D1B
Length: 24
AS Boundary Router
Number of Links: 0
'''}
def test_show_ip_ospf_database_router_full1(self):
    """Golden test: parsed output must equal the expected data structure."""
    self.maxDiff = None
    # Mock the device so execute() returns the captured golden CLI output.
    device = Mock(**self.golden_output1)
    self.device = device
    parser = ShowIpOspfDatabaseRouter(device=device)
    self.assertEqual(parser.parse(), self.golden_parsed_output1)
def test_show_ip_ospf_database_router_empty(self):
    """Empty device output must raise SchemaEmptyParserError."""
    self.maxDiff = None
    # Mock the device so execute() returns an empty string.
    device = Mock(**self.empty_output)
    self.device = device
    parser = ShowIpOspfDatabaseRouter(device=device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
# ==============================================
# Unit test for 'show ip ospf database external'
# ==============================================
class test_show_ip_ospf_database_external(unittest.TestCase):
'''Unit test for "show ip ospf database external" '''
# Stand-in device object; its execute() is replaced by a Mock in each test.
device = Device(name='aDevice')
# Simulates a device that returns no output for the command.
empty_output = {'execute.return_value': ''}
# Expected parsed structure for golden_output1 below
# (vrf -> address_family -> instance -> areas -> database -> lsa_types -> lsas).
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{5:
{'lsa_type': 5,
'lsas':
{'10.94.44.44 10.64.4.4':
{'adv_router': '10.64.4.4',
'lsa_id': '10.94.44.44',
'ospfv2':
{'body':
{'external':
{'network_mask': '255.255.255.255',
'topologies':
{0:
{'external_route_tag': 0,
'flags': 'E',
'forwarding_address': '0.0.0.0',
'metric': 20,
'mt_id': 0}}}},
'header':
{'adv_router': '10.64.4.4',
'age': 1595,
'checksum': '0x7F60',
'length': 36,
'lsa_id': '10.94.44.44',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000001',
'type': 5}}}}}}}}}},
'2': {}}}}}}}
# Raw CLI output captured from a device; fed to the parser via the mocked
# device's execute() return value.
golden_output1 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf database external
OSPF Router with ID (10.4.1.1) (Process ID 1)
Type-5 AS External Link States
LS age: 1595
Options: (No TOS-capability, DC, Upward)
LS Type: AS External Link
Link State ID: 10.94.44.44 (External Network Number )
Advertising Router: 10.64.4.4
LS Seq Number: 80000001
Checksum: 0x7F60
Length: 36
Network Mask: /32
Metric Type: 2 (Larger than any link state path)
MTID: 0
Metric: 20
Forward Address: 0.0.0.0
External Route Tag: 0
OSPF Router with ID (10.229.11.11) (Process ID 2)
'''}
# Golden test: parser output must equal the expected structure above.
def test_show_ip_ospf_database_external_full1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output1)
obj = ShowIpOspfDatabaseExternal(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
# Negative test: empty device output must raise SchemaEmptyParserError.
def test_show_ip_ospf_database_external_empty(self):
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfDatabaseExternal(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# ==============================================
# Unit test for 'show ip ospf database network'
# ==============================================
class test_show_ip_ospf_database_network(unittest.TestCase):
'''Unit test for "show ip ospf database network" '''
# Stand-in device object; its execute() is replaced by a Mock in each test.
device = Device(name='aDevice')
# Simulates a device that returns no output for the command.
empty_output = {'execute.return_value': ''}
# Expected parsed structure for golden_output1 below. Type-2 (network) LSAs
# keyed as '<lsa_id> <adv_router>' under each process/area.
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{2:
{'lsa_type': 2,
'lsas':
{'10.1.2.1 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.1.2.1',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.4.1.1': {},
'10.16.2.2': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.4.1.1',
'age': 786,
'checksum': '0x3DD0',
'length': 32,
'lsa_id': '10.1.2.1',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000000F',
'type': 2}}},
'10.1.4.4 10.64.4.4':
{'adv_router': '10.64.4.4',
'lsa_id': '10.1.4.4',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.4.1.1': {},
'10.64.4.4': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.64.4.4',
'age': 1496,
'checksum': '0xA431',
'length': 32,
'lsa_id': '10.1.4.4',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000002E',
'type': 2}}},
'10.2.3.3 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.2.3.3',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.16.2.2': {},
'10.36.3.3': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.36.3.3',
'age': 774,
'checksum': '0x2ACF',
'length': 32,
'lsa_id': '10.2.3.3',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000000F',
'type': 2}}},
'10.2.4.4 10.64.4.4':
{'adv_router': '10.64.4.4',
'lsa_id': '10.2.4.4',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.16.2.2': {},
'10.64.4.4': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.64.4.4',
'age': 747,
'checksum': '0x9E6',
'length': 32,
'lsa_id': '10.2.4.4',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC',
'seq_num': '8000000F',
'type': 2}}},
'10.3.4.4 10.64.4.4':
{'adv_router': '10.64.4.4',
'lsa_id': '10.3.4.4',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.36.3.3': {},
'10.64.4.4': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.64.4.4',
'age': 992,
'checksum': '0xF0DA',
'length': 32,
'lsa_id': '10.3.4.4',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC',
'seq_num': '8000002E',
'type': 2}}}}}}}}}},
'2':
{'areas':
{'0.0.0.1':
{'database':
{'lsa_types':
{2:
{'lsa_type': 2,
'lsas':
{'10.186.5.1 10.229.11.11':
{'adv_router': '10.229.11.11',
'lsa_id': '10.186.5.1',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.229.11.11': {},
'10.115.55.55': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.229.11.11',
'age': 1445,
'checksum': '0xDFD8',
'length': 32,
'lsa_id': '10.186.5.1',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000032',
'type': 2}}},
'10.229.6.6 10.84.66.66':
{'adv_router': '10.84.66.66',
'lsa_id': '10.229.6.6',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.151.22.22': {},
'10.84.66.66': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.84.66.66',
'age': 1073,
'checksum': '0x415E',
'length': 32,
'lsa_id': '10.229.6.6',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000000F',
'type': 2}}},
'10.19.7.7 10.1.77.77':
{'adv_router': '10.1.77.77',
'lsa_id': '10.19.7.7',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.36.3.3': {},
'10.1.77.77': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.1.77.77',
'age': 849,
'checksum': '0x5C19',
'length': 32,
'lsa_id': '10.19.7.7',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000002A',
'type': 2}}},
'10.115.6.6 10.84.66.66':
{'adv_router': '10.84.66.66',
'lsa_id': '10.115.6.6',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.115.55.55': {},
'10.84.66.66': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.84.66.66',
'age': 564,
'checksum': '0x619C',
'length': 32,
'lsa_id': '10.115.6.6',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC',
'seq_num': '80000029',
'type': 2}}},
'10.166.7.6 10.84.66.66':
{'adv_router': '10.84.66.66',
'lsa_id': '10.166.7.6',
'ospfv2':
{'body':
{'network':
{'attached_routers':
{'10.84.66.66': {},
'10.1.77.77': {}},
'network_mask': '255.255.255.0'}},
'header':
{'adv_router': '10.84.66.66',
'age': 1845,
'checksum': '0x980A',
'length': 32,
'lsa_id': '10.166.7.6',
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '8000002A',
'type': 2}}}}}}}}}}}}}}}}
# Raw CLI output captured from a device; fed to the parser via the mocked
# device's execute() return value.
golden_output1 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf database network
OSPF Router with ID (10.4.1.1) (Process ID 1)
Net Link States (Area 0)
LS age: 786
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.1.2.1 (address of Designated Router)
Advertising Router: 10.4.1.1
LS Seq Number: 8000000F
Checksum: 0x3DD0
Length: 32
Network Mask: /24
Attached Router: 10.4.1.1
Attached Router: 10.16.2.2
LS age: 1496
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.1.4.4 (address of Designated Router)
Advertising Router: 10.64.4.4
LS Seq Number: 8000002E
Checksum: 0xA431
Length: 32
Network Mask: /24
Attached Router: 10.64.4.4
Attached Router: 10.4.1.1
LS age: 774
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.2.3.3 (address of Designated Router)
Advertising Router: 10.36.3.3
LS Seq Number: 8000000F
Checksum: 0x2ACF
Length: 32
Network Mask: /24
Attached Router: 10.16.2.2
Attached Router: 10.36.3.3
LS age: 747
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.2.4.4 (address of Designated Router)
Advertising Router: 10.64.4.4
LS Seq Number: 8000000F
Checksum: 0x9E6
Length: 32
Network Mask: /24
Attached Router: 10.64.4.4
Attached Router: 10.16.2.2
LS age: 992
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.3.4.4 (address of Designated Router)
Advertising Router: 10.64.4.4
LS Seq Number: 8000002E
Checksum: 0xF0DA
Length: 32
Network Mask: /24
Attached Router: 10.64.4.4
Attached Router: 10.36.3.3
OSPF Router with ID (10.229.11.11) (Process ID 2)
Net Link States (Area 1)
LS age: 1445
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.186.5.1 (address of Designated Router)
Advertising Router: 10.229.11.11
LS Seq Number: 80000032
Checksum: 0xDFD8
Length: 32
Network Mask: /24
Attached Router: 10.229.11.11
Attached Router: 10.115.55.55
LS age: 1073
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.229.6.6 (address of Designated Router)
Advertising Router: 10.84.66.66
LS Seq Number: 8000000F
Checksum: 0x415E
Length: 32
Network Mask: /24
Attached Router: 10.84.66.66
Attached Router: 10.151.22.22
LS age: 849
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.19.7.7 (address of Designated Router)
Advertising Router: 10.1.77.77
LS Seq Number: 8000002A
Checksum: 0x5C19
Length: 32
Network Mask: /24
Attached Router: 10.1.77.77
Attached Router: 10.36.3.3
LS age: 564
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.115.6.6 (address of Designated Router)
Advertising Router: 10.84.66.66
LS Seq Number: 80000029
Checksum: 0x619C
Length: 32
Network Mask: /24
Attached Router: 10.84.66.66
Attached Router: 10.115.55.55
LS age: 1845
Options: (No TOS-capability, DC)
LS Type: Network Links
Link State ID: 10.166.7.6 (address of Designated Router)
Advertising Router: 10.84.66.66
LS Seq Number: 8000002A
Checksum: 0x980A
Length: 32
Network Mask: /24
Attached Router: 10.84.66.66
Attached Router: 10.1.77.77
'''}
# Golden test: parser output must equal the expected structure above.
def test_show_ip_ospf_database_network_full1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output1)
obj = ShowIpOspfDatabaseNetwork(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
# Negative test: empty device output must raise SchemaEmptyParserError.
def test_show_ip_ospf_database_network_empty(self):
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfDatabaseNetwork(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# ==============================================
# Unit test for 'show ip ospf database summary'
# ==============================================
class test_show_ip_ospf_database_summary(unittest.TestCase):
'''Unit test for "show ip ospf database summary" '''
# Stand-in device object; its execute() is replaced by a Mock in each test.
device = Device(name='aDevice')
# Simulates a device that returns no output for the command.
empty_output = {'execute.return_value': ''}
# Expected parsed structure for golden_output1 below. Type-3 (summary) LSAs
# keyed as '<lsa_id> <adv_router>' under each area of process 1.
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{3:
{'lsa_type': 3,
'lsas':
{'10.186.3.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.186.3.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 1,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 422,
'checksum': '0x43DC',
'length': 28,
'lsa_id': '10.186.3.0',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000001',
'type': 3}}},
'10.186.3.0 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.186.3.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 40,
'mt_id': 0}}}},
'header':
{'adv_router': '10.36.3.3',
'age': 372,
'checksum': '0x6EA1',
'length': 28,
'lsa_id': '10.186.3.0',
'option': 'None',
'option_desc': 'No TOS-capability, No DC, Upward',
'seq_num': '80000002',
'type': 3}}},
'10.229.3.0 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.229.3.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 40,
'mt_id': 0}}}},
'header':
{'adv_router': '10.36.3.3',
'age': 372,
'checksum': '0x62AC',
'length': 28,
'lsa_id': '10.229.3.0',
'option': 'None',
'option_desc': 'No TOS-capability, No DC, Upward',
'seq_num': '80000002',
'type': 3}}},
'10.229.4.0 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.229.4.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 41,
'mt_id': 0}}}},
'header':
{'adv_router': '10.36.3.3',
'age': 131,
'checksum': '0x5DAD',
'length': 28,
'lsa_id': '10.229.4.0',
'option': 'None',
'option_desc': 'No TOS-capability, No DC, Upward',
'seq_num': '80000004',
'type': 3}}},
'10.19.4.0 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.19.4.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 40,
'mt_id': 0}}}},
'header':
{'adv_router': '10.36.3.3',
'age': 372,
'checksum': '0x4BC1',
'length': 28,
'lsa_id': '10.19.4.0',
'option': 'None',
'option_desc': 'No TOS-capability, No DC, Upward',
'seq_num': '80000002',
'type': 3}}},
'10.64.4.4 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.64.4.4',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.255',
'topologies':
{0:
{'metric': 41,
'mt_id': 0}}}},
'header':
{'adv_router': '10.36.3.3',
'age': 131,
'checksum': '0xEF26',
'length': 28,
'lsa_id': '10.64.4.4',
'option': 'None',
'option_desc': 'No TOS-capability, No DC, Upward',
'seq_num': '80000003',
'type': 3}}}}}}}},
'0.0.0.1':
{'database':
{'lsa_types':
{3:
{'lsa_type': 3,
'lsas':
{'10.4.0.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.4.0.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.0.0',
'topologies':
{0:
{'metric': 10,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 424,
'checksum': '0x5CCA',
'length': 28,
'lsa_id': '10.4.0.0',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000001',
'type': 3}}},
'10.1.2.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.1.2.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 111,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 422,
'checksum': '0xC6EF',
'length': 28,
'lsa_id': '10.1.2.0',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC, '
'Upward',
'seq_num': '80000001',
'type': 3}}},
'10.1.3.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.1.3.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 65535,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 364,
'checksum': '0x5FC4',
'length': 28,
'lsa_id': '10.1.3.0',
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC, '
'Upward',
'seq_num': '80000002',
'type': 3}}},
'10.2.3.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.2.3.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 65868,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 365,
'checksum': '0x6174',
'length': 28,
'lsa_id': '10.2.3.0',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000001',
'type': 3}}},
'10.229.3.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.229.3.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 65575,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 365,
'checksum': '0x628F',
'length': 28,
'lsa_id': '10.229.3.0',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000001',
'type': 3}}},
'10.229.4.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.229.4.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 65576,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 130,
'checksum': '0x5D90',
'length': 28,
'lsa_id': '10.229.4.0',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000003',
'type': 3}}},
'10.19.4.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.19.4.0',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.0',
'topologies':
{0:
{'metric': 65575,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 365,
'checksum': '0x4BA4',
'length': 28,
'lsa_id': '10.19.4.0',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000001',
'type': 3}}},
'10.36.3.3 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.36.3.3',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.255',
'topologies':
{0:
{'metric': 65536,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 365,
'checksum': '0x8E97',
'length': 28,
'lsa_id': '10.36.3.3',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000001',
'type': 3}}},
'10.64.4.4 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.64.4.4',
'ospfv2':
{'body':
{'summary':
{'network_mask': '255.255.255.255',
'topologies':
{0:
{'metric': 65576,
'mt_id': 0}}}},
'header':
{'adv_router': '10.4.1.1',
'age': 130,
'checksum': '0xEF09',
'length': 28,
'lsa_id': '10.64.4.4',
'option': 'None',
'option_desc': 'No TOS-capability, DC, Upward',
'seq_num': '80000002',
'type': 3}}}}}}}}}}}}}}}}
# Raw CLI output captured from a device; fed to the parser via the mocked
# device's execute() return value.
golden_output1 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf database summary
OSPF Router with ID (10.4.1.1) (Process ID 1)
Summary Net Link States (Area 0)
LS age: 131
Options: (No TOS-capability, No DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.64.4.4 (summary Network Number)
Advertising Router: 10.36.3.3
LS Seq Number: 80000003
Checksum: 0xEF26
Length: 28
Network Mask: /32
MTID: 0 Metric: 41
LS age: 422
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.186.3.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x43DC
Length: 28
Network Mask: /24
MTID: 0 Metric: 1
LS age: 372
Options: (No TOS-capability, No DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.186.3.0 (summary Network Number)
Advertising Router: 10.36.3.3
LS Seq Number: 80000002
Checksum: 0x6EA1
Length: 28
Network Mask: /24
MTID: 0 Metric: 40
LS age: 372
Options: (No TOS-capability, No DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.229.3.0 (summary Network Number)
Advertising Router: 10.36.3.3
LS Seq Number: 80000002
Checksum: 0x62AC
Length: 28
Network Mask: /24
MTID: 0 Metric: 40
LS age: 131
Options: (No TOS-capability, No DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.229.4.0 (summary Network Number)
Advertising Router: 10.36.3.3
LS Seq Number: 80000004
Checksum: 0x5DAD
Length: 28
Network Mask: /24
MTID: 0 Metric: 41
LS age: 372
Options: (No TOS-capability, No DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.19.4.0 (summary Network Number)
Advertising Router: 10.36.3.3
LS Seq Number: 80000002
Checksum: 0x4BC1
Length: 28
Network Mask: /24
MTID: 0 Metric: 40
Summary Net Link States (Area 1)
LS age: 424
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.4.0.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x5CCA
Length: 28
Network Mask: /16
MTID: 0 Metric: 10
LS age: 365
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.36.3.3 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x8E97
Length: 28
Network Mask: /32
MTID: 0 Metric: 65536
LS age: 130
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.64.4.4 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000002
Checksum: 0xEF09
Length: 28
Network Mask: /32
MTID: 0 Metric: 65576
LS age: 422
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.1.2.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xC6EF
Length: 28
Network Mask: /24
MTID: 0 Metric: 111
LS age: 364
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.1.3.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000002
Checksum: 0x5FC4
Length: 28
Network Mask: /24
MTID: 0 Metric: 65535
LS age: 365
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.2.3.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x6174
Length: 28
Network Mask: /24
MTID: 0 Metric: 65868
LS age: 365
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.229.3.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x628F
Length: 28
Network Mask: /24
MTID: 0 Metric: 65575
LS age: 130
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.229.4.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000003
Checksum: 0x5D90
Length: 28
Network Mask: /24
MTID: 0 Metric: 65576
LS age: 365
Options: (No TOS-capability, DC, Upward)
LS Type: Summary Links(Network)
Link State ID: 10.19.4.0 (summary Network Number)
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x4BA4
Length: 28
Network Mask: /24
MTID: 0 Metric: 65575
'''}
# Golden test: parser output must equal the expected structure above.
def test_show_ip_ospf_database_summary_full1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output1)
obj = ShowIpOspfDatabaseSummary(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
# Negative test: empty device output must raise SchemaEmptyParserError.
def test_show_ip_ospf_database_summary_empty(self):
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfDatabaseSummary(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# =================================================
# Unit test for 'show ip ospf database opaque-area'
# =================================================
class test_show_ip_ospf_database_opaque_area(unittest.TestCase):
'''Unit test for commands:
* 'show ip ospf database opaque-area'
* 'show ip ospf database opaque-area self-originate'
* 'show ip ospf database opaque-area adv-router {address}'
'''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'database':
{'lsa_types':
{10:
{'lsa_type': 10,
'lsas':
{'10.1.0.0 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.1.0.0',
'ospfv2':
{'body':
{'opaque':
{'num_of_links': 0,
'mpls_te_router_id': '10.4.1.1',}},
'header':
{'adv_router': '10.4.1.1',
'age': 370,
'checksum': '0x56D2',
'fragment_number': 0,
'length': 28,
'lsa_id': '10.1.0.0',
'opaque_id': 0,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC',
'seq_num': '80000002',
'type': 10}}},
'10.1.0.0 10.16.2.2':
{'adv_router': '10.16.2.2',
'lsa_id': '10.1.0.0',
'ospfv2':
{'body':
{'opaque':
{'num_of_links': 0,
'mpls_te_router_id': '10.16.2.2',}},
'header':
{'adv_router': '10.16.2.2',
'age': 1420,
'checksum': '0x1E21',
'fragment_number': 0,
'length': 28,
'lsa_id': '10.1.0.0',
'opaque_id': 0,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'No '
'DC',
'seq_num': '80000002',
'type': 10}}},
'10.1.0.0 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.1.0.0',
'ospfv2':
{'body':
{'opaque':
{'num_of_links': 0,
'mpls_te_router_id': '10.36.3.3',}},
'header':
{'adv_router': '10.36.3.3',
'age': 123,
'checksum': '0x5EBA',
'fragment_number': 0,
'length': 28,
'lsa_id': '10.1.0.0',
'opaque_id': 0,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No '
'TOS-capability, '
'DC',
'seq_num': '80000002',
'type': 10}}},
'10.1.0.1 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.1.0.1',
'ospfv2':
{'body':
{'opaque':
{'link_tlvs':
{1:
{'admin_group': '0x0',
'igp_metric': 1,
'link_id': '10.1.4.4',
'link_name': 'broadcast network',
'link_type': 2,
'local_if_ipv4_addrs':
{'10.1.4.1': {}},
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'remote_if_ipv4_addrs':
{'0.0.0.0': {}},
'te_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}},
'num_of_links': 1}},
'header':
{'adv_router': '10.4.1.1',
'age': 370,
'checksum': '0x6586',
'fragment_number': 1,
'length': 124,
'lsa_id': '10.1.0.1',
'opaque_id': 1,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000002',
'type': 10}}},
'10.1.0.2 10.4.1.1':
{'adv_router': '10.4.1.1',
'lsa_id': '10.1.0.2',
'ospfv2':
{'body':
{'opaque':
{'link_tlvs':
{1:
{'admin_group': '0x0',
'igp_metric': 1,
'link_id': '10.1.2.1',
'link_name': 'broadcast network',
'link_type': 2,
'local_if_ipv4_addrs': {'10.1.2.1': {}},
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'remote_if_ipv4_addrs': {'0.0.0.0': {}},
'te_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}},
'num_of_links': 1}},
'header':
{'adv_router': '10.4.1.1',
'age': 370,
'checksum': '0xB43D',
'fragment_number': 2,
'length': 124,
'lsa_id': '10.1.0.2',
'opaque_id': 2,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000002',
'type': 10}}},
'10.1.0.37 10.16.2.2':
{'adv_router': '10.16.2.2',
'lsa_id': '10.1.0.37',
'ospfv2':
{'body':
{'opaque':
{'link_tlvs':
{1:
{'admin_group': '0x0',
'link_id': '10.2.3.3',
'link_name': 'broadcast network',
'link_type': 2,
'local_if_ipv4_addrs': {'10.2.3.2': {}},
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'remote_if_ipv4_addrs': {'0.0.0.0': {}},
'te_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}},
'num_of_links': 1}},
'header':
{'adv_router': '10.16.2.2',
'age': 1010,
'checksum': '0xE691',
'fragment_number': 37,
'length': 116,
'lsa_id': '10.1.0.37',
'opaque_id': 37,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No TOS-capability, No DC',
'seq_num': '80000003',
'type': 10}}},
'10.1.0.38 10.16.2.2':
{'adv_router': '10.16.2.2',
'lsa_id': '10.1.0.38',
'ospfv2':
{'body':
{'opaque':
{'link_tlvs':
{1:
{'admin_group': '0x0',
'link_id': '10.2.4.4',
'link_name': 'broadcast network',
'link_type': 2,
'local_if_ipv4_addrs':
{'10.2.4.2': {}},
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'remote_if_ipv4_addrs':
{'0.0.0.0': {}},
'te_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}},
'num_of_links': 1}},
'header':
{'adv_router': '10.16.2.2',
'age': 1000,
'checksum': '0x254F',
'fragment_number': 38,
'length': 116,
'lsa_id': '10.1.0.38',
'opaque_id': 38,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No TOS-capability, No DC',
'seq_num': '80000003',
'type': 10}}},
'10.1.0.39 10.16.2.2':
{'adv_router': '10.16.2.2',
'lsa_id': '10.1.0.39',
'ospfv2':
{'body':
{'opaque':
{'link_tlvs':
{1:
{'admin_group': '0x0',
'link_id': '10.1.2.1',
'link_name': 'broadcast network',
'link_type': 2,
'local_if_ipv4_addrs':
{'10.1.2.2': {}},
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'remote_if_ipv4_addrs':
{'0.0.0.0': {}},
'te_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}},
'num_of_links': 1}},
'header':
{'adv_router': '10.16.2.2',
'age': 1000,
'checksum': '0x4438',
'fragment_number': 39,
'length': 116,
'lsa_id': '10.1.0.39',
'opaque_id': 39,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No TOS-capability, No DC',
'seq_num': '80000003',
'type': 10}}},
'10.1.0.4 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.1.0.4',
'ospfv2':
{'body':
{'opaque':
{'link_tlvs':
{1:
{'admin_group': '0x0',
'igp_metric': 1,
'link_id': '10.3.4.4',
'link_name': 'broadcast network',
'link_type': 2,
'local_if_ipv4_addrs': {'10.3.4.3': {}},
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'remote_if_ipv4_addrs': {'0.0.0.0': {}},
'te_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}},
'num_of_links': 1}},
'header':
{'adv_router': '10.36.3.3',
'age': 123,
'checksum': '0x915D',
'fragment_number': 4,
'length': 160,
'lsa_id': '10.1.0.4',
'opaque_id': 4,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000002',
'type': 10}}},
'10.1.0.6 10.36.3.3':
{'adv_router': '10.36.3.3',
'lsa_id': '10.1.0.6',
'ospfv2':
{'body':
{'opaque':
{'link_tlvs':
{1:
{'admin_group': '0x0',
'igp_metric': 1,
'link_id': '10.2.3.3',
'link_name': 'broadcast network',
'link_type': 2,
'local_if_ipv4_addrs':
{'10.2.3.3': {}},
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'remote_if_ipv4_addrs': {'0.0.0.0': {}},
'te_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}},
'num_of_links': 1}},
'header':
{'adv_router': '10.36.3.3',
'age': 123,
'checksum': '0x5EC',
'fragment_number': 6,
'length': 160,
'lsa_id': '10.1.0.6',
'opaque_id': 6,
'opaque_type': 1,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'seq_num': '80000002',
'type': 10}}}}}}}}}},
'2': {}}}}}}}
golden_output1 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf database opaque-area
OSPF Router with ID (10.4.1.1) (Process ID 1)
Type-10 Opaque Area Link States (Area 0)
LS age: 370
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.0
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 0
Advertising Router: 10.4.1.1
LS Seq Number: 80000002
Checksum: 0x56D2
Length: 28
Fragment number : 0
MPLS TE router ID : 10.4.1.1
Number of Links : 0
LS age: 1420
Options: (No TOS-capability, No DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.0
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 0
Advertising Router: 10.16.2.2
LS Seq Number: 80000002
Checksum: 0x1E21
Length: 28
Fragment number : 0
MPLS TE router ID : 10.16.2.2
Number of Links : 0
LS age: 123
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.0
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 0
Advertising Router: 10.36.3.3
LS Seq Number: 80000002
Checksum: 0x5EBA
Length: 28
Fragment number : 0
MPLS TE router ID : 10.36.3.3
Number of Links : 0
LS age: 370
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.1
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 1
Advertising Router: 10.4.1.1
LS Seq Number: 80000002
Checksum: 0x6586
Length: 124
Fragment number : 1
Link connected to Broadcast network
Link ID : 10.1.4.4
Interface Address : 10.1.4.1
Admin Metric : 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
IGP Metric : 1
Number of Links : 1
LS age: 370
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.2
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 2
Advertising Router: 10.4.1.1
LS Seq Number: 80000002
Checksum: 0xB43D
Length: 124
Fragment number : 2
Link connected to Broadcast network
Link ID : 10.1.2.1
Interface Address : 10.1.2.1
Admin Metric : 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
IGP Metric : 1
Number of Links : 1
LS age: 123
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.4
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 4
Advertising Router: 10.36.3.3
LS Seq Number: 80000002
Checksum: 0x915D
Length: 160
Fragment number : 4
Link connected to Broadcast network
Link ID : 10.3.4.4
Interface Address : 10.3.4.3
Admin Metric : 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
IGP Metric : 1
Unknown SubTLV type 32771 length 32
Number of Links : 1
LS age: 123
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.6
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 6
Advertising Router: 10.36.3.3
LS Seq Number: 80000002
Checksum: 0x5EC
Length: 160
Fragment number : 6
Link connected to Broadcast network
Link ID : 10.2.3.3
Interface Address : 10.2.3.3
Admin Metric : 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
IGP Metric : 1
Unknown SubTLV type 32771 length 32
Number of Links : 1
LS age: 1010
Options: (No TOS-capability, No DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.37
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 37
Advertising Router: 10.16.2.2
LS Seq Number: 80000003
Checksum: 0xE691
Length: 116
Fragment number : 37
Link connected to Broadcast network
Link ID : 10.2.3.3
Interface Address : 10.2.3.2
Admin Metric : 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
Number of Links : 1
LS age: 1000
Options: (No TOS-capability, No DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.38
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 38
Advertising Router: 10.16.2.2
LS Seq Number: 80000003
Checksum: 0x254F
Length: 116
Fragment number : 38
Link connected to Broadcast network
Link ID : 10.2.4.4
Interface Address : 10.2.4.2
Admin Metric : 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
Number of Links : 1
LS age: 1000
Options: (No TOS-capability, No DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.39
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 39
Advertising Router: 10.16.2.2
LS Seq Number: 80000003
Checksum: 0x4438
Length: 116
Fragment number : 39
Link connected to Broadcast network
Link ID : 10.1.2.1
Interface Address : 10.1.2.2
Admin Metric : 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
Number of Links : 1
OSPF Router with ID (10.229.11.11) (Process ID 2)
'''}
golden_parsed_output2 = {
"vrf": {
"default": {
"address_family": {
"ipv4": {
"instance": {
"65109": {
"areas": {
"0.0.0.8": {
"database": {
"lsa_types": {
10: {
"lsa_type": 10,
"lsas": {
"10.1.0.0 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.1.0.0",
"ospfv2": {
"body": {
"opaque": {
"mpls_te_router_id": "10.4.1.1",
"num_of_links": 0
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.1.0.0",
"adv_router": "10.4.1.1",
"opaque_type": 1,
"opaque_id": 0,
"seq_num": "80000001",
"checksum": "0x58D1",
"length": 28,
"fragment_number": 0
}
}
},
"10.1.0.15 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.1.0.15",
"ospfv2": {
"body": {
"opaque": {
"link_tlvs": {
1: {
"link_type": 1,
"link_name": "point-to-point network",
"link_id": "10.16.2.2",
"remote_if_ipv4_addrs": {
"192.168.220.2": {}
},
"local_if_ipv4_addrs": {
"192.168.220.1": {}
},
"te_metric": 1,
"max_bandwidth": 176258176,
"igp_metric": 1
}
},
"num_of_links": 1
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.1.0.15",
"adv_router": "10.4.1.1",
"opaque_type": 1,
"opaque_id": 15,
"seq_num": "80000001",
"checksum": "0x917E",
"length": 80,
"fragment_number": 15
}
}
},
"10.1.0.16 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.1.0.16",
"ospfv2": {
"body": {
"opaque": {
"link_tlvs": {
1: {
"link_type": 1,
"link_name": "point-to-point network",
"link_id": "10.16.2.2",
"remote_if_ipv4_addrs": {
"192.168.111.2": {}
},
"local_if_ipv4_addrs": {
"192.168.111.1": {}
},
"te_metric": 1,
"max_bandwidth": 125000000,
"igp_metric": 1
}
},
"num_of_links": 1
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.1.0.16",
"adv_router": "10.4.1.1",
"opaque_type": 1,
"opaque_id": 16,
"seq_num": "80000001",
"checksum": "0x8A09",
"length": 80,
"fragment_number": 16
}
}
},
"10.1.0.17 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.1.0.17",
"ospfv2": {
"body": {
"opaque": {
"link_tlvs": {
1: {
"link_type": 1,
"link_name": "point-to-point network",
"link_id": "10.16.2.2",
"remote_if_ipv4_addrs": {
"192.168.4.2": {}
},
"local_if_ipv4_addrs": {
"192.168.4.1": {}
},
"te_metric": 1,
"max_bandwidth": 125000000,
"igp_metric": 1
}
},
"num_of_links": 1
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.1.0.17",
"adv_router": "10.4.1.1",
"opaque_type": 1,
"opaque_id": 17,
"seq_num": "80000001",
"checksum": "0xC2CD",
"length": 80,
"fragment_number": 17
}
}
},
"10.1.0.18 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.1.0.18",
"ospfv2": {
"body": {
"opaque": {
"link_tlvs": {
1: {
"link_type": 1,
"link_name": "point-to-point network",
"link_id": "10.16.2.2",
"remote_if_ipv4_addrs": {
"192.168.154.2": {}
},
"local_if_ipv4_addrs": {
"192.168.154.1": {}
},
"te_metric": 1,
"max_bandwidth": 125000000,
"igp_metric": 1
}
},
"num_of_links": 1
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.1.0.18",
"adv_router": "10.4.1.1",
"opaque_type": 1,
"opaque_id": 18,
"seq_num": "80000001",
"checksum": "0xFA92",
"length": 80,
"fragment_number": 18
}
}
},
"10.16.0.0 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.16.0.0",
"ospfv2": {
"body": {
"opaque": {
"router_capabilities_tlv": {
1: {
"tlv_type": "Router Information",
"length": 4,
"information_capabilities": {
"graceful_restart_helper": True,
"stub_router": True
}
}
},
"sr_algorithm_tlv": {
1: {
"tlv_type": "Segment Routing Algorithm",
"length": 2,
"algorithm": {
"spf": True,
"strict_spf": True
}
}
},
"sid_range_tlvs": {
1: {
"tlv_type": "Segment Routing Range",
"length": 12,
"range_size": 8000,
"sub_tlvs": {
1: {
"type": "SID/Label",
"length": 3,
"label": 16000
}
}
}
},
"node_msd_tlvs": {
1: {
"tlv_type": "Segment Routing Node MSD",
"length": 2,
"sub_type": {
"node_max_sid_depth_value": 13
}
}
},
"local_block_tlvs": {
1: {
"tlv_type": "Segment Routing Local Block",
"length": 12,
"range_size": 1000,
"sub_tlvs": {
1: {
"type": "SID/Label",
"length": 3,
"label": 15000
}
}
}
}
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.16.0.0",
"adv_router": "10.4.1.1",
"opaque_id": 0,
"seq_num": "80000001",
"checksum": "0xD28C",
"length": 76
}
}
},
"10.49.0.0 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.49.0.0",
"ospfv2": {
"body": {
"opaque": {
"extended_prefix_tlvs": {
1: {
"tlv_type": "Extended Prefix",
"length": 20,
"prefix": "10.4.1.1/32",
"af": 0,
"route_type": "Intra",
"flags": "N-bit",
"sub_tlvs": {
1: {
"type": "Prefix SID",
"length": 8,
"flags": "None",
"mt_id": 0,
"algo": "SPF",
"sid": 1
}
}
}
}
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.49.0.0",
"adv_router": "10.4.1.1",
"opaque_id": 0,
"seq_num": "80000001",
"checksum": "0xEFA7",
"length": 44
}
}
},
"10.64.0.20 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.64.0.20",
"ospfv2": {
"body": {
"opaque": {
"extended_link_tlvs": {
1: {
"tlv_type": "Extended Link",
"length": 68,
"link_name": "another router (point-to-point)",
"link_type": 1,
"link_id": "10.16.2.2",
"link_data": "192.168.220.1",
"sub_tlvs": {
1: {
"type": "Adj SID",
"length": 7,
"flags": "L-Bit, V-bit",
"mt_id": 0,
"weight": 0,
"label": 19
},
2: {
"type": "Remote Intf Addr",
"remote_interface_address": "192.168.220.2"
},
3: {
"type": "Local / Remote Intf ID",
"local_interface_id": 20,
"remote_interface_id": 20
}
}
}
}
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.64.0.20",
"adv_router": "10.4.1.1",
"opaque_id": 20,
"seq_num": "80000001",
"checksum": "0xF52F",
"length": 92
}
}
},
"10.64.0.21 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.64.0.21",
"ospfv2": {
"body": {
"opaque": {
"extended_link_tlvs": {
1: {
"tlv_type": "Extended Link",
"length": 68,
"link_name": "another router (point-to-point)",
"link_type": 1,
"link_id": "10.16.2.2",
"link_data": "192.168.111.1",
"sub_tlvs": {
1: {
"type": "Adj SID",
"length": 7,
"flags": "L-Bit, V-bit",
"mt_id": 0,
"weight": 0,
"label": 18
},
2: {
"type": "Remote Intf Addr",
"remote_interface_address": "192.168.111.2"
},
3: {
"type": "Local / Remote Intf ID",
"local_interface_id": 21,
"remote_interface_id": 22
}
}
}
}
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.64.0.21",
"adv_router": "10.4.1.1",
"opaque_id": 21,
"seq_num": "80000001",
"checksum": "0xB764",
"length": 92
}
}
},
"10.64.0.22 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.64.0.22",
"ospfv2": {
"body": {
"opaque": {
"extended_link_tlvs": {
1: {
"tlv_type": "Extended Link",
"length": 68,
"link_name": "another router (point-to-point)",
"link_type": 1,
"link_id": "10.16.2.2",
"link_data": "192.168.4.1",
"sub_tlvs": {
1: {
"type": "Adj SID",
"length": 7,
"flags": "L-Bit, V-bit",
"mt_id": 0,
"weight": 0,
"label": 17
},
2: {
"type": "Remote Intf Addr",
"remote_interface_address": "192.168.4.2"
},
3: {
"type": "Local / Remote Intf ID",
"local_interface_id": 22,
"remote_interface_id": 23
}
}
}
}
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.64.0.22",
"adv_router": "10.4.1.1",
"opaque_id": 22,
"seq_num": "80000001",
"checksum": "0xF420",
"length": 92
}
}
},
"10.64.0.23 10.4.1.1": {
"adv_router": "10.4.1.1",
"lsa_id": "10.64.0.23",
"ospfv2": {
"body": {
"opaque": {
"extended_link_tlvs": {
1: {
"tlv_type": "Extended Link",
"length": 68,
"link_name": "another router (point-to-point)",
"link_type": 1,
"link_id": "10.16.2.2",
"link_data": "192.168.154.1",
"sub_tlvs": {
1: {
"type": "Adj SID",
"length": 7,
"flags": "L-Bit, V-bit",
"mt_id": 0,
"weight": 0,
"label": 16
},
2: {
"type": "Remote Intf Addr",
"remote_interface_address": "192.168.154.2"
},
3: {
"type": "Local / Remote Intf ID",
"local_interface_id": 23,
"remote_interface_id": 24
}
}
}
}
}
},
"header": {
"age": 49,
"option": "None",
"option_desc": "No TOS-capability, DC",
"type": 10,
"lsa_id": "10.64.0.23",
"adv_router": "10.4.1.1",
"opaque_id": 23,
"seq_num": "80000001",
"checksum": "0x32DB",
"length": 92
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
golden_output2 = {'execute.return_value': '''
PE1#show ip ospf database opaque-area self-originate
OSPF Router with ID (10.4.1.1) (Process ID 65109)
Type-10 Opaque Area Link States (Area 8)
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.0
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 0
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x58D1
Length: 28
Fragment number : 0
MPLS TE router ID : 10.4.1.1
Number of Links : 0
LS age: MAXAGE(49)
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.15
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 15
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x917E
Length: 80
Fragment number : 15
Link connected to Point-to-Point network
Link ID : 10.16.2.2
Neighbor Address : 192.168.220.2
Interface Address : 192.168.220.1
Admin Metric : 1
Maximum bandwidth : 176258176
IGP Metric : 1
Number of Links : 1
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.16
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 16
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x8A09
Length: 80
Fragment number : 16
Link connected to Point-to-Point network
Link ID : 10.16.2.2
Neighbor Address : 192.168.111.2
Interface Address : 192.168.111.1
Admin Metric : 1
Maximum bandwidth : 125000000
IGP Metric : 1
Number of Links : 1
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.17
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 17
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xC2CD
Length: 80
Fragment number : 17
Link connected to Point-to-Point network
Link ID : 10.16.2.2
Neighbor Address : 192.168.4.2
Interface Address : 192.168.4.1
Admin Metric : 1
Maximum bandwidth : 125000000
IGP Metric : 1
Number of Links : 1
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.18
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 18
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xFA92
Length: 80
Fragment number : 18
Link connected to Point-to-Point network
Link ID : 10.16.2.2
Neighbor Address : 192.168.154.2
Interface Address : 192.168.154.1
Admin Metric : 1
Maximum bandwidth : 125000000
IGP Metric : 1
Number of Links : 1
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.16.0.0
Opaque Type: 4 (Router Information)
Opaque ID: 0
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xD28C
Length: 76
TLV Type: Router Information
Length: 4
Capabilities:
Graceful Restart Helper
Stub Router Support
TLV Type: Segment Routing Algorithm
Length: 2
Algorithm: SPF
Algorithm: Strict SPF
TLV Type: Segment Routing Range
Length: 12
Range Size: 8000
Sub-TLV Type: SID/Label
Length: 3
Label: 16000
TLV Type: Segment Routing Node MSD
Length: 2
Sub-type: Node Max Sid Depth, Value: 13
TLV Type: Segment Routing Local Block
Length: 12
Range Size: 1000
Sub-TLV Type: SID/Label
Length: 3
Label: 15000
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.49.0.0
Opaque Type: 7 (Extended Prefix)
Opaque ID: 0
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xEFA7
Length: 44
TLV Type: Extended Prefix
Length: 20
Prefix : 10.4.1.1/32
AF : 0
Route-type: Intra
Flags : N-bit
Sub-TLV Type: Prefix SID
Length: 8
Flags : None
MTID : 0
Algo : SPF
SID : 1
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.64.0.20
Opaque Type: 8 (Extended Link)
Opaque ID: 20
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xF52F
Length: 92
TLV Type: Extended Link
Length: 68
Link connected to : another Router (point-to-point)
(Link ID) Designated Router address: 10.16.2.2
(Link Data) Interface IP address: 192.168.220.1
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit
MTID : 0
Weight : 0
Label : 19
Sub-TLV Type: Remote Intf Addr
Remote Interface Address : 192.168.220.2
Sub-TLV Type: Local / Remote Intf ID
Local Interface ID : 20
Remote Interface ID : 20
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.64.0.21
Opaque Type: 8 (Extended Link)
Opaque ID: 21
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xB764
Length: 92
TLV Type: Extended Link
Length: 68
Link connected to : another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.16.2.2
(Link Data) Interface IP address: 192.168.111.1
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit
MTID : 0
Weight : 0
Label : 18
Sub-TLV Type: Remote Intf Addr
Remote Interface Address : 192.168.111.2
Sub-TLV Type: Local / Remote Intf ID
Local Interface ID : 21
Remote Interface ID : 22
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.64.0.22
Opaque Type: 8 (Extended Link)
Opaque ID: 22
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0xF420
Length: 92
TLV Type: Extended Link
Length: 68
Link connected to : another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.16.2.2
(Link Data) Interface IP address: 192.168.4.1
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit
MTID : 0
Weight : 0
Label : 17
Sub-TLV Type: Remote Intf Addr
Remote Interface Address : 192.168.4.2
Sub-TLV Type: Local / Remote Intf ID
Local Interface ID : 22
Remote Interface ID : 23
LS age: 49
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.64.0.23
Opaque Type: 8 (Extended Link)
Opaque ID: 23
Advertising Router: 10.4.1.1
LS Seq Number: 80000001
Checksum: 0x32DB
Length: 92
TLV Type: Extended Link
Length: 68
Link connected to : another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.16.2.2
(Link Data) Interface IP address: 192.168.154.1
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit
MTID : 0
Weight : 0
Label : 16
Sub-TLV Type: Remote Intf Addr
Remote Interface Address : 192.168.154.2
Sub-TLV Type: Local / Remote Intf ID
Local Interface ID : 23
Remote Interface ID : 24
'''}
golden_output3 = {'execute.return_value': '''
show ip ospf database opaque-area adv-router 10.4.1.1
OSPF Router with ID (10.4.1.1) (Process ID 65109)
Type-10 Opaque Area Link States (Area 8)
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.0
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 0
Advertising Router: 10.4.1.1
LS Seq Number: 8000013B
Checksum: 0xE00E
Length: 28
Fragment number : 0
MPLS TE router ID : 10.4.1.1
Number of Links : 0
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.3
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 3
Advertising Router: 10.4.1.1
LS Seq Number: 8000013B
Checksum: 0xFF9E
Length: 80
Fragment number : 3
Link connected to Point-to-Point network
Link ID : 10.229.11.11
Neighbor Address : 10.0.0.9
Interface Address : 10.0.0.10
Admin Metric : 10
Maximum bandwidth : 125000000
IGP Metric : 10
Number of Links : 1
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.4
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 4
Advertising Router: 10.4.1.1
LS Seq Number: 8000013B
Checksum: 0xAE06
Length: 80
Fragment number : 4
Link connected to Point-to-Point network
Link ID : 10.151.22.22
Neighbor Address : 10.0.0.13
Interface Address : 10.0.0.14
Admin Metric : 100
Maximum bandwidth : 125000000
IGP Metric : 100
Number of Links : 1
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.1.0.5
Opaque Type: 1 (Traffic Engineering)
Opaque ID: 5
Advertising Router: 10.4.1.1
LS Seq Number: 8000013B
Checksum: 0xFE8D
Length: 80
Fragment number : 5
Link connected to Point-to-Point network
Link ID : 10.151.22.22
Neighbor Address : 10.0.0.25
Interface Address : 10.0.0.26
Admin Metric : 1000
Maximum bandwidth : 125000000
IGP Metric : 1000
Number of Links : 1
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.16.0.0
Opaque Type: 4 (Router Information)
Opaque ID: 0
Advertising Router: 10.4.1.1
LS Seq Number: 8000013B
Checksum: 0x5BC8
Length: 76
TLV Type: Router Information
Length: 4
Capabilities:
Graceful Restart Helper
Stub Router Support
TLV Type: Segment Routing Algorithm
Length: 2
Algorithm: SPF
Algorithm: Strict SPF
TLV Type: Segment Routing Range
Length: 12
Range Size: 8000
Sub-TLV Type: SID/Label
Length: 3
Label: 16000
TLV Type: Segment Routing Node MSD
Length: 2
Sub-type: Node Max Sid Depth, Value: 13
TLV Type: Segment Routing Local Block
Length: 12
Range Size: 1000
Sub-TLV Type: SID/Label
Length: 3
Label: 15000
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.49.0.0
Opaque Type: 7 (Extended Prefix)
Opaque ID: 0
Advertising Router: 10.4.1.1
LS Seq Number: 80000133
Checksum: 0x88DB
Length: 44
TLV Type: Extended Prefix
Length: 20
Prefix : 10.4.1.1/32
AF : 0
Route-type: Intra
Flags : N-bit
Sub-TLV Type: Prefix SID
Length: 8
Flags : None
MTID : 0
Algo : SPF
SID : 1
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.64.0.9
Opaque Type: 8 (Extended Link)
Opaque ID: 9
Advertising Router: 10.4.1.1
LS Seq Number: 8000013C
Checksum: 0xA666
Length: 104
TLV Type: Extended Link
Length: 80
Link connected to : another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.229.11.11
(Link Data) Interface IP address: 10.0.0.10
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit
MTID : 0
Weight : 0
Label : 18
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit, B-bit
MTID : 0
Weight : 0
Label : 19
Sub-TLV Type: Remote Intf Addr
Remote Interface Address : 10.0.0.9
Sub-TLV Type: Local / Remote Intf ID
Local Interface ID : 9
Remote Interface ID : 9
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.64.0.10
Opaque Type: 8 (Extended Link)
Opaque ID: 10
Advertising Router: 10.4.1.1
LS Seq Number: 8000013C
Checksum: 0xEBE6
Length: 104
TLV Type: Extended Link
Length: 80
Link connected to : another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.151.22.22
(Link Data) Interface IP address: 10.0.0.14
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit
MTID : 0
Weight : 0
Label : 17
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit, B-bit
MTID : 0
Weight : 0
Label : 21
Sub-TLV Type: Remote Intf Addr
Remote Interface Address : 10.0.0.13
Sub-TLV Type: Local / Remote Intf ID
Local Interface ID : 10
Remote Interface ID : 8
LS age: 1663
Options: (No TOS-capability, DC)
LS Type: Opaque Area Link
Link State ID: 10.64.0.11
Opaque Type: 8 (Extended Link)
Opaque ID: 11
Advertising Router: 10.4.1.1
LS Seq Number: 8000013D
Checksum: 0xB8F1
Length: 104
TLV Type: Extended Link
Length: 80
Link connected to : another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.151.22.22
(Link Data) Interface IP address: 10.0.0.26
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit
MTID : 0
Weight : 0
Label : 16
Sub-TLV Type: Adj SID
Length : 7
Flags : L-Bit, V-bit, B-bit
MTID : 0
Weight : 0
Label : 20
Sub-TLV Type: Remote Intf Addr
Remote Interface Address : 10.0.0.25
Sub-TLV Type: Local / Remote Intf ID
Local Interface ID : 11
Remote Interface ID : 9
'''}
golden_parsed_output3 = {
'vrf': {
'default': {
'address_family': {
'ipv4': {
'instance': {
'65109': {
'areas': {
'0.0.0.8': {
'database': {
'lsa_types': {
10: {
'lsa_type': 10,
'lsas': {
'10.1.0.0 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.1.0.0',
'ospfv2': {
'body': {
'opaque': {
'mpls_te_router_id': '10.4.1.1',
'num_of_links': 0
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.1.0.0',
'adv_router': '10.4.1.1',
'opaque_type': 1,
'opaque_id': 0,
'seq_num': '8000013B',
'checksum': '0xE00E',
'length': 28,
'fragment_number': 0
}
}
},
'10.1.0.3 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.1.0.3',
'ospfv2': {
'body': {
'opaque': {
'link_tlvs': {
1: {
'link_type': 1,
'link_name': 'point-to-point network',
'link_id': '10.229.11.11',
'remote_if_ipv4_addrs': {
'10.0.0.9': {}
},
'local_if_ipv4_addrs': {
'10.0.0.10': {}
},
'te_metric': 10,
'max_bandwidth': 125000000,
'igp_metric': 10
}
},
'num_of_links': 1
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.1.0.3',
'adv_router': '10.4.1.1',
'opaque_type': 1,
'opaque_id': 3,
'seq_num': '8000013B',
'checksum': '0xFF9E',
'length': 80,
'fragment_number': 3
}
}
},
'10.1.0.4 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.1.0.4',
'ospfv2': {
'body': {
'opaque': {
'link_tlvs': {
1: {
'link_type': 1,
'link_name': 'point-to-point network',
'link_id': '10.151.22.22',
'remote_if_ipv4_addrs': {
'10.0.0.13': {}
},
'local_if_ipv4_addrs': {
'10.0.0.14': {}
},
'te_metric': 100,
'max_bandwidth': 125000000,
'igp_metric': 100
}
},
'num_of_links': 1
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.1.0.4',
'adv_router': '10.4.1.1',
'opaque_type': 1,
'opaque_id': 4,
'seq_num': '8000013B',
'checksum': '0xAE06',
'length': 80,
'fragment_number': 4
}
}
},
'10.1.0.5 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.1.0.5',
'ospfv2': {
'body': {
'opaque': {
'link_tlvs': {
1: {
'link_type': 1,
'link_name': 'point-to-point network',
'link_id': '10.151.22.22',
'remote_if_ipv4_addrs': {
'10.0.0.25': {}
},
'local_if_ipv4_addrs': {
'10.0.0.26': {}
},
'te_metric': 1000,
'max_bandwidth': 125000000,
'igp_metric': 1000
}
},
'num_of_links': 1
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.1.0.5',
'adv_router': '10.4.1.1',
'opaque_type': 1,
'opaque_id': 5,
'seq_num': '8000013B',
'checksum': '0xFE8D',
'length': 80,
'fragment_number': 5
}
}
},
'10.16.0.0 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.16.0.0',
'ospfv2': {
'body': {
'opaque': {
'router_capabilities_tlv': {
1: {
'tlv_type': 'Router Information',
'length': 4,
'information_capabilities': {
'graceful_restart_helper': True,
'stub_router': True
}
}
},
'sr_algorithm_tlv': {
1: {
'tlv_type': 'Segment Routing Algorithm',
'length': 2,
'algorithm': {
'spf': True,
'strict_spf': True
}
}
},
'sid_range_tlvs': {
1: {
'tlv_type': 'Segment Routing Range',
'length': 12,
'range_size': 8000,
'sub_tlvs': {
1: {
'type': 'SID/Label',
'length': 3,
'label': 16000
}
}
}
},
'node_msd_tlvs': {
1: {
'tlv_type': 'Segment Routing Node MSD',
'length': 2,
'sub_type': {
'node_max_sid_depth_value': 13
}
}
},
'local_block_tlvs': {
1: {
'tlv_type': 'Segment Routing Local Block',
'length': 12,
'range_size': 1000,
'sub_tlvs': {
1: {
'type': 'SID/Label',
'length': 3,
'label': 15000
}
}
}
}
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.16.0.0',
'adv_router': '10.4.1.1',
'opaque_id': 0,
'seq_num': '8000013B',
'checksum': '0x5BC8',
'length': 76
}
}
},
'10.49.0.0 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.49.0.0',
'ospfv2': {
'body': {
'opaque': {
'extended_prefix_tlvs': {
1: {
'tlv_type': 'Extended Prefix',
'length': 20,
'prefix': '10.4.1.1/32',
'af': 0,
'route_type': 'Intra',
'flags': 'N-bit',
'sub_tlvs': {
1: {
'type': 'Prefix SID',
'length': 8,
'flags': 'None',
'mt_id': 0,
'algo': 'SPF',
'sid': 1
}
}
}
}
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.49.0.0',
'adv_router': '10.4.1.1',
'opaque_id': 0,
'seq_num': '80000133',
'checksum': '0x88DB',
'length': 44
}
}
},
'10.64.0.9 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.64.0.9',
'ospfv2': {
'body': {
'opaque': {
'extended_link_tlvs': {
1: {
'tlv_type': 'Extended Link',
'length': 80,
'link_name': 'another router (point-to-point)',
'link_type': 1,
'link_id': '10.229.11.11',
'link_data': '10.0.0.10',
'sub_tlvs': {
1: {
'type': 'Adj SID',
'length': 7,
'flags': 'L-Bit, V-bit',
'mt_id': 0,
'weight': 0,
'label': 18
},
2: {
'type': 'Adj SID',
'length': 7,
'flags': 'L-Bit, V-bit, B-bit',
'mt_id': 0,
'weight': 0,
'label': 19
},
3: {
'type': 'Remote Intf Addr',
'remote_interface_address': '10.0.0.9'
},
4: {
'type': 'Local / Remote Intf ID',
'local_interface_id': 9,
'remote_interface_id': 9
}
}
}
}
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.64.0.9',
'adv_router': '10.4.1.1',
'opaque_id': 9,
'seq_num': '8000013C',
'checksum': '0xA666',
'length': 104
}
}
},
'10.64.0.10 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.64.0.10',
'ospfv2': {
'body': {
'opaque': {
'extended_link_tlvs': {
1: {
'tlv_type': 'Extended Link',
'length': 80,
'link_name': 'another router (point-to-point)',
'link_type': 1,
'link_id': '10.151.22.22',
'link_data': '10.0.0.14',
'sub_tlvs': {
1: {
'type': 'Adj SID',
'length': 7,
'flags': 'L-Bit, V-bit',
'mt_id': 0,
'weight': 0,
'label': 17
},
2: {
'type': 'Adj SID',
'length': 7,
'flags': 'L-Bit, V-bit, B-bit',
'mt_id': 0,
'weight': 0,
'label': 21
},
3: {
'type': 'Remote Intf Addr',
'remote_interface_address': '10.0.0.13'
},
4: {
'type': 'Local / Remote Intf ID',
'local_interface_id': 10,
'remote_interface_id': 8
}
}
}
}
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.64.0.10',
'adv_router': '10.4.1.1',
'opaque_id': 10,
'seq_num': '8000013C',
'checksum': '0xEBE6',
'length': 104
}
}
},
'10.64.0.11 10.4.1.1': {
'adv_router': '10.4.1.1',
'lsa_id': '10.64.0.11',
'ospfv2': {
'body': {
'opaque': {
'extended_link_tlvs': {
1: {
'tlv_type': 'Extended Link',
'length': 80,
'link_name': 'another router (point-to-point)',
'link_type': 1,
'link_id': '10.151.22.22',
'link_data': '10.0.0.26',
'sub_tlvs': {
1: {
'type': 'Adj SID',
'length': 7,
'flags': 'L-Bit, V-bit',
'mt_id': 0,
'weight': 0,
'label': 16
},
2: {
'type': 'Adj SID',
'length': 7,
'flags': 'L-Bit, V-bit, B-bit',
'mt_id': 0,
'weight': 0,
'label': 20
},
3: {
'type': 'Remote Intf Addr',
'remote_interface_address': '10.0.0.25'
},
4: {
'type': 'Local / Remote Intf ID',
'local_interface_id': 11,
'remote_interface_id': 9
}
}
}
}
}
},
'header': {
'age': 1663,
'option': 'None',
'option_desc': 'No TOS-capability, DC',
'type': 10,
'lsa_id': '10.64.0.11',
'adv_router': '10.4.1.1',
'opaque_id': 11,
'seq_num': '8000013D',
'checksum': '0xB8F1',
'length': 104
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
def test_show_ip_ospf_database_opaque_area_full1(self):
    """Golden output 1 parses into the expected opaque-area structure."""
    self.maxDiff = None
    self.device = Mock(**self.golden_output1)
    parser = ShowIpOspfDatabaseOpaqueArea(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output1)
def test_show_ip_ospf_database_opaque_area_full2(self):
    """Self-originate variant parses golden output 2 correctly."""
    self.maxDiff = None
    self.device = Mock(**self.golden_output2)
    parser = ShowIpOspfDatabaseOpaqueAreaSelfOriginate(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output2)
def test_show_ip_ospf_database_opaque_area_adv_router(self):
    """Adv-router variant parses golden output 3 for router 10.4.1.1."""
    self.maxDiff = None
    self.device = Mock(**self.golden_output3)
    parser = ShowIpOspfDatabaseOpaqueAreaAdvRouter(device=self.device)
    self.assertEqual(parser.parse(address='10.4.1.1'),
                     self.golden_parsed_output3)
def test_show_ip_ospf_database_opaque_area_empty(self):
    """Empty device output must raise SchemaEmptyParserError."""
    self.maxDiff = None
    self.device = Mock(**self.empty_output)
    parser = ShowIpOspfDatabaseOpaqueArea(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
# ===============================================
# Unit test for 'show ip ospf mpls ldp interface'
# ===============================================
class test_show_ip_ospf_mpls_ldp_interface(unittest.TestCase):
'''Unit test for "show ip ospf mpls ldp interface" '''
# Device stub; each test swaps in a Mock whose execute() returns canned CLI output.
device = Device(name='aDevice')
# Empty CLI output: the parser is expected to raise SchemaEmptyParserError.
empty_output = {'execute.return_value': ''}
# Expected parse of golden_output1: process 2 lives in VRF 'VRF1'
# (area 0.0.0.1) and process 1 in the default VRF (area 0.0.0.0).
golden_parsed_output1 = {
'vrf':
{'VRF1':
{'address_family':
{'ipv4':
{'instance':
{'2':
{'areas':
{'0.0.0.1':
{'interfaces':
{'GigabitEthernet3':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.1',
'holddown_timer': False,
'igp_sync': False,
'state': 'down',
'state_info': 'pending LDP'}}},
'OSPF_SL1':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.1',
'holddown_timer': False,
'igp_sync': False,
'state': 'up'}}}}}},
'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.1'}}}}}}},
'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'interfaces':
{'GigabitEthernet1':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.0',
'holddown_timer': False,
'igp_sync': False,
'state': 'up'}}},
'GigabitEthernet2':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.0',
'holddown_timer': False,
'igp_sync': False,
'state': 'up'}}},
'TenGigabitEthernet3/0/1':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.0',
'holddown_timer': False,
'igp_sync': False,
'state': 'down'}}},
'Loopback1':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.0',
'holddown_timer': False,
'igp_sync': False,
'state': 'up'}}}}}},
'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.0'}}}}}}}}}
# Raw CLI output that should parse into golden_parsed_output1.
golden_output1 = {'execute.return_value': '''
R1_ospf_xe#show ip ospf mpls ldp interface
Loopback1
Process ID 1, Area 0
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Not required
Holddown timer is disabled
Interface is up
GigabitEthernet2
Process ID 1, Area 0
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Not required
Holddown timer is disabled
Interface is up
GigabitEthernet1
Process ID 1, Area 0
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Not required
Holddown timer is disabled
Interface is up
OSPF_SL1
Process ID 2, VRF VRF1, Area 1
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Not required
Holddown timer is disabled
Interface is up
GigabitEthernet3
Process ID 2, VRF VRF1, Area 1
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Not required
Holddown timer is disabled
Interface is down and pending LDP
TenGigabitEthernet3/0/1
Process ID 1, Area 0
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Not required
Holddown timer is disabled
Interface is down
'''}
# Expected parse of golden_output2: single process 65109 in the default
# VRF, area 0.0.0.8; LDP-IGP sync is required on the two Gig interfaces.
golden_parsed_output2 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'65109':
{'areas':
{'0.0.0.8':
{'interfaces':
{'GigabitEthernet0/0/0':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.8',
'holddown_timer': False,
'igp_sync': True,
'state': 'up'}}},
'GigabitEthernet0/0/2':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.8',
'holddown_timer': False,
'igp_sync': True,
'state': 'up'}}},
'Loopback0':
{'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.8',
'holddown_timer': False,
'igp_sync': False,
'state': 'up'}}}}}},
'mpls':
{'ldp':
{'autoconfig': False,
'autoconfig_area_id': '0.0.0.8'}}}}}}}}}
# Raw CLI output that should parse into golden_parsed_output2.
golden_output2 = {'execute.return_value': '''
Router#sh ip ospf mpls ldp interface
Load for five secs: 8%/0%; one minute: 6%; five minutes: 7%
Time source is NTP, 10:36:51.278 EST Mon Nov 7 2016
Loopback0
Process ID 65109, Area 8
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Not required
Holddown timer is disabled
Interface is up
GigabitEthernet0/0/2
Process ID 65109, Area 8
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Required
Holddown timer is not configured
Interface is up
GigabitEthernet0/0/0
Process ID 65109, Area 8
LDP is not configured through LDP autoconfig
LDP-IGP Synchronization : Required
Holddown timer is not configured
Interface is up
'''}
def test_show_ip_ospf_mpls_ldp_interface_full1(self):
# Golden output 1 parses into the expected structure.
self.maxDiff = None
self.device = Mock(**self.golden_output1)
obj = ShowIpOspfMplsLdpInterface(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_mpls_ldp_interface_full2(self):
# Golden output 2 parses into the expected structure.
self.maxDiff = None
self.device = Mock(**self.golden_output2)
obj = ShowIpOspfMplsLdpInterface(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output2)
def test_show_ip_ospf_mpls_ldp_interface_empty(self):
# No device output: parser must raise SchemaEmptyParserError.
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfMplsLdpInterface(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# ==================================================
# Unit test for 'show ip ospf mpls traffic-eng link'
# ==================================================
class test_show_ip_ospf_mpls_traffic_eng_link(unittest.TestCase):
'''Unit test for "show ip ospf mpls traffic-eng link" '''
# Device stub; the full test below installs a Mock execute() on it.
device = Device(name='aDevice')
# Empty CLI output: the parser is expected to raise SchemaEmptyParserError.
empty_output = {'execute.return_value': ''}
# Expected parse of the golden output: process 1 (default VRF) has TE
# enabled in area 0.0.0.0 with two links in hash buckets 8 and 9;
# process 2 (VRF1) reports TE not initialized in area 0.0.0.1.
golden_parsed_output1 = {
'vrf':
{'VRF1':
{'address_family':
{'ipv4':
{'instance':
{'2':
{'areas':
{'0.0.0.1':
{'mpls':
{'te':
{'enable': False}}}},
'mpls':
{'te':
{'router_id': '10.229.11.11'}}}}}}},
'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'areas':
{'0.0.0.0':
{'mpls':
{'te':
{'area_instance': 2,
'enable': True,
'link_hash_bucket':
{8:
{'link_fragments':
{2:
{'affinity_bit': '0x0',
'igp_admin_metric': 1,
'interface_address': '10.1.2.1',
'link_id': '10.1.2.1',
'link_instance': 2,
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'network_type': 'broadcast network',
'te_admin_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}}},
9:
{'link_fragments':
{1:
{'affinity_bit': '0x0',
'igp_admin_metric': 1,
'interface_address': '10.1.4.1',
'link_id': '10.1.4.4',
'link_instance': 2,
'max_bandwidth': 125000000,
'max_reservable_bandwidth': 93750000,
'network_type': 'broadcast network',
'te_admin_metric': 1,
'total_priority': 8,
'unreserved_bandwidths':
{'0 93750000':
{'priority': 0,
'unreserved_bandwidth': 93750000},
'1 93750000':
{'priority': 1,
'unreserved_bandwidth': 93750000},
'2 93750000':
{'priority': 2,
'unreserved_bandwidth': 93750000},
'3 93750000':
{'priority': 3,
'unreserved_bandwidth': 93750000},
'4 93750000':
{'priority': 4,
'unreserved_bandwidth': 93750000},
'5 93750000':
{'priority': 5,
'unreserved_bandwidth': 93750000},
'6 93750000':
{'priority': 6,
'unreserved_bandwidth': 93750000},
'7 93750000':
{'priority': 7,
'unreserved_bandwidth': 93750000}}}}}},
'total_links': 2}}}},
'mpls':
{'te':
{'router_id': '10.4.1.1'}}}}}}}}}
def test_show_ip_ospf_mpls_traffic_eng_link_full1(self):
self.maxDiff = None
# The parser issues follow-up 'show running-config | section router
# ospf <id>' commands to resolve each process's VRF, so execute() is
# mocked with a mapper that dispatches on the exact command string.
def mapper(key):
return self.outputs[key]
raw1 = '''\
R1_ospf_xe#show ip ospf mpls traffic-eng link
OSPF Router with ID (10.4.1.1) (Process ID 1)
Area 0 has 2 MPLS TE links. Area instance is 2.
Links in hash bucket 8.
Link is associated with fragment 2. Link instance is 2
Link connected to Broadcast network
Link ID : 10.1.2.1
Interface Address : 10.1.2.1
Admin Metric te: 1 igp: 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
Links in hash bucket 9.
Link is associated with fragment 1. Link instance is 2
Link connected to Broadcast network
Link ID : 10.1.4.4
Interface Address : 10.1.4.1
Admin Metric te: 1 igp: 1
Maximum bandwidth : 125000000
Maximum reservable bandwidth : 93750000
Number of Priority : 8
Priority 0 : 93750000 Priority 1 : 93750000
Priority 2 : 93750000 Priority 3 : 93750000
Priority 4 : 93750000 Priority 5 : 93750000
Priority 6 : 93750000 Priority 7 : 93750000
Affinity Bit : 0x0
OSPF Router with ID (10.229.11.11) (Process ID 2)
Area 1 MPLS TE not initialized
'''
raw2 = '''\
R1_ospf_xe#show running-config | section router ospf 1
router ospf 1
'''
raw3 = '''\
R1_ospf_xe#show running-config | section router ospf 2
router ospf 2 vrf VRF1
'''
self.outputs = {}
self.outputs['show ip ospf mpls traffic-eng link'] = raw1
self.outputs['show running-config | section router ospf 1'] = raw2
self.outputs['show running-config | section router ospf 2'] = raw3
self.device.execute = Mock()
self.device.execute.side_effect = mapper
obj = ShowIpOspfMplsTrafficEngLink(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_mpls_traffic_eng_link_empty(self):
# No device output: parser must raise SchemaEmptyParserError.
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfMplsTrafficEngLink(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# =======================================
# Unit test for 'show ip ospf max-metric'
# =======================================
class test_show_ip_ospf_max_metric(unittest.TestCase):
'''Unit test for "show ip ospf max-metric" '''
# Device stub; each test swaps in a Mock that returns canned CLI output.
device = Device(name='aDevice')
# Empty CLI output: the parser is expected to raise SchemaEmptyParserError.
empty_output = {'execute.return_value': ''}
# Expected parse of golden_output1: two processes. The boolean key of
# 'router_lsa_max_metric' records whether max-metric router-LSAs are
# being originated — process 1 never originated them (False with an
# empty detail dict), process 65109 did on startup and has since unset.
golden_parsed_output1 = {
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'1':
{'router_id': '10.4.1.1',
'base_topology_mtid':
{'0':
{'router_lsa_max_metric':
{False: {},
},
'start_time': '00:01:58.313',
'time_elapsed': '00:54:43.859'}}},
'65109':
{'router_id': '10.0.187.164',
'base_topology_mtid':
{'0':
{'router_lsa_max_metric':
{True:
{'advertise_lsa_metric': 16711680,
'condition': 'on startup for 5 seconds',
'state': 'inactive',
'unset_reason': 'timer expired, Originated for 5 seconds',
'unset_time': '00:02:03.314',
'unset_time_elapsed': '00:54:38.858',
},
},
'start_time': '00:01:58.314',
'time_elapsed': '00:54:43.858'}}}}}}}}}
# Raw CLI output that should parse into golden_parsed_output1.
golden_output1 = {'execute.return_value': '''
Router#sh ip ospf max-metric
Load for five secs: 99%/0%; one minute: 89%; five minutes: 58%
Time source is NTP, 17:13:44.700 EST Sat Nov 12 2016
OSPF Router with ID (10.0.187.164) (Process ID 65109)
Base Topology (MTID 0)
Start time: 00:01:58.314, Time elapsed: 00:54:43.858
Originating router-LSAs with maximum metric
Condition: on startup for 5 seconds, State: inactive
Advertise summary-LSAs with metric 16711680
Unset reason: timer expired, Originated for 5 seconds
Unset time: 00:02:03.314, Time elapsed: 00:54:38.858
OSPF Router with ID (10.4.1.1) (Process ID 1)
Base Topology (MTID 0)
Start time: 00:01:58.313, Time elapsed: 00:54:43.859
Router is not originating router-LSAs with maximum metric
'''}
# Expected parse of golden_output2: max-metric origination is still
# active (on-startup timer running with time remaining).
golden_parsed_output2 = {'vrf': {'default':
{'address_family':
{'ipv4':
{'instance':
{'1111':
{'base_topology_mtid':
{'0':
{'router_lsa_max_metric':
{True:
{'condition': 'on '
'startup '
'for '
'300 '
'seconds',
'state': 'active',
'time_remaining': '00:03:55'}},
'start_time': '00:02:24.554',
'time_elapsed': '00:01:04.061'}},
'router_id': '10.4.1.1'}}}}}}}
# Raw CLI output that should parse into golden_parsed_output2.
golden_output2 = {'execute.return_value': '''
show ip ospf max-metric
Load for five secs: 3%/0%; one minute: 3%; five minutes: 1%
Time source is NTP, *07:52:19.838 EST Tue Jun 18 2019
OSPF Router with ID (10.4.1.1) (Process ID 1111)
Base Topology (MTID 0)
Start time: 00:02:24.554, Time elapsed: 00:01:04.061
Originating router-LSAs with maximum metric, Time remaining: 00:03:55
Condition: on startup for 300 seconds, State: active
'''}
def test_show_ip_ospf_max_metric_full1(self):
# Golden output 1 parses into the expected structure.
self.maxDiff = None
self.device = Mock(**self.golden_output1)
obj = ShowIpOspfMaxMetric(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ip_ospf_max_metric_full2(self):
# Golden output 2 parses into the expected structure.
self.maxDiff = None
self.device = Mock(**self.golden_output2)
obj = ShowIpOspfMaxMetric(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output2)
def test_show_ip_ospf_max_metric_empty(self):
# No device output: parser must raise SchemaEmptyParserError.
self.maxDiff = None
self.device = Mock(**self.empty_output)
obj = ShowIpOspfMaxMetric(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
# ====================================
# Unit test for 'show ip ospf traffic'
# ====================================
class test_show_ip_ospf_traffic(unittest.TestCase):
'''Unit test for "show ip ospf traffic" '''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'ospf_statistics':
{'last_clear_traffic_counters': 'never',
'rcvd':
{'checksum_errors': 0,
'database_desc': 938,
'hello': 2024732,
'link_state_acks': 75666,
'link_state_req': 323,
'link_state_updates': 11030,
'total': 2112690},
'sent':
{'database_desc': 1176,
'hello': 2381794,
'link_state_acks': 8893,
'link_state_req': 43,
'link_state_updates': 92224,
'total': 2509472}},
'vrf':
{'default':
{'address_family':
{'ipv4':
{'instance':
{'65109':
{'router_id': '10.169.197.252',
'ospf_queue_statistics':
{'limit':
{'inputq': 0,
'outputq': 0,
'updateq': 200},
'drops':
{'inputq': 0,
'outputq': 0,
'updateq': 0},
'max_delay_msec':
{'inputq': 49,
'outputq': 2,
'updateq': 2},
'max_size':
{'total':
{'inputq': 14,
'outputq': 6,
'updateq': 14,
},
'invalid':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'hello':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'db_des':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'ls_req':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'ls_upd':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'ls_ack':
{'inputq': 14,
'outputq': 6,
'updateq': 14,
},
},
'current_size':
{'total':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'invalid':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'hello':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'db_des':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'ls_req':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'ls_upd':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
'ls_ack':
{'inputq': 0,
'outputq': 0,
'updateq': 0,
},
},
},
'interface_statistics':
{'interfaces':
{'GigabitEthernet0/0/0':
{'last_clear_traffic_counters': 'never',
'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 1,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 4980,
'packets': 145},
'rx_hello':
{'bytes': 18443216,
'packets': 384238},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 713980,
'packets': 11840},
'rx_ls_req':
{'bytes': 9180,
'packets': 57},
'rx_ls_upd':
{'bytes': 242036,
'packets': 2581},
'rx_total':
{'bytes': 19413392,
'packets': 398861},
'tx_db_des':
{'bytes': 50840,
'packets': 475},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 30825036,
'packets': 385336},
'tx_ls_ack':
{'bytes': 187352,
'packets': 2473},
'tx_ls_req':
{'bytes': 404,
'packets': 7},
'tx_ls_upd':
{'bytes': 13558188,
'packets': 12658},
'tx_total':
{'bytes': 44621820,
'packets': 400949}}}},
'GigabitEthernet0/0/1':
{'last_clear_traffic_counters': 'never',
'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 0,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 11844,
'packets': 47},
'rx_hello':
{'bytes': 18812552,
'packets': 391929},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 18804556,
'packets': 19064},
'rx_ls_req':
{'bytes': 25212,
'packets': 22},
'rx_ls_upd':
{'bytes': 231124,
'packets': 1902},
'rx_total':
{'bytes': 37885288,
'packets': 412964},
'tx_db_des':
{'bytes': 54772,
'packets': 53},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 31355000,
'packets': 391938},
'tx_ls_ack':
{'bytes': 167024,
'packets': 1871},
'tx_ls_req':
{'bytes': 6632,
'packets': 10},
'tx_ls_upd':
{'bytes': 26983772,
'packets': 26114},
'tx_total':
{'bytes': 58567200,
'packets': 419986}}}},
'GigabitEthernet0/0/3':
{'last_clear_traffic_counters': 'never',
'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 3,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 0,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 25932,
'packets': 636},
'rx_hello':
{'bytes': 20276152,
'packets': 422436},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 788256,
'packets': 12534},
'rx_ls_req':
{'bytes': 29088,
'packets': 191},
'rx_ls_upd':
{'bytes': 170236,
'packets': 1967},
'rx_total':
{'bytes': 21289664,
'packets': 437764},
'tx_db_des':
{'bytes': 73492,
'packets': 508},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 31262032,
'packets': 390845},
'tx_ls_ack':
{'bytes': 127024,
'packets': 1956},
'tx_ls_req':
{'bytes': 644,
'packets': 10},
'tx_ls_upd':
{'bytes': 15890600,
'packets': 15015},
'tx_total':
{'bytes': 47353792,
'packets': 408334}}}},
'GigabitEthernet0/0/4':
{'last_clear_traffic_counters': 'never',
'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 0,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 524,
'packets': 12},
'rx_hello':
{'bytes': 14716084,
'packets': 306586},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 613440,
'packets': 10100},
'rx_ls_req':
{'bytes': 1032,
'packets': 6},
'rx_ls_upd':
{'bytes': 165556,
'packets': 1706},
'rx_total':
{'bytes': 15496636,
'packets': 318410},
'tx_db_des':
{'bytes': 2816,
'packets': 19},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 24538936,
'packets': 306737},
'tx_ls_ack':
{'bytes': 132900,
'packets': 1690},
'tx_ls_req':
{'bytes': 336,
'packets': 6},
'tx_ls_upd':
{'bytes': 10449232,
'packets': 11120},
'tx_total':
{'bytes': 35124220,
'packets': 319572}}}},
'GigabitEthernet0/0/5':
{'last_clear_traffic_counters': 'never',
'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 0,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 0,
'packets': 0},
'rx_hello':
{'bytes': 0,
'packets': 0},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 0,
'packets': 0},
'rx_ls_req':
{'bytes': 0,
'packets': 0},
'rx_ls_upd':
{'bytes': 0,
'packets': 0},
'rx_total':
{'bytes': 0,
'packets': 0},
'tx_db_des':
{'bytes': 0,
'packets': 0},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 27731564,
'packets': 364889},
'tx_ls_ack':
{'bytes': 0,
'packets': 0},
'tx_ls_req':
{'bytes': 0,
'packets': 0},
'tx_ls_upd':
{'bytes': 0,
'packets': 0},
'tx_total':
{'bytes': 27731564,
'packets': 364889}}}},
'GigabitEthernet0/0/6':
{'last_clear_traffic_counters': 'never',
'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 0,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 1232,
'packets': 36},
'rx_hello':
{'bytes': 8125472,
'packets': 169281},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 8733808,
'packets': 9327},
'rx_ls_req':
{'bytes': 25080,
'packets': 20},
'rx_ls_upd':
{'bytes': 76640,
'packets': 908},
'rx_total':
{'bytes': 16962232,
'packets': 179572},
'tx_db_des':
{'bytes': 43560,
'packets': 40},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 13552440,
'packets': 169411},
'tx_ls_ack':
{'bytes': 63396,
'packets': 899},
'tx_ls_req':
{'bytes': 224,
'packets': 4},
'tx_ls_upd':
{'bytes': 12553264,
'packets': 12539},
'tx_total':
{'bytes': 26212884,
'packets': 182893}}}},
'GigabitEthernet0/0/7':
{'last_clear_traffic_counters': 'never',
'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 0,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 2524,
'packets': 62},
'rx_hello':
{'bytes': 16812472,
'packets': 350262},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 759424,
'packets': 12801},
'rx_ls_req':
{'bytes': 4452,
'packets': 27},
'rx_ls_upd':
{'bytes': 11921824,
'packets': 1966},
'rx_total':
{'bytes': 29500696,
'packets': 365118},
'tx_db_des':
{'bytes': 11964,
'packets': 81},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 29795828,
'packets': 372638},
'tx_ls_ack':
{'bytes': 256,
'packets': 4},
'tx_ls_req':
{'bytes': 336,
'packets': 6},
'tx_ls_upd':
{'bytes': 13471532,
'packets': 14778},
'tx_total':
{'bytes': 43279916,
'packets': 387507}}}}}},
'summary_traffic_statistics':
{'ospf_header_errors':
{'adjacency_throttle': 0,
'area_mismatch': 0,
'auth_type': 0,
'authentication': 0,
'bad_source': 0,
'bfd': 0,
'checksum': 0,
'duplicate_id': 0,
'hello': 0,
'instance_id': 0,
'length': 0,
'lls': 0,
'mtu_mismatch': 0,
'nbr_ignored': 3,
'no_sham_link': 0,
'no_virtual_link': 0,
'self_originated': 0,
'test_discard': 0,
'ttl_check_fail': 0,
'unknown_neighbor': 1,
'version': 0},
'ospf_lsa_errors':
{'checksum': 0,
'data': 0,
'length': 0,
'type': 0},
'ospf_packets_received_sent':
{'type':
{'rx_db_des':
{'bytes': 47036,
'packets': 938},
'rx_hello':
{'bytes': 97185948,
'packets': 2024732},
'rx_invalid':
{'bytes': 0,
'packets': 0},
'rx_ls_ack':
{'bytes': 30413464,
'packets': 75666},
'rx_ls_req':
{'bytes': 94044,
'packets': 323},
'rx_ls_upd':
{'bytes': 12807416,
'packets': 11030},
'rx_total':
{'bytes': 140547908,
'packets': 2112689},
'tx_db_des':
{'bytes': 237444,
'packets': 1176},
'tx_failed':
{'bytes': 0,
'packets': 0},
'tx_hello':
{'bytes': 189060836,
'packets': 2381794},
'tx_ls_ack':
{'bytes': 677952,
'packets': 8893},
'tx_ls_req':
{'bytes': 8576,
'packets': 43},
'tx_ls_upd':
{'bytes': 92906588,
'packets': 92224},
'tx_total':
{'bytes': 282891396,
'packets': 2484130}}}}}}}}}}}
golden_output1 = {'execute.return_value': '''
1006#show ip ospf traffic
Load for five secs: 0%/0%; one minute: 0%; five minutes: 0%
Time source is NTP, 16:43:31.626 EST Fri Oct 28 2016
OSPF statistics:
Last clearing of OSPF traffic counters never
Rcvd: 2112690 total, 0 checksum errors
2024732 hello, 938 database desc, 323 link state req
11030 link state updates, 75666 link state acks
Sent: 2509472 total
2381794 hello, 1176 database desc, 43 link state req
92224 link state updates, 8893 link state acks
OSPF Router with ID (10.169.197.252) (Process ID 65109)
OSPF queue statistics for process ID 65109:
InputQ UpdateQ OutputQ
Limit 0 200 0
Drops 0 0 0
Max delay [msec] 49 2 2
Max size 14 14 6
Invalid 0 0 0
Hello 0 0 0
DB des 0 0 0
LS req 0 0 0
LS upd 0 0 0
LS ack 14 14 6
Current size 0 0 0
Invalid 0 0 0
Hello 0 0 0
DB des 0 0 0
LS req 0 0 0
LS upd 0 0 0
LS ack 0 0 0
Interface statistics:
Interface GigabitEthernet0/0/6
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 169281 8125472
RX DB des 36 1232
RX LS req 20 25080
RX LS upd 908 76640
RX LS ack 9327 8733808
RX Total 179572 16962232
TX Failed 0 0
TX Hello 169411 13552440
TX DB des 40 43560
TX LS req 4 224
TX LS upd 12539 12553264
TX LS ack 899 63396
TX Total 182893 26212884
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/0/1
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 391929 18812552
RX DB des 47 11844
RX LS req 22 25212
RX LS upd 1902 231124
RX LS ack 19064 18804556
RX Total 412964 37885288
TX Failed 0 0
TX Hello 391938 31355000
TX DB des 53 54772
TX LS req 10 6632
TX LS upd 26114 26983772
TX LS ack 1871 167024
TX Total 419986 58567200
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/0/4
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 306586 14716084
RX DB des 12 524
RX LS req 6 1032
RX LS upd 1706 165556
RX LS ack 10100 613440
RX Total 318410 15496636
TX Failed 0 0
TX Hello 306737 24538936
TX DB des 19 2816
TX LS req 6 336
TX LS upd 11120 10449232
TX LS ack 1690 132900
TX Total 319572 35124220
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/0/0
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 384238 18443216
RX DB des 145 4980
RX LS req 57 9180
RX LS upd 2581 242036
RX LS ack 11840 713980
RX Total 398861 19413392
TX Failed 0 0
TX Hello 385336 30825036
TX DB des 475 50840
TX LS req 7 404
TX LS upd 12658 13558188
TX LS ack 2473 187352
TX Total 400949 44621820
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 1,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/0/3
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 422436 20276152
RX DB des 636 25932
RX LS req 191 29088
RX LS upd 1967 170236
RX LS ack 12534 788256
RX Total 437764 21289664
TX Failed 0 0
TX Hello 390845 31262032
TX DB des 508 73492
TX LS req 10 644
TX LS upd 15015 15890600
TX LS ack 1956 127024
TX Total 408334 47353792
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 3, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/0/5
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 0 0
RX DB des 0 0
RX LS req 0 0
RX LS upd 0 0
RX LS ack 0 0
RX Total 0 0
TX Failed 0 0
TX Hello 364889 27731564
TX DB des 0 0
TX LS req 0 0
TX LS upd 0 0
TX LS ack 0 0
TX Total 364889 27731564
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/0/7
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 350262 16812472
RX DB des 62 2524
RX LS req 27 4452
RX LS upd 1966 11921824
RX LS ack 12801 759424
RX Total 365118 29500696
TX Failed 0 0
TX Hello 372638 29795828
TX DB des 81 11964
TX LS req 6 336
TX LS upd 14778 13471532
TX LS ack 4 256
TX Total 387507 43279916
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Summary traffic statistics for process ID 65109:
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 2024732 97185948
RX DB des 938 47036
RX LS req 323 94044
RX LS upd 11030 12807416
RX LS ack 75666 30413464
RX Total 2112689 140547908
TX Failed 0 0
TX Hello 2381794 189060836
TX DB des 1176 237444
TX LS req 43 8576
TX LS upd 92224 92906588
TX LS ack 8893 677952
TX Total 2484130 282891396
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 3, LLS 0, Unknown Neighbor 1,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
1006#
'''}
golden_parsed_output = {
'ospf_statistics': {
'last_clear_traffic_counters': 'never',
'rcvd': {
'total': 1082870,
'checksum_errors': 0,
'hello': 961667,
'database_desc': 1688,
'link_state_req': 32,
'link_state_updates': 94694,
'link_state_acks': 24370,
},
'sent': {
'total': 1072239,
'hello': 932534,
'database_desc': 1251,
'link_state_req': 170,
'link_state_updates': 74590,
'link_state_acks': 63700,
},
},
'vrf': {
'default': {
'address_family': {
'ipv4': {
'instance': {
'888': {
'router_id': '192.168.36.220',
'ospf_queue_statistics': {
'limit': {
'inputq': 0,
'updateq': 200,
'outputq': 0,
},
'drops': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'max_delay_msec': {
'inputq': 344,
'updateq': 269,
'outputq': 12,
},
'max_size': {
'total': {
'inputq': 5,
'updateq': 5,
'outputq': 2,
},
'invalid': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'hello': {
'inputq': 1,
'updateq': 0,
'outputq': 1,
},
'db_des': {
'inputq': 2,
'updateq': 0,
'outputq': 1,
},
'ls_req': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_upd': {
'inputq': 2,
'updateq': 5,
'outputq': 0,
},
'ls_ack': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
},
'current_size': {
'total': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'invalid': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'hello': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'db_des': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_req': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_upd': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_ack': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
},
},
'interface_statistics': {
'interfaces': {
'GigabitEthernet0/0/0': {
'last_clear_traffic_counters': 'never',
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 495694,
'bytes': 23793308,
},
'rx_db_des': {
'packets': 1676,
'bytes': 298812,
},
'rx_ls_req': {
'packets': 30,
'bytes': 1392,
},
'rx_ls_upd': {
'packets': 46764,
'bytes': 4399320,
},
'rx_ls_ack': {
'packets': 6580,
'bytes': 316460,
},
'rx_total': {
'packets': 550744,
'bytes': 28809292,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 466574,
'bytes': 37324132,
},
'tx_db_des': {
'packets': 1238,
'bytes': 326112,
},
'tx_ls_req': {
'packets': 169,
'bytes': 10388,
},
'tx_ls_upd': {
'packets': 47473,
'bytes': 4865652,
},
'tx_ls_ack': {
'packets': 36140,
'bytes': 2827140,
},
'tx_total': {
'packets': 551594,
'bytes': 45353424,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'lls': 0,
'unknown_neighbor': 419,
'authentication': 0,
'ttl_check_fail': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
'TenGigabitEthernet0/2/0': {
'last_clear_traffic_counters': 'never',
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 465973,
'bytes': 22366692,
},
'rx_db_des': {
'packets': 12,
'bytes': 1764,
},
'rx_ls_req': {
'packets': 2,
'bytes': 312,
},
'rx_ls_upd': {
'packets': 47930,
'bytes': 4445532,
},
'rx_ls_ack': {
'packets': 17790,
'bytes': 971660,
},
'rx_total': {
'packets': 531707,
'bytes': 27785960,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 465960,
'bytes': 37276652,
},
'tx_db_des': {
'packets': 13,
'bytes': 2592,
},
'tx_ls_req': {
'packets': 1,
'bytes': 56,
},
'tx_ls_upd': {
'packets': 27117,
'bytes': 2661612,
},
'tx_ls_ack': {
'packets': 27560,
'bytes': 2130760,
},
'tx_total': {
'packets': 520651,
'bytes': 42071672,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'lls': 0,
'unknown_neighbor': 0,
'authentication': 0,
'ttl_check_fail': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
},
},
'summary_traffic_statistics': {
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 961667,
'bytes': 46160000,
},
'rx_db_des': {
'packets': 1688,
'bytes': 300576,
},
'rx_ls_req': {
'packets': 32,
'bytes': 1704,
},
'rx_ls_upd': {
'packets': 94694,
'bytes': 8844852,
},
'rx_ls_ack': {
'packets': 24370,
'bytes': 1288120,
},
'rx_total': {
'packets': 1082451,
'bytes': 56595252,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 932534,
'bytes': 74600784,
},
'tx_db_des': {
'packets': 1251,
'bytes': 328704,
},
'tx_ls_req': {
'packets': 170,
'bytes': 10444,
},
'tx_ls_upd': {
'packets': 74590,
'bytes': 7527264,
},
'tx_ls_ack': {
'packets': 63700,
'bytes': 4957900,
},
'tx_total': {
'packets': 1072245,
'bytes': 87425096,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'lls': 0,
'unknown_neighbor': 419,
'authentication': 0,
'ttl_check_fail': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
},
},
},
},
},
},
}
golden_output = {'execute.return_value': '''
show ip ospf traffic
Load for five secs: 6%/1%; one minute: 20%; five minutes: 14%
Time source is NTP, 01:06:03.667 EST Thu Jan 2 2020
OSPF statistics:
Last clearing of OSPF traffic counters never
Rcvd: 1082870 total, 0 checksum errors
961667 hello, 1688 database desc, 32 link state req
94694 link state updates, 24370 link state acks
Sent: 1072239 total
932534 hello, 1251 database desc, 170 link state req
74590 link state updates, 63700 link state acks
OSPF Router with ID (192.168.36.220) (Process ID 888)
OSPF queue statistics for process ID 888:
InputQ UpdateQ OutputQ
Limit 0 200 0
Drops 0 0 0
Max delay [msec] 344 269 12
Max size 5 5 2
Invalid 0 0 0
Hello 1 0 1
DB des 2 0 1
LS req 0 0 0
LS upd 2 5 0
LS ack 0 0 0
Current size 0 0 0
Invalid 0 0 0
Hello 0 0 0
DB des 0 0 0
LS req 0 0 0
LS upd 0 0 0
LS ack 0 0 0
Interface statistics:
Interface GigabitEthernet0/0/0
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 495694 23793308
RX DB des 1676 298812
RX LS req 30 1392
RX LS upd 46764 4399320
RX LS ack 6580 316460
RX Total 550744 28809292
TX Failed 0 0
TX Hello 466574 37324132
TX DB des 1238 326112
TX LS req 169 10388
TX LS upd 47473 4865652
TX LS ack 36140 2827140
TX Total 551594 45353424
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 419,
Authentication 0, TTL Check Fail 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface TenGigabitEthernet0/2/0
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 465973 22366692
RX DB des 12 1764
RX LS req 2 312
RX LS upd 47930 4445532
RX LS ack 17790 971660
RX Total 531707 27785960
TX Failed 0 0
TX Hello 465960 37276652
TX DB des 13 2592
TX LS req 1 56
TX LS upd 27117 2661612
TX LS ack 27560 2130760
TX Total 520651 42071672
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Summary traffic statistics for process ID 888:
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 961667 46160000
RX DB des 1688 300576
RX LS req 32 1704
RX LS upd 94694 8844852
RX LS ack 24370 1288120
RX Total 1082451 56595252
TX Failed 0 0
TX Hello 932534 74600784
TX DB des 1251 328704
TX LS req 170 10444
TX LS upd 74590 7527264
TX LS ack 63700 4957900
TX Total 1072245 87425096
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 419,
Authentication 0, TTL Check Fail 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
'''}
golden_parsed_output2 = {
'vrf': {
'default': {
'address_family': {
'ipv4': {
'instance': {
'10000': {
'summary_traffic_statistics': {
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 0,
'bytes': 0,
},
'rx_db_des': {
'packets': 0,
'bytes': 0,
},
'rx_ls_req': {
'packets': 0,
'bytes': 0,
},
'rx_ls_upd': {
'packets': 0,
'bytes': 0,
},
'rx_ls_ack': {
'packets': 0,
'bytes': 0,
},
'rx_total': {
'packets': 0,
'bytes': 0,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 0,
'bytes': 0,
},
'tx_db_des': {
'packets': 0,
'bytes': 0,
},
'tx_ls_req': {
'packets': 0,
'bytes': 0,
},
'tx_ls_upd': {
'packets': 0,
'bytes': 0,
},
'tx_ls_ack': {
'packets': 0,
'bytes': 0,
},
'tx_total': {
'packets': 0,
'bytes': 0,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'lls': 0,
'unknown_neighbor': 0,
'authentication': 0,
'ttl_check_fail': 0,
'adjacency_throttle': 0,
'bfd': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
},
'888': {
'router_id': '10.19.13.14',
'ospf_queue_statistics': {
'limit': {
'inputq': 0,
'updateq': 200,
'outputq': 0,
},
'drops': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'max_delay_msec': {
'inputq': 3,
'updateq': 2,
'outputq': 1,
},
'max_size': {
'total': {
'inputq': 4,
'updateq': 3,
'outputq': 2,
},
'invalid': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'hello': {
'inputq': 4,
'updateq': 0,
'outputq': 1,
},
'db_des': {
'inputq': 0,
'updateq': 0,
'outputq': 1,
},
'ls_req': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_upd': {
'inputq': 0,
'updateq': 3,
'outputq': 0,
},
'ls_ack': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
},
'current_size': {
'total': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'invalid': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'hello': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'db_des': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_req': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_upd': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
'ls_ack': {
'inputq': 0,
'updateq': 0,
'outputq': 0,
},
},
},
'interface_statistics': {
'interfaces': {
'Tunnel65541': {
'last_clear_traffic_counters': 'never',
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 0,
'bytes': 0,
},
'rx_db_des': {
'packets': 0,
'bytes': 0,
},
'rx_ls_req': {
'packets': 0,
'bytes': 0,
},
'rx_ls_upd': {
'packets': 0,
'bytes': 0,
},
'rx_ls_ack': {
'packets': 0,
'bytes': 0,
},
'rx_total': {
'packets': 0,
'bytes': 0,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 62301,
'bytes': 5980896,
},
'tx_db_des': {
'packets': 0,
'bytes': 0,
},
'tx_ls_req': {
'packets': 0,
'bytes': 0,
},
'tx_ls_upd': {
'packets': 0,
'bytes': 0,
},
'tx_ls_ack': {
'packets': 0,
'bytes': 0,
},
'tx_total': {
'packets': 62301,
'bytes': 5980896,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'lls': 0,
'unknown_neighbor': 0,
'authentication': 0,
'ttl_check_fail': 0,
'adjacency_throttle': 0,
'bfd': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
'GigabitEthernet0/1/7': {
'last_clear_traffic_counters': 'never',
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 70493,
'bytes': 3383664,
},
'rx_db_des': {
'packets': 3,
'bytes': 1676,
},
'rx_ls_req': {
'packets': 1,
'bytes': 36,
},
'rx_ls_upd': {
'packets': 14963,
'bytes': 1870388,
},
'rx_ls_ack': {
'packets': 880,
'bytes': 76140,
},
'rx_total': {
'packets': 86340,
'bytes': 5331904,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 1,
'bytes': 100,
},
'tx_db_des': {
'packets': 4,
'bytes': 416,
},
'tx_ls_req': {
'packets': 1,
'bytes': 968,
},
'tx_ls_upd': {
'packets': 1,
'bytes': 108,
},
'tx_ls_ack': {
'packets': 134,
'bytes': 9456,
},
'tx_total': {
'packets': 141,
'bytes': 11048,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'lls': 0,
'unknown_neighbor': 0,
'authentication': 0,
'ttl_check_fail': 0,
'adjacency_throttle': 0,
'bfd': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
'GigabitEthernet0/1/6': {
'last_clear_traffic_counters': 'never',
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 70504,
'bytes': 3384192,
},
'rx_db_des': {
'packets': 3,
'bytes': 1676,
},
'rx_ls_req': {
'packets': 1,
'bytes': 36,
},
'rx_ls_upd': {
'packets': 14809,
'bytes': 1866264,
},
'rx_ls_ack': {
'packets': 877,
'bytes': 76028,
},
'rx_total': {
'packets': 86194,
'bytes': 5328196,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 1,
'bytes': 100,
},
'tx_db_des': {
'packets': 4,
'bytes': 416,
},
'tx_ls_req': {
'packets': 1,
'bytes': 968,
},
'tx_ls_upd': {
'packets': 1,
'bytes': 108,
},
'tx_ls_ack': {
'packets': 117,
'bytes': 8668,
},
'tx_total': {
'packets': 124,
'bytes': 10260,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 0,
'lls': 0,
'unknown_neighbor': 0,
'authentication': 0,
'ttl_check_fail': 0,
'adjacency_throttle': 0,
'bfd': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
},
},
'summary_traffic_statistics': {
'ospf_packets_received_sent': {
'type': {
'rx_invalid': {
'packets': 0,
'bytes': 0,
},
'rx_hello': {
'packets': 159187,
'bytes': 7640968,
},
'rx_db_des': {
'packets': 10240,
'bytes': 337720,
},
'rx_ls_req': {
'packets': 5,
'bytes': 216,
},
'rx_ls_upd': {
'packets': 31899,
'bytes': 4010656,
},
'rx_ls_ack': {
'packets': 2511,
'bytes': 201204,
},
'rx_total': {
'packets': 203842,
'bytes': 12190764,
},
'tx_failed': {
'packets': 0,
'bytes': 0,
},
'tx_hello': {
'packets': 208493,
'bytes': 20592264,
},
'tx_db_des': {
'packets': 10540,
'bytes': 15808320,
},
'tx_ls_req': {
'packets': 5,
'bytes': 3112,
},
'tx_ls_upd': {
'packets': 33998,
'bytes': 5309252,
},
'tx_ls_ack': {
'packets': 17571,
'bytes': 1220144,
},
'tx_total': {
'packets': 270607,
'bytes': 42933092,
},
},
},
'ospf_header_errors': {
'length': 0,
'instance_id': 0,
'checksum': 0,
'auth_type': 0,
'version': 0,
'bad_source': 0,
'no_virtual_link': 0,
'area_mismatch': 0,
'no_sham_link': 0,
'self_originated': 0,
'duplicate_id': 0,
'hello': 0,
'mtu_mismatch': 0,
'nbr_ignored': 2682,
'lls': 0,
'unknown_neighbor': 0,
'authentication': 0,
'ttl_check_fail': 0,
'adjacency_throttle': 0,
'bfd': 0,
'test_discard': 0,
},
'ospf_lsa_errors': {
'type': 0,
'length': 0,
'data': 0,
'checksum': 0,
},
},
},
},
},
},
},
},
'ospf_statistics': {
'last_clear_traffic_counters': 'never',
'rcvd': {
'total': 204136,
'checksum_errors': 0,
'hello': 159184,
'database_desc': 10240,
'link_state_req': 5,
'link_state_updates': 31899,
'link_state_acks': 2511,
},
'sent': {
'total': 281838,
'hello': 219736,
'database_desc': 10540,
'link_state_req': 5,
'link_state_updates': 33998,
'link_state_acks': 17571,
},
},
}
golden_output2 = {'execute.return_value': '''
show ip ospf traffic
Summary traffic statistics for process ID 10000:
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 0 0
RX DB des 0 0
RX LS req 0 0
RX LS upd 0 0
RX LS ack 0 0
RX Total 0 0
TX Failed 0 0
TX Hello 0 0
TX DB des 0 0
TX LS req 0 0
TX LS upd 0 0
TX LS ack 0 0
TX Total 0 0
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
OSPF statistics:
Last clearing of OSPF traffic counters never
Rcvd: 204136 total, 0 checksum errors
159184 hello, 10240 database desc, 5 link state req
31899 link state updates, 2511 link state acks
Sent: 281838 total
219736 hello, 10540 database desc, 5 link state req
33998 link state updates, 17571 link state acks
OSPF Router with ID (10.19.13.14) (Process ID 888)
OSPF queue statistics for process ID 888:
InputQ UpdateQ OutputQ
Limit 0 200 0
Drops 0 0 0
Max delay [msec] 3 2 1
Max size 4 3 2
Invalid 0 0 0
Hello 4 0 1
DB des 0 0 1
LS req 0 0 0
LS upd 0 3 0
LS ack 0 0 0
Current size 0 0 0
Invalid 0 0 0
Hello 0 0 0
DB des 0 0 0
LS req 0 0 0
LS upd 0 0 0
LS ack 0 0 0
Interface statistics:
Interface Tunnel65541
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 0 0
RX DB des 0 0
RX LS req 0 0
RX LS upd 0 0
RX LS ack 0 0
RX Total 0 0
TX Failed 0 0
TX Hello 62301 5980896
TX DB des 0 0
TX LS req 0 0
TX LS upd 0 0
TX LS ack 0 0
TX Total 62301 5980896
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/1/7
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 79715 3826316
RX DB des 54 6708
RX LS req 2 72
RX LS upd 16831 2110728
RX LS ack 1580 122140
RX Total 98182 6065964
TX Failed 0 0
TX Hello 73397 7339656
TX DB des 59 72276
TX LS req 3 2052
TX LS upd 9359 1560172
TX LS ack 9656 671164
TX Total 92474 9645320
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 9, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Neighbor Statistics for interface GigabitEthernet0/1/7
Neighbor 10.189.5.253 traffic statistics
Last clearing of neighbor traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 70493 3383664
RX DB des 3 1676
RX LS req 1 36
RX LS upd 14963 1870388
RX LS ack 880 76140
RX Total 86340 5331904
TX Failed 0 0
TX Hello 1 100
TX DB des 4 416
TX LS req 1 968
TX LS upd 1 108
TX LS ack 134 9456
TX Total 141 11048
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Interface GigabitEthernet0/1/6
Last clearing of interface traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 79472 3814652
RX DB des 10186 331012
RX LS req 3 144
RX LS upd 15068 1899928
RX LS ack 931 79064
RX Total 105660 6124800
TX Failed 0 0
TX Hello 72795 7271712
TX DB des 10481 15736044
TX LS req 2 1060
TX LS upd 24639 3749080
TX LS ack 7915 548980
TX Total 115832 27306876
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 2673, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Neighbor Statistics for interface GigabitEthernet0/1/6
Neighbor 10.189.5.252 traffic statistics
Last clearing of neighbor traffic counters never
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 70504 3384192
RX DB des 3 1676
RX LS req 1 36
RX LS upd 14809 1866264
RX LS ack 877 76028
RX Total 86194 5328196
TX Failed 0 0
TX Hello 1 100
TX DB des 4 416
TX LS req 1 968
TX LS upd 1 108
TX LS ack 117 8668
TX Total 124 10260
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 0, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
Summary traffic statistics for process ID 888:
OSPF packets received/sent
Type Packets Bytes
RX Invalid 0 0
RX Hello 159187 7640968
RX DB des 10240 337720
RX LS req 5 216
RX LS upd 31899 4010656
RX LS ack 2511 201204
RX Total 203842 12190764
TX Failed 0 0
TX Hello 208493 20592264
TX DB des 10540 15808320
TX LS req 5 3112
TX LS upd 33998 5309252
TX LS ack 17571 1220144
TX Total 270607 42933092
OSPF header errors
Length 0, Instance ID 0, Checksum 0, Auth Type 0,
Version 0, Bad Source 0, No Virtual Link 0,
Area Mismatch 0, No Sham Link 0, Self Originated 0,
Duplicate ID 0, Hello 0, MTU Mismatch 0,
Nbr Ignored 2682, LLS 0, Unknown Neighbor 0,
Authentication 0, TTL Check Fail 0, Adjacency Throttle 0,
BFD 0, Test discard 0
OSPF LSA errors
Type 0, Length 0, Data 0, Checksum 0
'''}
def test_show_ip_ospf_traffic_empty(self):
    '''Empty device output must raise SchemaEmptyParserError.'''
    self.maxDiff = None
    self.device = Mock(**self.empty_output)
    parser = ShowIpOspfTraffic(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_show_ip_ospf_traffic_full1(self):
    '''Golden output 1 must parse into golden_parsed_output1.'''
    self.maxDiff = None
    self.device = Mock(**self.golden_output1)
    parser = ShowIpOspfTraffic(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output1)
def test_golden(self):
    '''Golden output (process 888, two interfaces) must parse correctly.'''
    self.maxDiff = None
    self.device = Mock(**self.golden_output)
    parser = ShowIpOspfTraffic(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output)
def test_golden2(self):
    '''Golden output 2 (summary-first ordering, neighbor stats) must parse correctly.'''
    self.maxDiff = None
    self.device = Mock(**self.golden_output2)
    parser = ShowIpOspfTraffic(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output2)
# ============================================
# Unit test for:
# 'show ip ospf neighbor '
# 'show ip ospf neighbor {interface}'
# ============================================
class test_show_ip_ospf_neighbor(unittest.TestCase):
    '''Unit test for:
       "show ip ospf neighbor"
       "show ip ospf neighbor {interface}"
    '''

    device = Device(name='aDevice')
    # Simulated device that returns no CLI output at all.
    empty_output = {'execute.return_value': ''}

    # Expected parse of golden_output below: neighbors grouped per OSPF
    # interface, keyed by neighbor router ID.
    golden_parsed_output = {
        'interfaces':
            {'GigabitEthernet0/0/0':
                {'neighbors':
                    {'172.18.197.242':
                        {'address': '172.19.197.93',
                         'dead_time': '00:00:32',
                         'priority': 1,
                         'state': 'FULL/BDR'},
                     '172.19.197.251':
                        {'address': '172.19.197.91',
                         'dead_time': '00:00:32',
                         'priority': 1,
                         'state': 'FULL/BDR'}}},
             'GigabitEthernet0/0/2':
                {'neighbors':
                    {'172.19.197.252':
                        {'address': '172.19.197.92',
                         'dead_time': '00:00:32',
                         'priority': 1,
                         'state': 'FULL/BDR'}}},
             'GigabitEthernet0/0/3':
                {'neighbors':
                    {'172.19.197.253':
                        {'address': '172.19.197.94',
                         'dead_time': '00:00:32',
                         'priority': 1,
                         'state': 'FULL/BDR'}}},
             'GigabitEthernet0/0/4':
                {'neighbors':
                    {'172.19.197.254':
                        {'address': '172.19.197.90',
                         'dead_time': '00:00:32',
                         'priority': 1,
                         'state': 'FULL/BDR'}}}}}

    # Raw CLI capture for "show ip ospf neighbor" (multi-interface case).
    golden_output = {'execute.return_value':'''
Router#show ip ospf neighbor
Load for five secs: 2%/0%; one minute: 9%; five minutes: 15%
Time source is NTP, 20:44:07.304 EST Wed Nov 2 2016
Neighbor ID Pri State Dead Time Address Interface
172.18.197.242 1 FULL/BDR 00:00:32 172.19.197.93 GigabitEthernet0/0/0
172.19.197.251 1 FULL/BDR 00:00:32 172.19.197.91 GigabitEthernet0/0/0
172.19.197.252 1 FULL/BDR 00:00:32 172.19.197.92 GigabitEthernet0/0/2
172.19.197.253 1 FULL/BDR 00:00:32 172.19.197.94 GigabitEthernet0/0/3
172.19.197.254 1 FULL/BDR 00:00:32 172.19.197.90 GigabitEthernet0/0/4
Router#
'''}

    # Expected parse of golden_output2 (single-interface variant; note
    # priority 0 and the "FULL/ -" state of a point-to-point neighbor).
    golden_parsed_output2 = {
        'interfaces': {
            'GigabitEthernet4': {
                'neighbors': {
                    '10.16.2.2': {
                        'address': '10.169.197.97',
                        'dead_time': '00:00:32',
                        'priority': 0,
                        'state': 'FULL/ -'}}}}}

    # Raw CLI capture for "show ip ospf neighbor GigabitEthernet4".
    golden_output2 = {'execute.return_value':'''
show ip ospf neighbor GigabitEthernet4
Neighbor ID Pri State Dead Time Address Interface
10.16.2.2 0 FULL/ - 00:00:32 10.169.197.97 GigabitEthernet4
'''}

    def test_show_ip_ospf_neighbor_empty(self):
        '''Empty output must raise SchemaEmptyParserError.'''
        self.maxDiff= None
        self.device = Mock(**self.empty_output)
        obj = ShowIpOspfNeighbor(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_ip_ospf_neighbor_full1(self):
        '''Parse the multi-interface golden output.'''
        self.maxDiff = None
        self.device=Mock(**self.golden_output)
        obj=ShowIpOspfNeighbor(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_show_ip_ospf_neighbor_full2(self):
        '''Parse output restricted to one interface via the {interface} argument.'''
        self.maxDiff = None
        self.device=Mock(**self.golden_output2)
        obj=ShowIpOspfNeighbor(device=self.device)
        parsed_output = obj.parse(interface='GigabitEthernet4')
        self.assertEqual(parsed_output, self.golden_parsed_output2)
# ===========================================================
# Unit test for 'show ip ospf database router self-originate'
# ===========================================================
class test_show_ip_ospf_database_router_self_originate(unittest.TestCase):
    '''Unit test for "show ip ospf database router self-originate" '''

    device = Device(name='aDevice')
    # Simulated device that returns no CLI output at all.
    empty_output = {'execute.return_value': ''}

    # Expected parse of golden_output below: a single self-originated
    # router LSA (type 1) in area 0.0.0.8 of process 65109, carrying
    # three links (two stub networks, one point-to-point neighbor).
    golden_parsed_output = {
        'vrf': {
            'default': {
                'address_family': {
                    'ipv4': {
                        'instance': {
                            '65109': {
                                'areas': {
                                    '0.0.0.8': {
                                        'database': {
                                            'lsa_types': {
                                                1: {
                                                    'lsa_type': 1,
                                                    'lsas': {
                                                        '10.169.197.254 10.169.197.254': {
                                                            'adv_router': '10.169.197.254',
                                                            'lsa_id': '10.169.197.254',
                                                            'ospfv2': {
                                                                'body': {
                                                                    'router': {
                                                                        'links': {
                                                                            '10.169.197.252': {
                                                                                'link_data': '10.169.197.94',
                                                                                'link_id': '10.169.197.252',
                                                                                'num_mtid_metrics': 0,
                                                                                'topologies': {
                                                                                    0: {
                                                                                        # 65535 reflects the "maximum link
                                                                                        # costs" exception flag in the output.
                                                                                        'metric': 65535,
                                                                                        'mt_id': 0,
                                                                                        'tos': 0,
                                                                                    },
                                                                                },
                                                                                'type': 'another router (point-to-point)',
                                                                            },
                                                                            '10.169.197.254': {
                                                                                'link_data': '255.255.255.255',
                                                                                'link_id': '10.169.197.254',
                                                                                'num_mtid_metrics': 0,
                                                                                'topologies': {
                                                                                    0: {
                                                                                        'metric': 1,
                                                                                        'mt_id': 0,
                                                                                        'tos': 0,
                                                                                    },
                                                                                },
                                                                                'type': 'stub network',
                                                                            },
                                                                            '10.169.197.92': {
                                                                                'link_data': '255.255.255.252',
                                                                                'link_id': '10.169.197.92',
                                                                                'num_mtid_metrics': 0,
                                                                                'topologies': {
                                                                                    0: {
                                                                                        'metric': 1000,
                                                                                        'mt_id': 0,
                                                                                        'tos': 0,
                                                                                    },
                                                                                },
                                                                                'type': 'stub network',
                                                                            },
                                                                        },
                                                                        'num_of_links': 3,
                                                                    },
                                                                },
                                                                'header': {
                                                                    'adv_router': '10.169.197.254',
                                                                    'age': 1141,
                                                                    'checksum': '0x1D38',
                                                                    'length': 60,
                                                                    'lsa_id': '10.169.197.254',
                                                                    'option': 'None',
                                                                    'option_desc': 'No TOS-capability, DC',
                                                                    'seq_num': '80000031',
                                                                    'type': 1,
                                                                },
                                                            },
                                                        },
                                                    },
                                                },
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw CLI capture for "show ip ospf database router self-originate".
    golden_output = {'execute.return_value':'''
Load for five secs: 1%/0%; one minute: 1%; five minutes: 1%
Time source is NTP, 00:59:52.329 EST Thu May 30 2019
OSPF Router with ID (10.169.197.254) (Process ID 65109)
Router Link States (Area 8)
Exception Flag: Announcing maximum link costs for topology Base with MTID 0
LS age: 1141
Options: (No TOS-capability, DC)
LS Type: Router Links
Link State ID: 10.169.197.254
Advertising Router: 10.169.197.254
LS Seq Number: 80000031
Checksum: 0x1D38
Length: 60
Number of Links: 3
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.169.197.254
(Link Data) Network Mask: 255.255.255.255
Number of MTID metrics: 0
TOS 0 Metrics: 1
Link connected to: another Router (point-to-point)
(Link ID) Neighboring Router ID: 10.169.197.252
(Link Data) Router Interface address: 10.169.197.94
Number of MTID metrics: 0
TOS 0 Metrics: 65535
Link connected to: a Stub Network
(Link ID) Network/subnet number: 10.169.197.92
(Link Data) Network Mask: 255.255.255.252
Number of MTID metrics: 0
TOS 0 Metrics: 1000
'''}

    # Fix: methods were named test_show_ip_ospf_neighbor_* (copy-paste from
    # the neighbor test class) although they exercise
    # ShowIpOspfDatabaseRouterSelfOriginate.
    def test_show_ip_ospf_database_router_self_originate_empty(self):
        '''Empty output must raise SchemaEmptyParserError.'''
        self.maxDiff = None
        self.device = Mock(**self.empty_output)
        obj = ShowIpOspfDatabaseRouterSelfOriginate(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_ip_ospf_database_router_self_originate_full1(self):
        '''Parse the golden self-originated router-LSA output.'''
        self.maxDiff = None
        self.device = Mock(**self.golden_output)
        obj = ShowIpOspfDatabaseRouterSelfOriginate(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)
# ===================================================
# Unit tests for:
# 'show ip ospf segment-routing global-block'
# 'show ip ospf {pid} segment-routing global-block'
# ===================================================
class show_ip_ospf_segment_routing_global_block(unittest.TestCase):
    '''Unit tests for:
       "show ip ospf segment-routing global-block"
       "show ip ospf {pid} segment-routing global-block"
    '''

    device = Device(name='aDevice')
    # Simulated device that returns no CLI output at all.
    empty_output = {'execute.return_value': ''}

    # Raw CLI capture for the per-process-ID command (process 1234).
    golden_output = {'execute.return_value': '''
show ip ospf 1234 segment-routing global-block
OSPF Router with ID (10.4.1.1) (Process ID 1234)
OSPF Segment Routing Global Blocks in Area 3
Router ID: SR Capable: SR Algorithm: SRGB Base: SRGB Range: SID/Label:
*10.4.1.1 Yes SPF,StrictSPF 16000 8000 Label
10.16.2.2 Yes SPF,StrictSPF 16000 8000 Label
'''}

    # Expected parse of golden_output: SR-capable routers report their
    # algorithm and SRGB; capability details come from the table rows.
    golden_parsed_output = {
        'process_id': {
            1234: {
                'router_id': '10.4.1.1',
                'area': 3,
                'routers': {
                    '10.4.1.1': {
                        'router_id': '10.4.1.1',
                        'sr_capable': 'Yes',
                        'sr_algorithm': 'SPF,StrictSPF',
                        'srgb_base': 16000,
                        'srgb_range': 8000,
                        'sid_label': 'Label'
                    },
                    '10.16.2.2': {
                        'router_id': '10.16.2.2',
                        'sr_capable': 'Yes',
                        'sr_algorithm': 'SPF,StrictSPF',
                        'srgb_base': 16000,
                        'srgb_range': 8000,
                        'sid_label': 'Label'
                    }
                }
            }
        }
    }

    # Raw CLI capture for the no-PID command (process 1); routers here
    # are not SR capable, so the SRGB columns are absent.
    golden_output_2 = {'execute.return_value': '''
show ip ospf segment-routing global-block
OSPF Router with ID (10.4.1.1) (Process ID 1)
OSPF Segment Routing Global Blocks in Area 0
Router ID: SR Capable: SR Algorithm: SRGB Base: SRGB Range: SID/Label:
*10.4.1.1 No
10.16.2.2 No
10.36.3.3 No
'''}

    # Expected parse of golden_output_2: only router_id and sr_capable
    # are present for non-SR-capable routers.
    golden_parsed_output_2 = {
        'process_id': {
            1: {
                'router_id': '10.4.1.1',
                'area': 0,
                'routers': {
                    '10.4.1.1': {
                        'router_id': '10.4.1.1',
                        'sr_capable': 'No'
                    },
                    '10.16.2.2': {
                        'router_id': '10.16.2.2',
                        'sr_capable': 'No'
                    },
                    '10.36.3.3': {
                        'router_id': '10.36.3.3',
                        'sr_capable': 'No'
                    }
                }
            }
        }
    }

    def test_show_ip_ospf_segment_routing_empty(self):
        '''Empty output must raise SchemaEmptyParserError.'''
        self.maxDiff = None
        self.device = Mock(**self.empty_output)
        obj = ShowIpOspfSegmentRoutingGlobalBlock(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_ip_ospf_segment_routing(self):
        '''Parse without a process_id argument.'''
        self.maxDiff = None
        self.device = Mock(**self.golden_output)
        obj = ShowIpOspfSegmentRoutingGlobalBlock(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_show_ip_ospf_segment_routing_pid(self):
        '''Parse with an explicit process_id (matches golden_output's command).'''
        self.maxDiff = None
        self.device = Mock(**self.golden_output)
        obj = ShowIpOspfSegmentRoutingGlobalBlock(device=self.device)
        parsed_output = obj.parse(process_id=1234)
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_show_ip_ospf_segment_routing_pid2(self):
        '''Parse the non-SR-capable variant.

        Fix: this test previously passed process_id=1234, but
        golden_output_2 is a capture of the no-PID command for process 1;
        it only passed because the Mock ignores the command string. Call
        parse() without a PID so the issued command matches the capture.
        '''
        self.maxDiff = None
        self.device = Mock(**self.golden_output_2)
        obj = ShowIpOspfSegmentRoutingGlobalBlock(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_2)
class test_show_ip_ospf_segment_routing_adjacency_sid(unittest.TestCase):
    ''' Test case for command:
        * show ip ospf {bgp_as} segment-routing adjacency-sid
    '''

    device = Device(name='aDevice')

    # Empty device output: parser is expected to raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}

    # NOTE(review): leading whitespace inside the recorded device output was
    # lost when this file's indentation was mangled; content preserved as-is.
    golden_output_1 = {'execute.return_value': '''
PE1#show ip ospf 65109 segment-routing adjacency-sid
OSPF Router with ID (10.4.1.1) (Process ID 65109)
Flags: S - Static, D - Dynamic, P - Protected, U - Unprotected, G - Group, L - Adjacency Lost
Adj-Sid Neighbor ID Interface Neighbor Addr Flags Backup Nexthop Backup Interface
-------- --------------- ------------------ --------------- ------- --------------- ------------------
16 10.16.2.2 Gi0/1/2 192.168.154.2 D U
17 10.16.2.2 Gi0/1/1 192.168.4.2 D U
18 10.16.2.2 Gi0/1/0 192.168.111.2 D U
19 10.16.2.2 Te0/0/0 192.168.220.2 D U
'''}

    # Expected structure: short interface names (Gi/Te) are expanded by the
    # parser to their full forms.
    parsed_output_1 = {
        'process_id': {
            '65109': {
                'router_id': '10.4.1.1',
                'adjacency_sids': {
                    '16': {
                        'flags': 'D U',
                        'interface': 'GigabitEthernet0/1/2',
                        'neighbor_address': '192.168.154.2',
                        'neighbor_id': '10.16.2.2'},
                    '17': {
                        'flags': 'D U',
                        'interface': 'GigabitEthernet0/1/1',
                        'neighbor_address': '192.168.4.2',
                        'neighbor_id': '10.16.2.2'},
                    '18': {
                        'flags': 'D U',
                        'interface': 'GigabitEthernet0/1/0',
                        'neighbor_address': '192.168.111.2',
                        'neighbor_id': '10.16.2.2'},
                    '19': {
                        'flags': 'D U',
                        'interface': 'TenGigabitEthernet0/0/0',
                        'neighbor_address': '192.168.220.2',
                        'neighbor_id': '10.16.2.2'}}}}}

    def test_show_ip_ospf_segment_routing_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.maxDiff = None
        self.device = Mock(**self.empty_output)
        obj = ShowIpOspfSegmentRoutingAdjacencySid(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_ip_ospf_segment_routing_1(self):
        """Golden-output test with an explicit process id."""
        self.maxDiff = None
        self.device = Mock(**self.golden_output_1)
        obj = ShowIpOspfSegmentRoutingAdjacencySid(device=self.device)
        parsed_output = obj.parse(process_id=65109)
        self.assertEqual(parsed_output, self.parsed_output_1)
# ================================================
# Unit test for 'show ip ospf fast-reroute ti-lfa'
# ================================================
class test_show_ip_ospf_fast_reroute_ti_lfa(unittest.TestCase):
    """Unit tests for 'show ip ospf fast-reroute ti-lfa'."""

    device = Device(name='aDevice')

    # Empty device output: parser is expected to raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}

    golden_parsed_output = {
        'process_id': {
            65109: {
                'router_id': '10.4.1.1',
                'ospf_object': {
                    'Process ID (65109)': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                    'Area 8': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                    'Loopback0': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'no',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                    'GigabitEthernet0/1/2': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                    'GigabitEthernet0/1/1': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                    'GigabitEthernet0/1/0': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                    # Device output truncates this interface name; keep it
                    # exactly as the device prints it.
                    'TenGigabitEthernet0/0/': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                    'AS external': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'no',
                    },
                },
            },
        },
    }

    # NOTE(review): leading whitespace inside the recorded device output was
    # lost when this file's indentation was mangled; content preserved as-is.
    golden_output = {'execute.return_value': '''
show ip ospf fast-reroute ti-lfa
OSPF Router with ID (10.4.1.1) (Process ID 65109)
OSPF IPFRR SR TI-LFA TI-LFA
Object enabled enabled configured enabled
--------------------------------------------------------------------
Process ID (65109) no yes no no
Area 8 no yes no no
Loopback0 no no no no
GigabitEthernet0/1/2 no yes no no
GigabitEthernet0/1/1 no yes no no
GigabitEthernet0/1/0 no yes no no
TenGigabitEthernet0/0/ no yes no no
AS external no yes no no
'''}

    golden_parsed_output2 = {
        'process_id': {
            65109: {
                'router_id': '10.4.1.1',
                'ospf_object': {
                    'Process ID (65109)': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'yes',
                        'ti_lfa_enabled': 'yes (inactive)',
                    },
                    'Area 8': {
                        'ipfrr_enabled': 'yes',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'yes',
                        'ti_lfa_enabled': 'yes',
                    },
                    'Loopback0': {
                        'ipfrr_enabled': 'yes',
                        'sr_enabled': 'no',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'yes (inactive)',
                    },
                    'GigabitEthernet5': {
                        'ipfrr_enabled': 'yes',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'yes',
                    },
                    'GigabitEthernet4': {
                        'ipfrr_enabled': 'yes',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'yes',
                    },
                    'GigabitEthernet3': {
                        'ipfrr_enabled': 'yes',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'yes',
                    },
                    'GigabitEthernet2': {
                        'ipfrr_enabled': 'yes',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'no',
                        'ti_lfa_enabled': 'yes',
                    },
                    'AS external': {
                        'ipfrr_enabled': 'no',
                        'sr_enabled': 'yes',
                        'ti_lfa_configured': 'yes',
                        'ti_lfa_enabled': 'yes (inactive)',
                    },
                },
            },
        },
    }

    golden_output2 = {'execute.return_value': '''
show ip ospf fast-reroute ti-lfa
OSPF Router with ID (10.4.1.1) (Process ID 65109)
OSPF IPFRR SR TI-LFA TI-LFA
Object enabled enabled configured enabled
--------------------------------------------------------------------
Process ID (65109) no yes yes yes (inactive)
Area 8 yes yes yes yes
Loopback0 yes no no yes (inactive)
GigabitEthernet5 yes yes no yes
GigabitEthernet4 yes yes no yes
GigabitEthernet3 yes yes no yes
GigabitEthernet2 yes yes no yes
AS external no yes yes yes (inactive)
'''}

    def test_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.device1 = Mock(**self.empty_output)
        obj = ShowIpOspfFastRerouteTiLfa(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        """Golden-output test: everything disabled except SR."""
        self.device = Mock(**self.golden_output)
        obj = ShowIpOspfFastRerouteTiLfa(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_golden2(self):
        """Golden-output test with TI-LFA enabled/inactive variants."""
        self.device = Mock(**self.golden_output2)
        obj = ShowIpOspfFastRerouteTiLfa(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output2)
# ===================================================================
# Unit test for 'show ip ospf segment-routing protected-adjacencies'
# ===================================================================
class test_show_ip_ospf_segment_routing_protected_adjacencies(unittest.TestCase):
    """Unit tests for 'show ip ospf segment-routing protected-adjacencies'."""

    device = Device(name='aDevice')

    # Empty device output: parser is expected to raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}

    # NOTE(review): leading whitespace inside the recorded device output was
    # lost when this file's indentation was mangled; content preserved as-is.
    golden_output = {'execute.return_value': '''
show ip ospf segment-routing protected-adjacencies
OSPF Router with ID (10.4.1.1) (Process ID 65109)
Area with ID (8)
Neighbor ID Interface Address Adj-Sid Backup Nexthop Backup Interface
--------------- ------------------ --------------- ------------ --------------- ------------------
10.151.22.22 Gi5 10.0.0.25 20 10.0.0.9 Gi3
10.151.22.22 Gi4 10.0.0.13 21 10.0.0.9 Gi3
10.229.11.11 Gi3 10.0.0.9 22 10.0.0.13 Gi4
'''}

    # Area "8" is normalised to dotted-quad form '0.0.0.8'; short interface
    # names are expanded.
    golden_parsed_output = {
        'process_id': {
            65109: {
                'router_id': '10.4.1.1',
                'areas': {
                    '0.0.0.8': {
                        'neighbors': {
                            '10.151.22.22': {
                                'interfaces': {
                                    'GigabitEthernet5': {
                                        'address': '10.0.0.25',
                                        'adj_sid': 20,
                                        'backup_nexthop': '10.0.0.9',
                                        'backup_interface': 'GigabitEthernet3',
                                    },
                                    'GigabitEthernet4': {
                                        'address': '10.0.0.13',
                                        'adj_sid': 21,
                                        'backup_nexthop': '10.0.0.9',
                                        'backup_interface': 'GigabitEthernet3',
                                    },
                                },
                            },
                            '10.229.11.11': {
                                'interfaces': {
                                    'GigabitEthernet3': {
                                        'address': '10.0.0.9',
                                        'adj_sid': 22,
                                        'backup_nexthop': '10.0.0.13',
                                        'backup_interface': 'GigabitEthernet4',
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    # Header-only output: no adjacency table at all.
    golden_output_2 = {'execute.return_value': '''
PE1#show ip ospf segment-routing protected-adjacencies
OSPF Router with ID (10.4.1.1) (Process ID 65109)
'''}

    parsed_output_2 = {
        'process_id': {
            65109: {
                'router_id': '10.4.1.1'
            }
        }
    }

    # Output with extra banner lines and a row without backup columns.
    golden_output_3 = {'execute.return_value':'''
show ip ospf segment-routing protected-adjacencies
Load for five secs: 0%/0%; one minute: 1%; five minutes: 1%
Time source is NTP, 15:31:18.236 EST Thu Oct 31 2019
OSPF Router with ID (10.16.2.2) (Process ID 65109)
Area with ID (8)
Neighbor ID Interface Address Adj-Sid Backup Nexthop Backup Interface
--------------- ------------------ --------------- ------------ --------------- ------------------
10.4.1.1 Gi0/1/6 10.16.2.2 17
'''}

    parsed_output_3 = {
        'process_id': {
            65109: {
                'router_id': '10.16.2.2',
                'areas': {
                    '0.0.0.8': {
                        'neighbors': {
                            '10.4.1.1': {
                                'interfaces': {
                                    'GigabitEthernet0/1/6': {
                                        'address': '10.16.2.2',
                                        'adj_sid': 17}}}}}}}}}

    def test_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.device1 = Mock(**self.empty_output)
        obj = ShowIpOspfSegmentRoutingProtectedAdjacencies(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        """Full adjacency table with backup next hops."""
        self.device = Mock(**self.golden_output)
        obj = ShowIpOspfSegmentRoutingProtectedAdjacencies(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_golden_2(self):
        """Header-only output parses to just process id and router id."""
        self.device = Mock(**self.golden_output_2)
        obj = ShowIpOspfSegmentRoutingProtectedAdjacencies(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.parsed_output_2)

    def test_golden_3(self):
        """Row without backup-nexthop/backup-interface columns."""
        self.device = Mock(**self.golden_output_3)
        obj = ShowIpOspfSegmentRoutingProtectedAdjacencies(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.parsed_output_3)
class test_show_ip_ospf_segment_routing_sid_database(unittest.TestCase):
    """ Test case for command:
        * show ip ospf segment-routing sid-database
    """

    device = Device(name='aDevice')

    # Empty device output: parser is expected to raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}

    # NOTE(review): leading whitespace inside the recorded device output was
    # lost when this file's indentation was mangled; content preserved as-is.
    golden_output = {'execute.return_value': '''
show ip ospf segment-routing sid-database
OSPF Router with ID (10.4.1.1) (Process ID 1234)
OSPF Segment Routing SIDs
Codes: L - local, N - label not programmed,
M - mapping-server
SID Prefix Adv-Rtr-Id Area-Id Type Algo
-------------- ------------------ --------------- ------- -------- ----
1 (L) 10.4.1.1/32 10.4.1.1 8 Intra 0
2 10.16.2.2/32 10.16.2.2 8 Intra 0
'''}

    golden_parsed_output = {
        'process_id': {
            1234: {
                'router_id': '10.4.1.1',
                'sids': {
                    'total_entries': 2,
                    1: {
                        'index': {
                            1: {
                                'prefix': '10.4.1.1/32',
                                'codes': 'L',
                                'adv_rtr_id': '10.4.1.1',
                                'area_id': '0.0.0.8',
                                'type': 'Intra',
                                'algo': 0
                            }
                        }
                    },
                    2: {
                        'index': {
                            1: {
                                'prefix': '10.16.2.2/32',
                                'adv_rtr_id': '10.16.2.2',
                                'area_id': '0.0.0.8',
                                'type': 'Intra',
                                'algo': 0
                            }
                        }
                    }
                }
            }
        }
    }

    # Header-only output: no SID table present.
    golden_parsed_output2 = {
        'process_id': {
            65109: {
                'router_id': '10.4.1.1',
            },
        },
    }

    golden_output2 = {'execute.return_value': '''
show ip ospf segment-routing sid-database
OSPF Router with ID (10.4.1.1) (Process ID 65109)
'''}

    # Covers: a SID with two prefixes (continuation row -> index 2), and a
    # mapping-server entry with Unknown type and no adv-rtr/area columns.
    golden_parsed_output3 = {
        'process_id': {
            65109: {
                'router_id': '10.4.1.1',
                'sids': {
                    'total_entries': 4,
                    1: {
                        'index': {
                            1: {
                                'prefix': '10.4.1.1/32',
                                'codes': 'L',
                                'adv_rtr_id': '10.4.1.1',
                                'area_id': '0.0.0.8',
                                'type': 'Intra',
                                'algo': 0
                            },
                            2: {
                                'prefix': '10.4.1.2/32',
                                'adv_rtr_id': '10.4.1.2',
                                'area_id': '0.0.0.8',
                                'type': 'Intra',
                                'algo': 0
                            }
                        }
                    },
                    11: {
                        'index': {
                            1: {
                                'prefix': '10.4.1.2/32',
                                'adv_rtr_id': '10.4.1.2',
                                'area_id': '0.0.0.8',
                                'type': 'Intra',
                                'algo': 0
                            }
                        }
                    },
                    45: {
                        'index': {
                            1: {
                                'prefix': '10.4.1.3/32',
                                'codes': 'M',
                                'type': 'Unknown',
                                'algo': 0
                            }
                        }
                    }
                }
            }
        }
    }

    golden_output3 = {'execute.return_value': '''
show ip ospf segment-routing sid-database
OSPF Router with ID (10.4.1.1) (Process ID 65109)
OSPF Segment Routing SIDs
Codes: L - local, N - label not programmed,
M - mapping-server
SID Prefix Adv-Rtr-Id Area-Id Type Algo
-------------- ------------------ --------------- ------- -------- ----
1 (L) 10.4.1.1/32 10.4.1.1 8 Intra 0
10.4.1.2/32 10.4.1.2 8 Intra 0
11 10.4.1.2/32 10.4.1.2 8 Intra 0
45 (M) 10.4.1.3/32 Unknown 0
'''}

    def test_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.maxDiff = None
        self.device = Mock(**self.empty_output)
        obj = ShowIpOspfSegmentRoutingSidDatabase(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden1(self):
        """Basic two-entry SID table."""
        self.maxDiff = None
        self.device = Mock(**self.golden_output)
        obj = ShowIpOspfSegmentRoutingSidDatabase(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_golden2(self):
        """Header-only output."""
        self.maxDiff = None
        self.device = Mock(**self.golden_output2)
        obj = ShowIpOspfSegmentRoutingSidDatabase(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output2)

    def test_golden3(self):
        """Continuation rows and mapping-server entries."""
        self.maxDiff = None
        self.device = Mock(**self.golden_output3)
        obj = ShowIpOspfSegmentRoutingSidDatabase(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output3)
# =============================================
# Unit test for 'show ip ospf segment-routing'
# =============================================
class test_show_ip_ospf_segment_routing(unittest.TestCase):
    """Unit tests for 'show ip ospf segment-routing'."""

    device = Device(name='aDevice')

    # Empty device output: parser is expected to raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}

    # NOTE(review): leading whitespace inside the recorded device output was
    # lost when this file's indentation was mangled; content preserved as-is.
    golden_output = {'execute.return_value': '''
show ip ospf segment-routing
OSPF Router with ID (10.16.2.2) (Process ID 65109)
Global segment-routing state: Enabled
Segment Routing enabled:
Area Topology name Forwarding Strict SPF
8 Base MPLS Capable
AS external Base MPLS Not applicable
SR Attributes
Prefer non-SR (LDP) Labels
Do not advertise Explicit Null
Global Block (SRGB):
Range: 16000 - 23999
State: Created
Local Block (SRLB):
Range: 15000 - 15999
State: Created
Registered with SR App, client handle: 2
SR algo 0 Connected map notifications active (handle 0x0), bitmask 0x1
SR algo 0 Active policy map notifications active (handle 0x2), bitmask 0xC
SR algo 1 Connected map notifications active (handle 0x1), bitmask 0x1
SR algo 1 Active policy map notifications active (handle 0x3), bitmask 0xC
Registered with MPLS, client-id: 100
Max labels: platform 16, available 13
Max labels pushed by OSPF: uloop tunnels 10, TI-LFA tunnels 10
mfi label reservation ack not pending
Bind Retry timer not running
Adj Label Bind Retry timer not running
sr-app locks requested: srgb 0, srlb 0
TEAPP:
TE Router ID 10.16.2.2
'''}

    golden_parsed_output = {
        'process_id': {
            65109: {
                'router_id': '10.16.2.2',
                # "Prefer non-SR (LDP) Labels" / "Do not advertise Explicit
                # Null" in the output map to False here.
                'sr_attributes': {
                    'sr_label_preferred': False,
                    'advertise_explicit_null': False,
                },
                'mfi_label_reservation_ack_pending': False,
                'bind_retry_timer_running': False,
                'adj_label_bind_retry_timer_running': False,
                'global_segment_routing_state': 'Enabled',
                'segment_routing_enabled': {
                    'area': {
                        '0.0.0.8': {
                            'topology_name': 'Base',
                            'forwarding': 'MPLS',
                            'strict_spf': 'Capable',
                        },
                        'AS external': {
                            'topology_name': 'Base',
                            'forwarding': 'MPLS',
                            'strict_spf': 'Not applicable',
                        },
                    },
                },
                'global_block_srgb': {
                    'range': {
                        'start': 16000,
                        'end': 23999,
                    },
                    'state': 'Created',
                },
                'local_block_srlb': {
                    'range': {
                        'start': 15000,
                        'end': 15999,
                    },
                    'state': 'Created',
                },
                'registered_with': {
                    'SR App': {
                        'client_handle': 2,
                        'sr_algo': {
                            0: {
                                'connected_map_notifications_active': {
                                    'handle': '0x0',
                                    'bit_mask': '0x1',
                                },
                                'active_policy_map_notifications_active': {
                                    'handle': '0x2',
                                    'bit_mask': '0xC',
                                },
                            },
                            1: {
                                'connected_map_notifications_active': {
                                    'handle': '0x1',
                                    'bit_mask': '0x1',
                                },
                                'active_policy_map_notifications_active': {
                                    'handle': '0x3',
                                    'bit_mask': '0xC',
                                },
                            },
                        },
                    },
                    'MPLS': {
                        'client_id': 100,
                    },
                },
                'max_labels': {
                    'platform': 16,
                    'available': 13,
                    'pushed_by_ospf': {
                        'uloop_tunnels': 10,
                        'ti_lfa_tunnels': 10,
                    },
                },
                'srp_app_locks_requested': {
                    'srgb': 0,
                    'srlb': 0,
                },
                'teapp': {
                    'te_router_id': '10.16.2.2',
                },
            },
        },
    }

    # Minimal output: segment routing not configured.
    golden_output2 = {'execute.return_value': '''
show ip ospf segment-routing
OSPF Router with ID (10.4.1.1) (Process ID 65109)
Global segment-routing state: Not configured
'''}

    # With no attribute lines present, the parser's defaults are all True.
    golden_parsed_output2 = {
        'process_id': {
            65109: {
                'router_id': '10.4.1.1',
                'sr_attributes': {
                    'sr_label_preferred': True,
                    'advertise_explicit_null': True,
                },
                'mfi_label_reservation_ack_pending': True,
                'bind_retry_timer_running': True,
                'adj_label_bind_retry_timer_running': True,
            },
        },
    }

    def test_empty(self):
        """Empty output must raise SchemaEmptyParserError."""
        self.device1 = Mock(**self.empty_output)
        obj = ShowIpOspfSegmentRouting(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        """Fully-populated segment-routing output."""
        self.device = Mock(**self.golden_output)
        obj = ShowIpOspfSegmentRouting(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_golden2(self):
        """'Not configured' output."""
        self.device = Mock(**self.golden_output2)
        obj = ShowIpOspfSegmentRouting(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output2)
# Script entry point: run all test cases in this module.
# BUGFIX: stripped dataset-metadata residue ("| 59.155287 | 157 | ...") that
# had been fused onto the unittest.main() line by the extraction process.
if __name__ == '__main__':
    unittest.main()
d4dca18731eb7e755f612629bb21595881af5747 | 59,251 | py | Python | test/python/test_identifiers.py | Shark-y/qiskit-sdk-py | c1361b823dc1a3fab76545e62975c2afb02e442d | [
"Apache-2.0"
] | null | null | null | test/python/test_identifiers.py | Shark-y/qiskit-sdk-py | c1361b823dc1a3fab76545e62975c2afb02e442d | [
"Apache-2.0"
] | 38 | 2017-08-04T09:57:36.000Z | 2017-08-23T10:35:32.000Z | test/python/test_identifiers.py | Shark-y/qiskit-sdk-py | c1361b823dc1a3fab76545e62975c2afb02e442d | [
"Apache-2.0"
] | 1 | 2017-08-18T08:22:50.000Z | 2017-08-18T08:22:50.000Z | # -*- coding: utf-8 -*-
# pylint: disable=invalid-name,missing-docstring,broad-except
# Copyright 2018 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Non-string identifiers for circuit and record identifiers test"""
import unittest
from qiskit import (ClassicalRegister, QISKitError, QuantumCircuit,
QuantumRegister, QuantumProgram)
from .common import QiskitTestCase
class TestAnonymousIds(QiskitTestCase):
    """Circuits and records can have no name"""

    def setUp(self):
        # Specs with sizes only: neither circuit nor registers carry names.
        self.QPS_SPECS_NONAMES = {
            "circuits": [{
                "quantum_registers": [{
                    "size": 3}],
                "classical_registers": [{
                    "size": 3}]
            }]
        }

    ###############################################################
    # Tests to initiate and build a quantum program with anonymous ids
    ###############################################################

    def test_create_program_with_specs_nonames(self):
        """Test Quantum Object Factory creation using Specs definition
        object with no names for circuit nor records.
        """
        result = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        self.assertIsInstance(result, QuantumProgram)

    def test_create_anonymous_classical_register(self):
        """Test create_classical_register with no name.
        """
        q_program = QuantumProgram()
        cr = q_program.create_classical_register(size=3)
        self.assertIsInstance(cr, ClassicalRegister)

    def test_create_anonymous_quantum_register(self):
        """Test create_quantum_register with no name.
        """
        q_program = QuantumProgram()
        qr = q_program.create_quantum_register(size=3)
        self.assertIsInstance(qr, QuantumRegister)

    def test_create_classical_registers_noname(self):
        """Test create_classical_registers with no name
        """
        q_program = QuantumProgram()
        classical_registers = [{"size": 4},
                               {"size": 2}]
        crs = q_program.create_classical_registers(classical_registers)
        for i in crs:
            self.assertIsInstance(i, ClassicalRegister)

    def test_create_quantum_registers_noname(self):
        """Test create_quantum_registers with no name.
        """
        q_program = QuantumProgram()
        quantum_registers = [{"size": 4},
                             {"size": 2}]
        qrs = q_program.create_quantum_registers(quantum_registers)
        for i in qrs:
            self.assertIsInstance(i, QuantumRegister)

    def test_create_circuit_noname(self):
        """Test create_circuit with no name
        """
        q_program = QuantumProgram()
        qr = q_program.create_quantum_register(size=3)
        cr = q_program.create_classical_register(size=3)
        qc = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        self.assertIsInstance(qc, QuantumCircuit)

    def test_create_several_circuits_noname(self):
        """Test create_circuit with several inputs and without names.
        """
        q_program = QuantumProgram()
        qr1 = q_program.create_quantum_register(size=3)
        cr1 = q_program.create_classical_register(size=3)
        qr2 = q_program.create_quantum_register(size=3)
        cr2 = q_program.create_classical_register(size=3)
        qc1 = q_program.create_circuit(qregisters=[qr1], cregisters=[cr1])
        qc2 = q_program.create_circuit(qregisters=[qr2], cregisters=[cr2])
        qc3 = q_program.create_circuit(qregisters=[qr1, qr2], cregisters=[cr1, cr2])
        self.assertIsInstance(qc1, QuantumCircuit)
        self.assertIsInstance(qc2, QuantumCircuit)
        self.assertIsInstance(qc3, QuantumCircuit)

    def test_get_register_and_circuit_names_nonames(self):
        """Get the names of the circuits and registers after create them without a name
        """
        q_program = QuantumProgram()
        qr1 = q_program.create_quantum_register(size=3)
        cr1 = q_program.create_classical_register(size=3)
        qr2 = q_program.create_quantum_register(size=3)
        cr2 = q_program.create_classical_register(size=3)
        q_program.create_circuit(qregisters=[qr1], cregisters=[cr1])
        q_program.create_circuit(qregisters=[qr2], cregisters=[cr2])
        q_program.create_circuit(qregisters=[qr1, qr2], cregisters=[cr1, cr2])
        qrn = q_program.get_quantum_register_names()
        crn = q_program.get_classical_register_names()
        qcn = q_program.get_circuit_names()
        self.assertEqual(len(qrn), 2)
        self.assertEqual(len(crn), 2)
        self.assertEqual(len(qcn), 3)

    def test_get_circuit_noname(self):
        """Getting the (single) circuit without supplying its name."""
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        qc = q_program.get_circuit()
        self.assertIsInstance(qc, QuantumCircuit)

    def test_get_quantum_register_noname(self):
        """Getting the (single) quantum register without supplying its name."""
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        qr = q_program.get_quantum_register()
        self.assertIsInstance(qr, QuantumRegister)

    def test_get_classical_register_noname(self):
        """Getting the (single) classical register without supplying its name."""
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        cr = q_program.get_classical_register()
        self.assertIsInstance(cr, ClassicalRegister)

    def test_get_qasm_noname(self):
        """Test the get_qasm using an specification without names.
        """
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        qc = q_program.get_circuit()
        qrn = list(q_program.get_quantum_register_names())
        self.assertEqual(len(qrn), 1)
        qr = q_program.get_quantum_register(qrn[0])
        crn = list(q_program.get_classical_register_names())
        self.assertEqual(len(crn), 1)
        cr = q_program.get_classical_register(crn[0])
        qc.h(qr[0])
        qc.cx(qr[0], qr[1])
        qc.cx(qr[1], qr[2])
        qc.measure(qr[0], cr[0])
        qc.measure(qr[1], cr[1])
        qc.measure(qr[2], cr[2])
        result = q_program.get_qasm()
        # QASM length depends on the auto-generated register names: each
        # quantum-register reference appears 9 times, classical 4 times.
        self.assertEqual(len(result), len(qrn[0]) * 9 + len(crn[0]) * 4 + 147)

    def test_get_qasms_noname(self):
        """Test the get_qasms from a qprogram without names.
        """
        q_program = QuantumProgram()
        qr = q_program.create_quantum_register(size=3)
        cr = q_program.create_classical_register(size=3)
        qc1 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        qc2 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        qc1.h(qr[0])
        qc1.cx(qr[0], qr[1])
        qc1.cx(qr[1], qr[2])
        qc1.measure(qr[0], cr[0])
        qc1.measure(qr[1], cr[1])
        qc1.measure(qr[2], cr[2])
        qc2.h(qr)
        qc2.measure(qr[0], cr[0])
        qc2.measure(qr[1], cr[1])
        qc2.measure(qr[2], cr[2])
        results = dict(zip(q_program.get_circuit_names(), q_program.get_qasms()))
        qr_name_len = len(qr.openqasm_name)
        cr_name_len = len(cr.openqasm_name)
        self.assertEqual(len(results[qc1.name]), qr_name_len * 9 + cr_name_len * 4 + 147)
        self.assertEqual(len(results[qc2.name]), qr_name_len * 7 + cr_name_len * 4 + 137)

    def test_get_qasm_all_gates(self):
        """Test the get_qasm for more gates, using an specification without names.
        """
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        qc = q_program.get_circuit()
        qr = q_program.get_quantum_register()
        cr = q_program.get_classical_register()
        qc.u1(0.3, qr[0])
        qc.u2(0.2, 0.1, qr[1])
        qc.u3(0.3, 0.2, 0.1, qr[2])
        qc.s(qr[1])
        qc.s(qr[2]).inverse()
        qc.cx(qr[1], qr[2])
        qc.barrier()
        qc.cx(qr[0], qr[1])
        qc.h(qr[0])
        qc.x(qr[2]).c_if(cr, 0)
        qc.y(qr[2]).c_if(cr, 1)
        qc.z(qr[2]).c_if(cr, 2)
        qc.barrier(qr)
        qc.measure(qr[0], cr[0])
        qc.measure(qr[1], cr[1])
        qc.measure(qr[2], cr[2])
        result = q_program.get_qasm()
        self.assertEqual(len(result), (len(qr.openqasm_name) * 23 +
                                       len(cr.openqasm_name) * 7 +
                                       385))

    ###############################################################
    # Test for compile
    ###############################################################

    def test_compile_program_noname(self):
        """Test compile with a no name.
        """
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        qc = q_program.get_circuit()
        qr = q_program.get_quantum_register()
        cr = q_program.get_classical_register()
        qc.h(qr[0])
        qc.cx(qr[0], qr[1])
        qc.measure(qr[0], cr[0])
        qc.measure(qr[1], cr[1])
        out = q_program.compile()
        self.log.info(out)
        self.assertEqual(len(out), 3)

    def test_get_execution_list_noname(self):
        """Test get_execution_list for circuits without name.
        """
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        qc = q_program.get_circuit()
        qr = q_program.get_quantum_register()
        cr = q_program.get_classical_register()
        qc.h(qr[0])
        qc.cx(qr[0], qr[1])
        qc.measure(qr[0], cr[0])
        qc.measure(qr[1], cr[1])
        qobj = q_program.compile()
        result = q_program.get_execution_list(qobj, print_func=self.log.info)
        self.assertEqual(len(result), 1)

    def test_change_circuit_qobj_after_compile_noname(self):
        """Mutating a compiled qobj must not leak into later compiles."""
        q_program = QuantumProgram(specs=self.QPS_SPECS_NONAMES)
        qr = q_program.get_quantum_register()
        cr = q_program.get_classical_register()
        qc2 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        qc3 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        qc2.h(qr[0])
        qc2.cx(qr[0], qr[1])
        qc2.cx(qr[0], qr[2])
        qc3.h(qr)
        qc2.measure(qr, cr)
        qc3.measure(qr, cr)
        circuits = [qc2.name, qc3.name]
        shots = 1024  # the number of shots in the experiment.
        backend = 'local_qasm_simulator'
        config = {'seed': 10, 'shots': 1, 'xvals': [1, 2, 3, 4]}
        qobj1 = q_program.compile(circuits, backend=backend, shots=shots, seed=88, config=config)
        # Mutate qobj1 and the shared config dict; qobj2 must only see the
        # config changes made before its own compile call.
        qobj1['circuits'][0]['config']['shots'] = 50
        qobj1['circuits'][0]['config']['xvals'] = [1, 1, 1]
        config['shots'] = 1000
        config['xvals'][0] = 'only for qobj2'
        qobj2 = q_program.compile(circuits, backend=backend, shots=shots, seed=88, config=config)
        self.assertTrue(qobj1['circuits'][0]['config']['shots'] == 50)
        self.assertTrue(qobj1['circuits'][1]['config']['shots'] == 1)
        self.assertTrue(qobj1['circuits'][0]['config']['xvals'] == [1, 1, 1])
        self.assertTrue(qobj1['circuits'][1]['config']['xvals'] == [1, 2, 3, 4])
        self.assertTrue(qobj1['config']['shots'] == 1024)
        self.assertTrue(qobj2['circuits'][0]['config']['shots'] == 1000)
        self.assertTrue(qobj2['circuits'][1]['config']['shots'] == 1000)
        self.assertTrue(qobj2['circuits'][0]['config']['xvals'] == [
            'only for qobj2', 2, 3, 4])
        self.assertTrue(qobj2['circuits'][1]['config']['xvals'] == [
            'only for qobj2', 2, 3, 4])

    def test_add_circuit_noname(self):
        """Test add two circuits without names. Also tests get_counts without circuit name.
        """
        q_program = QuantumProgram()
        qr = q_program.create_quantum_register(size=2)
        cr = q_program.create_classical_register(size=2)
        qc1 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        qc2 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        qc1.h(qr[0])
        qc1.measure(qr[0], cr[0])
        qc2.measure(qr[1], cr[1])
        new_circuit = qc1 + qc2
        q_program.add_circuit(quantum_circuit=new_circuit)
        backend = 'local_qasm_simulator'  # the backend to run on
        shots = 1024  # the number of shots in the experiment.
        result = q_program.execute(backend=backend, shots=shots, seed=78)
        counts = result.get_counts(new_circuit.name)
        target = {'00': shots / 2, '01': shots / 2}
        threshold = 0.025 * shots
        self.assertDictAlmostEqual(counts, target, threshold)
        # With several circuits present, get_counts without a name must fail.
        self.assertRaises(QISKitError, result.get_counts)
class TestZeroIds(QiskitTestCase):
"""Circuits and records can have zero as names"""
def setUp(self):
    """Specs using 0 and "" as names for circuit and registers."""
    self.QPS_SPECS_ZEROS = {
        "circuits": [{
            "name": 0,
            "quantum_registers": [{
                "name": 0,
                "size": 3}],
            "classical_registers": [{
                "name": "",
                "size": 3}]
        }]
    }
###############################################################
# Tests to initiate an build a quantum program with zeros ids
###############################################################
def test_create_program_with_specs(self):
    """Test Quantum Object Factory creation using Specs definition
    object with zeros names for circuit nor records.
    """
    result = QuantumProgram(specs=self.QPS_SPECS_ZEROS)
    self.assertIsInstance(result, QuantumProgram)
def test_create_classical_register(self):
    """Test create_classical_register with zero name
    """
    q_program = QuantumProgram()
    cr = q_program.create_classical_register(0, 3)
    self.assertIsInstance(cr, ClassicalRegister)
def test_create_quantum_register(self):
    """Test create_quantum_register with zero name.
    """
    q_program = QuantumProgram()
    qr = q_program.create_quantum_register(0, 3)
    self.assertIsInstance(qr, QuantumRegister)
def test_fail_create_classical_register_name(self):
    """Test duplicated create_classical_register with zeros as names.

    Re-creating name 0 with a DIFFERENT size must raise QISKitError.
    """
    q_program = QuantumProgram()
    cr1 = q_program.create_classical_register(0, 3)
    self.assertIsInstance(cr1, ClassicalRegister)
    self.assertRaises(QISKitError,
                      q_program.create_classical_register, 0, 2)
def test_create_quantum_register_same(self):
    """Test create_quantum_register of same name (a zero) and size.

    Same name and same size must return the SAME register object.
    """
    q_program = QuantumProgram()
    qr1 = q_program.create_quantum_register(0, 3)
    qr2 = q_program.create_quantum_register(0, 3)
    self.assertIs(qr1, qr2)
def test_create_classical_register_same(self):
    """Test create_classical_register of same name (a zero) and size.

    Same name and same size must return the SAME register object.
    """
    q_program = QuantumProgram()
    cr1 = q_program.create_classical_register(0, 3)
    cr2 = q_program.create_classical_register(0, 3)
    self.assertIs(cr1, cr2)
def test_create_classical_registers(self):
    """Test create_classical_registers with 0 as a name.
    """
    q_program = QuantumProgram()
    classical_registers = [{"name": 0, "size": 4},
                           {"name": "", "size": 2}]
    crs = q_program.create_classical_registers(classical_registers)
    for i in crs:
        self.assertIsInstance(i, ClassicalRegister)
def test_create_quantum_registers(self):
    """Test create_quantum_registers with 0 as names
    """
    q_program = QuantumProgram()
    quantum_registers = [{"name": 0, "size": 4},
                         {"name": "", "size": 2}]
    qrs = q_program.create_quantum_registers(quantum_registers)
    for i in qrs:
        self.assertIsInstance(i, QuantumRegister)
def test_destroy_classical_register(self):
    """Test destroy_classical_register with 0 as name."""
    program = QuantumProgram()
    program.create_classical_register(0, 3)
    self.assertIn(0, program.get_classical_register_names())
    program.destroy_classical_register(0)
    self.assertNotIn(0, program.get_classical_register_names())
    # Destroying it a second time must fail: it is already gone.
    with self.assertRaises(QISKitError) as context:
        program.destroy_classical_register(0)
    self.assertIn('Not present', str(context.exception))
def test_destroy_quantum_register(self):
    """Test destroy_quantum_register with 0 as name."""
    program = QuantumProgram()
    program.create_quantum_register(0, 3)
    self.assertIn(0, program.get_quantum_register_names())
    program.destroy_quantum_register(0)
    self.assertNotIn(0, program.get_quantum_register_names())
    # Destroying it a second time must fail: it is already gone.
    with self.assertRaises(QISKitError) as context:
        program.destroy_quantum_register(0)
    self.assertIn('Not present', str(context.exception))
def test_create_circuit(self):
    """Test create_circuit with 0 as a name."""
    program = QuantumProgram()
    quantum = program.create_quantum_register(0, 3)
    classical = program.create_classical_register("", 3)
    circuit = program.create_circuit(0, [quantum], [classical])
    self.assertIsInstance(circuit, QuantumCircuit)
def test_create_several_circuits(self):
    """Test create_circuit with several inputs with int names."""
    program = QuantumProgram()
    qr1 = program.create_quantum_register(10, 3)
    cr1 = program.create_classical_register(20, 3)
    qr2 = program.create_quantum_register(11, 3)
    cr2 = program.create_classical_register(21, 3)
    circuits = [program.create_circuit(30, [qr1], [cr1]),
                program.create_circuit(31, [qr2], [cr2]),
                program.create_circuit(32, [qr1, qr2], [cr1, cr2])]
    for circuit in circuits:
        self.assertIsInstance(circuit, QuantumCircuit)
def test_destroy_circuit(self):
    """Test destroy_circuit with an int name."""
    program = QuantumProgram()
    quantum = program.create_quantum_register(2, 3)
    classical = program.create_classical_register(1, 3)
    program.create_circuit(10, [quantum], [classical])
    self.assertIn(10, program.get_circuit_names())
    program.destroy_circuit(10)
    self.assertNotIn(10, program.get_circuit_names())
    # Destroying it a second time must fail: it is already gone.
    with self.assertRaises(QISKitError) as context:
        program.destroy_circuit(10)
    self.assertIn('Not present', str(context.exception))
def test_get_register_and_circuit_names(self):
    """Get the names of the circuits and registers when their names are ints.

    Creates two quantum registers, two classical registers and three
    circuits, then checks that the name listings report exactly the
    names that were used.
    """
    qr1n = 10
    qr2n = 11
    cr1n = 12
    cr2n = 13
    qc1n = 14
    qc2n = 15
    qc3n = 16
    q_program = QuantumProgram()
    qr1 = q_program.create_quantum_register(qr1n, 3)
    cr1 = q_program.create_classical_register(cr1n, 3)
    qr2 = q_program.create_quantum_register(qr2n, 3)
    cr2 = q_program.create_classical_register(cr2n, 3)
    q_program.create_circuit(qc1n, [qr1], [cr1])
    q_program.create_circuit(qc2n, [qr2], [cr2])
    # Bug fix: this call previously reused qc2n, silently replacing the
    # second circuit instead of adding a third one with its own name.
    q_program.create_circuit(qc3n, [qr1, qr2], [cr1, cr2])
    qrn = q_program.get_quantum_register_names()
    crn = q_program.get_classical_register_names()
    qcn = q_program.get_circuit_names()
    self.assertCountEqual(qrn, [qr1n, qr2n])
    self.assertCountEqual(crn, [cr1n, cr2n])
    self.assertCountEqual(qcn, [qc1n, qc2n, qc3n])
def test_get_qasm(self):
"""Test the get_qasm with int name. They need to be converted to OpenQASM format.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_ZEROS)
qc = q_program.get_circuit(0)
qr = q_program.get_quantum_register(0)
cr = q_program.get_classical_register("")
# GHZ-style circuit: Hadamard plus a CNOT chain, then measure all qubits.
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.cx(qr[1], qr[2])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
qc.measure(qr[2], cr[2])
result = q_program.get_qasm(0)
# Expected QASM length: fixed boilerplate (147 chars for this circuit)
# plus the OpenQASM-sanitized register names, one occurrence per use
# site -- TODO confirm the constants if the QASM generator changes.
self.assertEqual(len(result), (147 +
len(qr.openqasm_name) * 9 +
len(cr.openqasm_name) * 4))
def test_get_qasms(self):
"""Test the get_qasms with int names. They need to be converted to OpenQASM format.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register(10, 3)
cr = q_program.create_classical_register(20, 3)
qc1 = q_program.create_circuit(101, [qr], [cr])
qc2 = q_program.create_circuit(102, [qr], [cr])
# qc1: H plus a CNOT chain; qc2: H on the whole register.
# Both measure every qubit into the classical register.
qc1.h(qr[0])
qc1.cx(qr[0], qr[1])
qc1.cx(qr[1], qr[2])
qc1.measure(qr[0], cr[0])
qc1.measure(qr[1], cr[1])
qc1.measure(qr[2], cr[2])
qc2.h(qr)
qc2.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc2.measure(qr[2], cr[2])
result = q_program.get_qasms([101, 102])
# Expected lengths: fixed boilerplate per circuit plus the
# OpenQASM-sanitized register names, one occurrence per use site --
# TODO confirm the constants if the QASM generator changes.
self.assertEqual(len(result[0]), (147 +
len(qr.openqasm_name) * 9 +
len(cr.openqasm_name) * 4))
self.assertEqual(len(result[1]), (137 +
len(qr.openqasm_name) * 7 +
len(cr.openqasm_name) * 4))
def test_get_qasm_all_gates(self):
"""Test the get_qasm for more gates. Names are ints.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_ZEROS)
qc = q_program.get_circuit(0)
qr = q_program.get_quantum_register(0)
cr = q_program.get_classical_register("")
qc.u1(0.3, qr[0])
qc.u2(0.2, 0.1, qr[1])
qc.u3(0.3, 0.2, 0.1, qr[2])
qc.s(qr[1])
# NOTE(review): inverse() is called on the gate already appended to the
# circuit -- presumably it mutates that instruction in place (s -> sdg);
# confirm against the gate API.
qc.s(qr[2]).inverse()
qc.cx(qr[1], qr[2])
qc.barrier()
qc.cx(qr[0], qr[1])
qc.h(qr[0])
# Gates conditioned on the value of the whole classical register.
qc.x(qr[2]).c_if(cr, 0)
qc.y(qr[2]).c_if(cr, 1)
qc.z(qr[2]).c_if(cr, 2)
qc.barrier(qr)
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
qc.measure(qr[2], cr[2])
result = q_program.get_qasm(0)
# Fixed boilerplate (385 chars) plus sanitized register names per use
# site -- TODO confirm the constants if the QASM generator changes.
self.assertEqual(len(result), (385 +
len(qr.openqasm_name) * 23 +
len(cr.openqasm_name) * 7))
###############################################################
# Test for compile when names are integers
###############################################################
def test_compile_program(self):
    """Test compile_program. Names are integers."""
    program = QuantumProgram(specs=self.QPS_SPECS_ZEROS)
    circuit = program.get_circuit(0)
    quantum = program.get_quantum_register(0)
    classical = program.get_classical_register("")
    circuit.h(quantum[0])
    circuit.cx(quantum[0], quantum[1])
    circuit.measure(quantum[0], classical[0])
    circuit.measure(quantum[1], classical[1])
    out = program.compile([0], backend='local_qasm_simulator',
                          coupling_map=None, qobj_id='cooljob')
    self.log.info(out)
    self.assertEqual(len(out), 3)
def test_get_execution_list(self):
    """Test get_execution_list with int names."""
    program = QuantumProgram(specs=self.QPS_SPECS_ZEROS)
    circuit = program.get_circuit(0)
    quantum = program.get_quantum_register(0)
    classical = program.get_classical_register("")
    circuit.h(quantum[0])
    circuit.cx(quantum[0], quantum[1])
    circuit.measure(quantum[0], classical[0])
    circuit.measure(quantum[1], classical[1])
    qobj = program.compile([0], backend='local_qasm_simulator',
                           coupling_map=None, qobj_id='cooljob')
    result = program.get_execution_list(qobj, print_func=self.log.info)
    self.log.info(result)
    self.assertEqual(result, [0])
def test_change_circuit_qobj_after_compile(self):
"""Compiled qobjs must be isolated copies: edits made to one qobj, or
to the shared config dict after compiling, must not leak into a qobj
compiled later from the same inputs.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_ZEROS)
qr = q_program.get_quantum_register(0)
cr = q_program.get_classical_register("")
qc2 = q_program.create_circuit(102, [qr], [cr])
qc3 = q_program.create_circuit(103, [qr], [cr])
qc2.h(qr[0])
qc2.cx(qr[0], qr[1])
qc2.cx(qr[0], qr[2])
qc3.h(qr)
qc2.measure(qr, cr)
qc3.measure(qr, cr)
circuits = [102, 103]
shots = 1024 # the number of shots in the experiment.
backend = 'local_qasm_simulator'
config = {'seed': 10, 'shots': 1, 'xvals': [1, 2, 3, 4]}
qobj1 = q_program.compile(circuits, backend=backend, shots=shots,
seed=88, config=config)
# Mutate qobj1 after compilation; qobj2 must not observe this.
qobj1['circuits'][0]['config']['shots'] = 50
qobj1['circuits'][0]['config']['xvals'] = [1, 1, 1]
# Mutate the shared config dict; only the later compile should see it.
config['shots'] = 1000
config['xvals'][0] = 'only for qobj2'
qobj2 = q_program.compile(circuits, backend=backend, shots=shots,
seed=88, config=config)
self.assertTrue(qobj1['circuits'][0]['config']['shots'] == 50)
self.assertTrue(qobj1['circuits'][1]['config']['shots'] == 1)
self.assertTrue(qobj1['circuits'][0]['config']['xvals'] == [1, 1, 1])
self.assertTrue(qobj1['circuits'][1]['config']['xvals'] == [1, 2, 3, 4])
self.assertTrue(qobj1['config']['shots'] == 1024)
self.assertTrue(qobj2['circuits'][0]['config']['shots'] == 1000)
self.assertTrue(qobj2['circuits'][1]['config']['shots'] == 1000)
self.assertTrue(qobj2['circuits'][0]['config']['xvals'] == [
'only for qobj2', 2, 3, 4])
self.assertTrue(qobj2['circuits'][1]['config']['xvals'] == [
'only for qobj2', 2, 3, 4])
def test_add_circuit(self):
    """Test add two circuits with zero names."""
    program = QuantumProgram()
    quantum = program.create_quantum_register(0, 2)
    classical = program.create_classical_register("", 2)
    first = program.create_circuit(0, [quantum], [classical])
    second = program.create_circuit("", [quantum], [classical])
    first.h(quantum[0])
    first.measure(quantum[0], classical[0])
    second.measure(quantum[1], classical[1])
    # Concatenate the circuits and register the result under a new name.
    program.add_circuit(1001, first + second)
    shots = 1024  # the number of shots in the experiment.
    result = program.execute([1001], backend='local_qasm_simulator',
                             shots=shots, seed=78)
    counts = result.get_counts(1001)
    target = {'00': shots / 2, '01': shots / 2}
    self.assertDictAlmostEqual(counts, target, 0.025 * shots)
class TestIntegerIds(QiskitTestCase):
"""Circuits and records can have integers as names"""
def setUp(self):
# Spec with one circuit (name 1) over a 3-qubit quantum register
# (name 40) and a 3-bit classical register (name 50); all names ints.
self.QPS_SPECS_INT = {
"circuits": [{
"name": 1,
"quantum_registers": [{
"name": 40,
"size": 3}],
"classical_registers": [{
"name": 50,
"size": 3}]
}]
}
###############################################################
# Tests to initiate and build a quantum program with integer ids
###############################################################
def test_create_program_with_specs(self):
"""Test Quantum Object Factory creation using Specs definition
object with int names for circuits and records.
"""
result = QuantumProgram(specs=self.QPS_SPECS_INT)
self.assertIsInstance(result, QuantumProgram)
def test_create_classical_register(self):
"""Test create_classical_register with int name
"""
q_program = QuantumProgram()
cr = q_program.create_classical_register(42, 3)
self.assertIsInstance(cr, ClassicalRegister)
def test_create_quantum_register(self):
"""Test create_quantum_register with int name.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register(32, 3)
self.assertIsInstance(qr, QuantumRegister)
def test_fail_create_classical_register_name(self):
"""Test duplicated create_quantum_register with int as names.
"""
q_program = QuantumProgram()
cr1 = q_program.create_classical_register(2, 3)
self.assertIsInstance(cr1, ClassicalRegister)
# Same name, different size: must be rejected.
self.assertRaises(QISKitError,
q_program.create_classical_register, 2, 2)
def test_create_quantum_register_same(self):
"""Test create_quantum_register of same int name and size.
"""
q_program = QuantumProgram()
qr1 = q_program.create_quantum_register(1, 3)
qr2 = q_program.create_quantum_register(1, 3)
self.assertIs(qr1, qr2)
def test_create_classical_register_same(self):
"""Test create_classical_register of same int name and size.
"""
q_program = QuantumProgram()
cr1 = q_program.create_classical_register(2, 3)
cr2 = q_program.create_classical_register(2, 3)
self.assertIs(cr1, cr2)
def test_create_classical_registers(self):
"""Test create_classical_registers with int name.
"""
q_program = QuantumProgram()
classical_registers = [{"name": 1, "size": 4},
{"name": 2, "size": 2}]
crs = q_program.create_classical_registers(classical_registers)
for i in crs:
self.assertIsInstance(i, ClassicalRegister)
def test_create_quantum_registers(self):
"""Test create_quantum_registers with int names
"""
q_program = QuantumProgram()
quantum_registers = [{"name": 1, "size": 4},
{"name": 2, "size": 2}]
qrs = q_program.create_quantum_registers(quantum_registers)
for i in qrs:
self.assertIsInstance(i, QuantumRegister)
def test_destroy_classical_register(self):
"""Test destroy_classical_register with int name."""
q_program = QuantumProgram()
_ = q_program.create_classical_register(1, 3)
self.assertIn(1, q_program.get_classical_register_names())
q_program.destroy_classical_register(1)
self.assertNotIn(1, q_program.get_classical_register_names())
# Destroying an invalid register should fail.
with self.assertRaises(QISKitError) as context:
q_program.destroy_classical_register(1)
self.assertIn('Not present', str(context.exception))
def test_destroy_quantum_register(self):
"""Test destroy_quantum_register with int name."""
q_program = QuantumProgram()
_ = q_program.create_quantum_register(1, 3)
self.assertIn(1, q_program.get_quantum_register_names())
q_program.destroy_quantum_register(1)
self.assertNotIn(1, q_program.get_quantum_register_names())
# Destroying an invalid register should fail.
with self.assertRaises(QISKitError) as context:
q_program.destroy_quantum_register(1)
self.assertIn('Not present', str(context.exception))
def test_create_circuit(self):
"""Test create_circuit with int names.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register(1, 3)
cr = q_program.create_classical_register(2, 3)
qc = q_program.create_circuit(3, [qr], [cr])
self.assertIsInstance(qc, QuantumCircuit)
def test_create_several_circuits(self):
"""Test create_circuit with several inputs with int names.
"""
q_program = QuantumProgram()
qr1 = q_program.create_quantum_register(10, 3)
cr1 = q_program.create_classical_register(20, 3)
qr2 = q_program.create_quantum_register(11, 3)
cr2 = q_program.create_classical_register(21, 3)
qc1 = q_program.create_circuit(30, [qr1], [cr1])
qc2 = q_program.create_circuit(31, [qr2], [cr2])
qc3 = q_program.create_circuit(32, [qr1, qr2], [cr1, cr2])
self.assertIsInstance(qc1, QuantumCircuit)
self.assertIsInstance(qc2, QuantumCircuit)
self.assertIsInstance(qc3, QuantumCircuit)
def test_destroy_circuit(self):
"""Test destroy_circuit with an int name."""
q_program = QuantumProgram()
qr = q_program.create_quantum_register(2, 3)
cr = q_program.create_classical_register(1, 3)
_ = q_program.create_circuit(10, [qr], [cr])
self.assertIn(10, q_program.get_circuit_names())
q_program.destroy_circuit(10)
self.assertNotIn(10, q_program.get_circuit_names())
# Destroying an invalid circuit should fail.
with self.assertRaises(QISKitError) as context:
q_program.destroy_circuit(10)
self.assertIn('Not present', str(context.exception))
def test_get_register_and_circuit_names(self):
"""Get the names of the circuits and registers when their names are ints.
"""
qr1n = 10
qr2n = 11
cr1n = 12
cr2n = 13
qc1n = 14
qc2n = 15
q_program = QuantumProgram()
qr1 = q_program.create_quantum_register(qr1n, 3)
cr1 = q_program.create_classical_register(cr1n, 3)
qr2 = q_program.create_quantum_register(qr2n, 3)
cr2 = q_program.create_classical_register(cr2n, 3)
q_program.create_circuit(qc1n, [qr1], [cr1])
q_program.create_circuit(qc2n, [qr2], [cr2])
# NOTE(review): qc2n is reused here, silently replacing the previous
# circuit; the upstream original used a distinct third name (qc3n) --
# confirm whether this overwrite is intentional.
q_program.create_circuit(qc2n, [qr1, qr2], [cr1, cr2])
qrn = q_program.get_quantum_register_names()
crn = q_program.get_classical_register_names()
qcn = q_program.get_circuit_names()
self.assertCountEqual(qrn, [qr1n, qr2n])
self.assertCountEqual(crn, [cr1n, cr2n])
self.assertCountEqual(qcn, [qc1n, qc2n])
def test_get_qasm(self):
"""Test the get_qasm with int name. They need to be converted to OpenQASM format.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_INT)
qc = q_program.get_circuit(1)
qr = q_program.get_quantum_register(40)
cr = q_program.get_classical_register(50)
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.cx(qr[1], qr[2])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
qc.measure(qr[2], cr[2])
result = q_program.get_qasm(1)
# Fixed boilerplate plus the OpenQASM-sanitized register names, one
# occurrence per use site -- TODO confirm constants if the generator changes.
self.assertEqual(len(result), (147 +
len(qr.openqasm_name) * 9 +
len(cr.openqasm_name) * 4))
def test_get_qasms(self):
"""Test the get_qasms with int names. They need to be converted to OpenQASM format.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register(10, 3)
cr = q_program.create_classical_register(20, 3)
qc1 = q_program.create_circuit(101, [qr], [cr])
qc2 = q_program.create_circuit(102, [qr], [cr])
qc1.h(qr[0])
qc1.cx(qr[0], qr[1])
qc1.cx(qr[1], qr[2])
qc1.measure(qr[0], cr[0])
qc1.measure(qr[1], cr[1])
qc1.measure(qr[2], cr[2])
qc2.h(qr)
qc2.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc2.measure(qr[2], cr[2])
result = q_program.get_qasms([101, 102])
self.assertEqual(len(result[0]), (147 +
len(qr.openqasm_name) * 9 +
len(cr.openqasm_name) * 4))
self.assertEqual(len(result[1]), (137 +
len(qr.openqasm_name) * 7 +
len(cr.openqasm_name) * 4))
def test_get_qasm_all_gates(self):
"""Test the get_qasm for more gates. Names are ints.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_INT)
qc = q_program.get_circuit(1)
qr = q_program.get_quantum_register(40)
cr = q_program.get_classical_register(50)
qc.u1(0.3, qr[0])
qc.u2(0.2, 0.1, qr[1])
qc.u3(0.3, 0.2, 0.1, qr[2])
qc.s(qr[1])
qc.s(qr[2]).inverse()
qc.cx(qr[1], qr[2])
qc.barrier()
qc.cx(qr[0], qr[1])
qc.h(qr[0])
qc.x(qr[2]).c_if(cr, 0)
qc.y(qr[2]).c_if(cr, 1)
qc.z(qr[2]).c_if(cr, 2)
qc.barrier(qr)
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
qc.measure(qr[2], cr[2])
result = q_program.get_qasm(1)
self.assertEqual(len(result), (385 +
len(qr.openqasm_name) * 23 +
len(cr.openqasm_name) * 7))
###############################################################
# Test for compile when names are integers
###############################################################
def test_compile_program(self):
"""Test compile_program. Names are integers
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_INT)
qc = q_program.get_circuit(1)
qr = q_program.get_quantum_register(40)
cr = q_program.get_classical_register(50)
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
backend = 'local_qasm_simulator'
coupling_map = None
out = q_program.compile([1], backend=backend,
coupling_map=coupling_map, qobj_id='cooljob')
self.log.info(out)
self.assertEqual(len(out), 3)
def test_get_execution_list(self):
"""Test get_execution_list with int names.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_INT)
qc = q_program.get_circuit(1)
qr = q_program.get_quantum_register(40)
cr = q_program.get_classical_register(50)
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
backend = 'local_qasm_simulator'
coupling_map = None
qobj = q_program.compile([1], backend=backend,
coupling_map=coupling_map, qobj_id='cooljob')
result = q_program.get_execution_list(qobj, print_func=self.log.info)
self.log.info(result)
self.assertEqual(result, [1])
def test_change_circuit_qobj_after_compile(self):
"""Compiled qobjs must be isolated copies: edits to one qobj or to the
shared config dict must not leak into a qobj compiled later.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_INT)
qr = q_program.get_quantum_register(40)
cr = q_program.get_classical_register(50)
qc2 = q_program.create_circuit(102, [qr], [cr])
qc3 = q_program.create_circuit(103, [qr], [cr])
qc2.h(qr[0])
qc2.cx(qr[0], qr[1])
qc2.cx(qr[0], qr[2])
qc3.h(qr)
qc2.measure(qr, cr)
qc3.measure(qr, cr)
circuits = [102, 103]
shots = 1024 # the number of shots in the experiment.
backend = 'local_qasm_simulator'
config = {'seed': 10, 'shots': 1, 'xvals': [1, 2, 3, 4]}
qobj1 = q_program.compile(circuits, backend=backend, shots=shots,
seed=88, config=config)
# Mutations after compile; the later qobj2 must not observe them.
qobj1['circuits'][0]['config']['shots'] = 50
qobj1['circuits'][0]['config']['xvals'] = [1, 1, 1]
config['shots'] = 1000
config['xvals'][0] = 'only for qobj2'
qobj2 = q_program.compile(circuits, backend=backend, shots=shots,
seed=88, config=config)
self.assertTrue(qobj1['circuits'][0]['config']['shots'] == 50)
self.assertTrue(qobj1['circuits'][1]['config']['shots'] == 1)
self.assertTrue(qobj1['circuits'][0]['config']['xvals'] == [1, 1, 1])
self.assertTrue(qobj1['circuits'][1]['config']['xvals'] == [1, 2, 3, 4])
self.assertTrue(qobj1['config']['shots'] == 1024)
self.assertTrue(qobj2['circuits'][0]['config']['shots'] == 1000)
self.assertTrue(qobj2['circuits'][1]['config']['shots'] == 1000)
self.assertTrue(qobj2['circuits'][0]['config']['xvals'] == [
'only for qobj2', 2, 3, 4])
self.assertTrue(qobj2['circuits'][1]['config']['xvals'] == [
'only for qobj2', 2, 3, 4])
def test_add_circuit(self):
"""Test add two circuits with int names.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register(1, 2)
cr = q_program.create_classical_register(2, 2)
qc1 = q_program.create_circuit(10, [qr], [cr])
qc2 = q_program.create_circuit(20, [qr], [cr])
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
new_circuit = qc1 + qc2
q_program.add_circuit(1001, new_circuit)
circuits = [1001]
backend = 'local_qasm_simulator' # the backend to run on
shots = 1024 # the number of shots in the experiment.
result = q_program.execute(circuits, backend=backend, shots=shots,
seed=78)
counts = result.get_counts(1001)
target = {'00': shots / 2, '01': shots / 2}
threshold = 0.025 * shots
self.assertDictAlmostEqual(counts, target, threshold)
class TestTupleIds(QiskitTestCase):
"""Circuits and records can have tuples as names"""
def setUp(self):
self.QPS_SPECS_TUPLE = {
"circuits": [{
"name": (1.1, 1j),
"quantum_registers": [{
"name": (40.1, 40j),
"size": 3}],
"classical_registers": [{
"name": (50.1, 50j),
"size": 3}]
}]
}
###############################################################
# Tests to initiate an build a quantum program with tuple ids
###############################################################
def test_create_program_with_specs(self):
"""Test Quantum Object Factory creation using Specs definition
object with tuple names for circuit nor records.
"""
result = QuantumProgram(specs=self.QPS_SPECS_TUPLE)
self.assertIsInstance(result, QuantumProgram)
def test_create_classical_register(self):
"""Test create_classical_register with tuple name
"""
q_program = QuantumProgram()
cr = q_program.create_classical_register((50.1, 50j), 3)
self.assertIsInstance(cr, ClassicalRegister)
def test_create_quantum_register(self):
"""Test create_quantum_register with tuple name.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register((32.1, 32j), 3)
self.assertIsInstance(qr, QuantumRegister)
def test_fail_create_classical_register_name(self):
"""Test duplicated create_quantum_register with int as names.
"""
q_program = QuantumProgram()
cr1 = q_program.create_classical_register((2.1, 2j), 3)
self.assertIsInstance(cr1, ClassicalRegister)
self.assertRaises(QISKitError,
q_program.create_classical_register, (2.1, 2j), 2)
def test_create_quantum_register_same(self):
"""Test create_quantum_register of same tuple name and size.
"""
q_program = QuantumProgram()
qr1 = q_program.create_quantum_register((1.1, 1j), 3)
qr2 = q_program.create_quantum_register((1.1, 1j), 3)
self.assertIs(qr1, qr2)
def test_create_classical_register_same(self):
"""Test create_classical_register of same tuple name and size.
"""
q_program = QuantumProgram()
cr1 = q_program.create_classical_register((2.1, 2j), 3)
cr2 = q_program.create_classical_register((2.1, 2j), 3)
self.assertIs(cr1, cr2)
def test_create_classical_registers(self):
"""Test create_classical_registers with tuple name.
"""
q_program = QuantumProgram()
classical_registers = [{"name": (1.1, 1j), "size": 4},
{"name": (2.1, 2j), "size": 2}]
crs = q_program.create_classical_registers(classical_registers)
for i in crs:
self.assertIsInstance(i, ClassicalRegister)
def test_create_quantum_registers(self):
"""Test create_quantum_registers with tuple names
"""
q_program = QuantumProgram()
quantum_registers = [{"name": (1.1, 1j), "size": 4},
{"name": (2.1, 2j), "size": 2}]
qrs = q_program.create_quantum_registers(quantum_registers)
for i in qrs:
self.assertIsInstance(i, QuantumRegister)
def test_destroy_classical_register(self):
"""Test destroy_classical_register with tuple name."""
q_program = QuantumProgram()
_ = q_program.create_classical_register((1.1, 1j), 3)
self.assertIn((1.1, 1j), q_program.get_classical_register_names())
q_program.destroy_classical_register((1.1, 1j))
self.assertNotIn((1.1, 1j), q_program.get_classical_register_names())
# Destroying an invalid register should fail.
with self.assertRaises(QISKitError) as context:
q_program.destroy_classical_register((1.1, 1j))
self.assertIn('Not present', str(context.exception))
def test_destroy_quantum_register(self):
"""Test destroy_quantum_register with tuple name."""
q_program = QuantumProgram()
_ = q_program.create_quantum_register((1.1, 1j), 3)
self.assertIn((1.1, 1j), q_program.get_quantum_register_names())
q_program.destroy_quantum_register((1.1, 1j))
self.assertNotIn((1.1, 1j), q_program.get_quantum_register_names())
# Destroying an invalid register should fail.
with self.assertRaises(QISKitError) as context:
q_program.destroy_quantum_register((1.1, 1j))
self.assertIn('Not present', str(context.exception))
def test_create_circuit(self):
"""Test create_circuit with tuple names.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register((1.1, 1j), 3)
cr = q_program.create_classical_register((2.1, 2j), 3)
qc = q_program.create_circuit((3.1, 3j), [qr], [cr])
self.assertIsInstance(qc, QuantumCircuit)
def test_create_several_circuits(self):
"""Test create_circuit with several inputs with tuple names.
"""
q_program = QuantumProgram()
qr1 = q_program.create_quantum_register((10.1, 10j), 3)
cr1 = q_program.create_classical_register((20.1, 20j), 3)
qr2 = q_program.create_quantum_register((11.1, 11j), 3)
cr2 = q_program.create_classical_register((21.1, 21j), 3)
qc1 = q_program.create_circuit((30.1, 30j), [qr1], [cr1])
qc2 = q_program.create_circuit((31.1, 31j), [qr2], [cr2])
qc3 = q_program.create_circuit((32.1, 32j), [qr1, qr2], [cr1, cr2])
self.assertIsInstance(qc1, QuantumCircuit)
self.assertIsInstance(qc2, QuantumCircuit)
self.assertIsInstance(qc3, QuantumCircuit)
def test_destroy_circuit(self):
"""Test destroy_circuit with an tuple name."""
q_program = QuantumProgram()
qr = q_program.create_quantum_register((2.1, 2j), 3)
cr = q_program.create_classical_register((1.1, 1j), 3)
_ = q_program.create_circuit((10.1, 10j), [qr], [cr])
self.assertIn((10.1, 10j), q_program.get_circuit_names())
q_program.destroy_circuit((10.1, 10j))
self.assertNotIn((10.1, 10j), q_program.get_circuit_names())
# Destroying an invalid register should fail.
with self.assertRaises(QISKitError) as context:
q_program.destroy_circuit((10.1, 10j))
self.assertIn('Not present', str(context.exception))
def test_get_register_and_circuit_names(self):
"""Get the names of the circuits and registers when their names are ints.
"""
qr1n = (10.1, 10j)
qr2n = (11.1, 11j)
cr1n = (12.1, 12j)
cr2n = (13.1, 13j)
qc1n = (14.1, 14j)
qc2n = (15.1, 15j)
q_program = QuantumProgram()
qr1 = q_program.create_quantum_register(qr1n, 3)
cr1 = q_program.create_classical_register(cr1n, 3)
qr2 = q_program.create_quantum_register(qr2n, 3)
cr2 = q_program.create_classical_register(cr2n, 3)
q_program.create_circuit(qc1n, [qr1], [cr1])
q_program.create_circuit(qc2n, [qr2], [cr2])
q_program.create_circuit(qc2n, [qr1, qr2], [cr1, cr2])
qrn = q_program.get_quantum_register_names()
crn = q_program.get_classical_register_names()
qcn = q_program.get_circuit_names()
self.assertCountEqual(qrn, [qr1n, qr2n])
self.assertCountEqual(crn, [cr1n, cr2n])
self.assertCountEqual(qcn, [qc1n, qc2n])
def test_get_qasm(self):
"""Test the get_qasm with tuple name. They need to be coverted to OpenQASM format.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_TUPLE)
qc = q_program.get_circuit((1.1, 1j))
qr = q_program.get_quantum_register((40.1, 40j))
cr = q_program.get_classical_register((50.1, 50j))
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.cx(qr[1], qr[2])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
qc.measure(qr[2], cr[2])
result = q_program.get_qasm((1.1, 1j))
self.assertEqual(len(qr.openqasm_name) * 9 +
len(cr.openqasm_name) * 4 + 147, len(result))
def test_get_qasms(self):
"""Test the get_qasms with tuple names. They need to be coverted to OpenQASM format.
"""
q_program = QuantumProgram()
qr = q_program.create_quantum_register((10.1, 10j), 3)
cr = q_program.create_classical_register((20.1, 20j), 3)
qc1 = q_program.create_circuit((101.1, 101j), [qr], [cr])
qc2 = q_program.create_circuit((102.1, 102j), [qr], [cr])
qc1.h(qr[0])
qc1.cx(qr[0], qr[1])
qc1.cx(qr[1], qr[2])
qc1.measure(qr[0], cr[0])
qc1.measure(qr[1], cr[1])
qc1.measure(qr[2], cr[2])
qc2.h(qr)
qc2.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc2.measure(qr[2], cr[2])
result = q_program.get_qasms([(101.1, 101j), (102.1, 102j)])
self.assertEqual(len(qr.openqasm_name) * 9 +
len(cr.openqasm_name) * 4 + 147, len(result[0]))
self.assertEqual(len(qr.openqasm_name) * 7 +
len(cr.openqasm_name) * 4 + 137, len(result[1]))
def test_get_qasm_all_gates(self):
"""Test the get_qasm for more gates. Names are tuples.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_TUPLE)
qc = q_program.get_circuit((1.1, 1j))
qr = q_program.get_quantum_register((40.1, 40j))
cr = q_program.get_classical_register((50.1, 50j))
qc.u1(0.3, qr[0])
qc.u2(0.2, 0.1, qr[1])
qc.u3(0.3, 0.2, 0.1, qr[2])
qc.s(qr[1])
qc.s(qr[2]).inverse()
qc.cx(qr[1], qr[2])
qc.barrier()
qc.cx(qr[0], qr[1])
qc.h(qr[0])
qc.x(qr[2]).c_if(cr, 0)
qc.y(qr[2]).c_if(cr, 1)
qc.z(qr[2]).c_if(cr, 2)
qc.barrier(qr)
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
qc.measure(qr[2], cr[2])
result = q_program.get_qasm((1.1, 1j))
self.assertEqual(len(qr.openqasm_name) * 23 +
len(cr.openqasm_name) * 7 + 385, len(result))
###############################################################
# Test for compile when names are tuples
###############################################################
def test_compile_program(self):
"""Test compile_program. Names are tuples
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_TUPLE)
qc = q_program.get_circuit((1.1, 1j))
qr = q_program.get_quantum_register((40.1, 40j))
cr = q_program.get_classical_register((50.1, 50j))
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
backend = 'local_qasm_simulator'
coupling_map = None
out = q_program.compile([(1.1, 1j)], backend=backend,
coupling_map=coupling_map, qobj_id='cooljob')
self.log.info(out)
self.assertEqual(len(out), 3)
def test_get_execution_list(self):
"""Test get_execution_list with tuple names.
"""
q_program = QuantumProgram(specs=self.QPS_SPECS_TUPLE)
qc = q_program.get_circuit((1.1, 1j))
qr = q_program.get_quantum_register((40.1, 40j))
cr = q_program.get_classical_register((50.1, 50j))
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
backend = 'local_qasm_simulator'
coupling_map = None
qobj = q_program.compile([(1.1, 1j)], backend=backend,
coupling_map=coupling_map, qobj_id='cooljob')
result = q_program.get_execution_list(qobj, print_func=self.log.info)
self.log.info(result)
self.assertCountEqual(result, [(1.1, 1j)])
def test_change_circuit_qobj_after_compile(self):
q_program = QuantumProgram(specs=self.QPS_SPECS_TUPLE)
qr = q_program.get_quantum_register((40.1, 40j))
cr = q_program.get_classical_register((50.1, 50j))
qc2 = q_program.create_circuit((102.1, 102j), [qr], [cr])
qc3 = q_program.create_circuit((103.1, 103j), [qr], [cr])
qc2.h(qr[0])
qc2.cx(qr[0], qr[1])
qc2.cx(qr[0], qr[2])
qc3.h(qr)
qc2.measure(qr, cr)
qc3.measure(qr, cr)
circuits = [(102.1, 102j), (103.1, 103j)]
shots = 1024 # the number of shots in the experiment.
backend = 'local_qasm_simulator'
config = {'seed': 10, 'shots': 1, 'xvals': [1, 2, 3, 4]}
qobj1 = q_program.compile(circuits, backend=backend, shots=shots,
seed=88, config=config)
qobj1['circuits'][0]['config']['shots'] = 50
qobj1['circuits'][0]['config']['xvals'] = [1, 1, 1]
config['shots'] = 1000
config['xvals'][0] = 'only for qobj2'
qobj2 = q_program.compile(circuits, backend=backend, shots=shots,
seed=88, config=config)
self.assertTrue(qobj1['circuits'][0]['config']['shots'] == 50)
self.assertTrue(qobj1['circuits'][1]['config']['shots'] == 1)
self.assertTrue(qobj1['circuits'][0]['config']['xvals'] == [1, 1, 1])
self.assertTrue(qobj1['circuits'][1]['config']['xvals'] == [1, 2, 3, 4])
self.assertTrue(qobj1['config']['shots'] == 1024)
self.assertTrue(qobj2['circuits'][0]['config']['shots'] == 1000)
self.assertTrue(qobj2['circuits'][1]['config']['shots'] == 1000)
self.assertTrue(qobj2['circuits'][0]['config']['xvals'] == [
'only for qobj2', 2, 3, 4])
self.assertTrue(qobj2['circuits'][1]['config']['xvals'] == [
'only for qobj2', 2, 3, 4])
def test_add_circuit(self):
    """Test add two circuits with tuple names.

    Builds two small circuits (one applies H + measure on qubit 0, the
    other measures qubit 1), combines them with the ``+`` operator, adds
    the result under a new tuple name, and executes it on the local
    simulator. Since qubit 0 is in superposition and qubit 1 stays |0>,
    the counts should split ~50/50 between '00' and '01'.
    """
    q_program = QuantumProgram()
    qr = q_program.create_quantum_register(1, 2)
    cr = q_program.create_classical_register(2, 2)
    qc1 = q_program.create_circuit((10.1, 10j), [qr], [cr])
    qc2 = q_program.create_circuit((20.1, 20j), [qr], [cr])
    qc1.h(qr[0])
    qc1.measure(qr[0], cr[0])
    qc2.measure(qr[1], cr[1])
    # QuantumCircuit supports '+' composition; the sum is registered
    # under a fresh tuple name so it can be executed by name.
    new_circuit = qc1 + qc2
    q_program.add_circuit((1001.1, 1001j), new_circuit)
    circuits = [(1001.1, 1001j)]
    backend = 'local_qasm_simulator'  # the backend to run on
    shots = 1024  # the number of shots in the experiment.
    result = q_program.execute(circuits, backend=backend, shots=shots,
                               seed=78)
    counts = result.get_counts((1001.1, 1001j))
    # Expect an even split between '00' and '01' within a 2.5% tolerance.
    target = {'00': shots / 2, '01': shots / 2}
    threshold = 0.025 * shots
    self.assertDictAlmostEqual(counts, target, threshold)
class TestAnonymousIdsNoQuantumProgram(QiskitTestCase):
    """Test the anonymous use of registers.

    TODO: this needs to be expanded, ending up with the rest of the tests
    in the file not using QuantumProgram when it is deprecated.
    """

    def test_create_anonymous_classical_register(self):
        """Test creating a ClassicalRegister with no name.
        """
        cr = ClassicalRegister(size=3)
        self.assertIsInstance(cr, ClassicalRegister)

    def test_create_anonymous_quantum_register(self):
        """Test creating a QuantumRegister with no name.
        """
        qr = QuantumRegister(size=3)
        self.assertIsInstance(qr, QuantumRegister)

    def test_create_anonymous_classical_registers(self):
        """Test creating several ClassicalRegister with no name.
        """
        # Auto-generated names must be unique between instances.
        cr1 = ClassicalRegister(size=3)
        cr2 = ClassicalRegister(size=3)
        self.assertNotEqual(cr1.name, cr2.name)

    def test_create_anonymous_quantum_registers(self):
        """Test creating several QuantumRegister with no name.
        """
        # Auto-generated names must be unique between instances.
        qr1 = QuantumRegister(size=3)
        qr2 = QuantumRegister(size=3)
        self.assertNotEqual(qr1.name, qr2.name)

    def test_create_anonymous_mixed_registers(self):
        """Test creating several Registers with no name.
        """
        cr0 = ClassicalRegister(size=3)
        qr0 = QuantumRegister(size=3)
        # Get the current index counter of the registers.
        # NOTE(review): assumes auto-generated names are a one-letter
        # prefix followed by an integer counter (e.g. 'c0', 'q0').
        cr_index = int(cr0.name[1:])
        qr_index = int(qr0.name[1:])
        # One more classical register, two more quantum registers.
        cr1 = ClassicalRegister(size=3)
        _ = QuantumRegister(size=3)
        qr2 = QuantumRegister(size=3)
        # Check that the counters for each kind are incremented separately.
        cr_current = int(cr1.name[1:])
        qr_current = int(qr2.name[1:])
        self.assertEqual(cr_current, cr_index + 1)
        self.assertEqual(qr_current, qr_index + 2)

    def test_create_circuit_noname(self):
        """Test create_circuit with no name
        """
        q_program = QuantumProgram()
        qr = QuantumRegister(size=3)
        cr = ClassicalRegister(size=3)
        # create_circuit without a name should still yield a QuantumCircuit.
        qc = q_program.create_circuit(qregisters=[qr], cregisters=[cr])
        self.assertIsInstance(qc, QuantumCircuit)
# Run the test suite with verbose output when executed as a script.
if __name__ == '__main__':
    unittest.main(verbosity=2)
| 41.873498 | 97 | 0.592344 | 7,471 | 59,251 | 4.494579 | 0.04484 | 0.091962 | 0.065875 | 0.035022 | 0.918967 | 0.903273 | 0.883409 | 0.863427 | 0.828643 | 0.810775 | 0 | 0.045008 | 0.264282 | 59,251 | 1,414 | 98 | 41.903112 | 0.725294 | 0.125618 | 0 | 0.813321 | 0 | 0 | 0.037201 | 0 | 0 | 0 | 0 | 0.000707 | 0.162289 | 1 | 0.085366 | false | 0 | 0.002814 | 0 | 0.092871 | 0.003752 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4f55abea01d8bf543f1ab354fdffcb6e703eeb0 | 65,059 | py | Python | Test/spec/simd/meta/simd_i16x8_cmp.py | jplevyak/WAVM | 7234c70f47613a3fb0bfae4da2ba672aafb74ac9 | [
"BSD-3-Clause"
] | 327 | 2015-10-22T16:48:07.000Z | 2021-12-31T14:47:02.000Z | Test/spec/simd/meta/simd_i16x8_cmp.py | jplevyak/WAVM | 7234c70f47613a3fb0bfae4da2ba672aafb74ac9 | [
"BSD-3-Clause"
] | 10 | 2015-12-22T12:11:25.000Z | 2019-10-14T22:42:21.000Z | Test/spec/simd/meta/simd_i16x8_cmp.py | jplevyak/WAVM | 7234c70f47613a3fb0bfae4da2ba672aafb74ac9 | [
"BSD-3-Clause"
] | 30 | 2015-10-22T16:48:12.000Z | 2022-01-18T16:37:10.000Z | #!/usr/bin/env python3
"""
This file is used for generating i16x8-related test cases.
It inherits from the 'SimdCmpCase' class and overloads
the 'get_test_cases' method.
"""
from simd_compare import SimdCmpCase
# Generate i16x8 test case
class Simdi16x8CmpCase(SimdCmpCase):
LANE_TYPE = 'i16x8'
# Overloads base class method and sets test data for i16x8.
def get_case_data(self):
case_data = []
# eq
# i16x8.eq (i16x8) (i16x8)
case_data.append(['#', 'eq'])
case_data.append(['#', 'i16x8.eq (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['eq', ['0xFFFF', '0xFFFF'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0x0000', '0x0000'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0xF0F0', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0x0F0F', '0x0F0F'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['eq', ['0xFFFF', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0xFFFF', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0x8080', '32896'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0x8080', '-32640'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['eq', ['-1', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0', '0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['65535', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['65535', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['65535', '0'], ['65535', '0']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0', '65535'], ['0', '65535']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['-32768', '65534', '-1', '-0', '0', '1', '2', '65535'],
['32768', '-2', '-1', '-0', '0', '1', '2', '-1']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['eq', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['eq', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['eq', ['0x0F0F', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0x8000', '0x8001', '0x8002', '0x8003', '0x8004', '0x8005', '0x8006', '0x8007'],
['32775', '32774', '32773', '32772', '32771', '32770', '32769', '32768']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['32768', '32769', '65534', '65535', '0', '-1', '-32767', '-32768'],
['-32768', '-32767', '-1', '0', '65535', '65534', '32769', '32768']], ['-1', '-1', '0', '0', '0', '0', '-1', '-1'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.eq (i16x8) (i8x16)
case_data.append(['#', 'i16x8.eq (i16x8) (i8x16)'])
case_data.append(['eq', ['0xFFFF', '0xFF'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['eq', ['65535', '255'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['eq', ['0', '0'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['eq', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['eq', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['eq', [['-128', '-128', '0', '0', '1', '1', '255', '255'],
['-128', '0', '1', '255']], ['0', '0', '-1', '-1', '0', '0', '0', '0'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['eq', ['0x5555', '0xAA'], '0', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.eq (i16x8) (i32x4)
case_data.append(['#', 'i16x8.eq (i16x8) (i32x4)'])
case_data.append(['eq', ['0xFFFF', '0xFFFFFFFF'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['eq', ['65535', '4294967295'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['eq', ['0', '0'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['eq', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['eq', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['eq', [['65535', '0', '1', '32768'], ['65535', '0', '1', '32768']], ['-1', '0', '-1', '-1', '-1', '0', '-1', '0'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['eq', ['0x5555', '0xAAAAAAAA'], '0', ['i16x8', 'i32x4', 'i16x8']])
# ne
# i16x8.ne (i16x8) (i16x8)
case_data.append(['#', 'ne'])
case_data.append(['#', 'i16x8.ne (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['ne', ['0xFFFF', '0xFFFF'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['0x0000', '0x0000'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['0xF0F0', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['0x0F0F', '0x0F0F'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '0', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['ne', ['0xFFFF', '65535'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['0xFFFF', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['0x8080', '32896'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['0x8080', '-32640'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '0', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['ne', ['-1', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['0', '0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['255', '255'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', ['65535', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['255', '0'], ['255', '0']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0', '255'], ['0', '255']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['255', '32767', '-0', '0', '1', '2', '65534', '65535'],
['255', '32767', '0', '0', '1', '2', '-2', '-1']], '0', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['ne', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['ne', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['ne', ['0x0F0F', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['0x8081', '0x8283', '0xFDFE', '0xFF00', '0x0001', '0x027F', '0x80FD', '0xFEFF'],
['65279', '33021', '639', '1', '65280', '65022', '33411', '32897']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ne', [['128', '129', '130', '131', '-0', '255', '32766', '32767'],
['32767', '32766', '255', '-0', '131', '130', '129', '28']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# i16x8.ne (i16x8) (i8x16)
case_data.append(['#', 'i16x8.ne (i16x8) (i8x16)'])
case_data.append(['ne', ['0xFFFF', '0xFF'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ne', ['65535', '255'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ne', ['0', '0'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ne', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ne', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ne', [['-128', '-128', '0', '0', '1', '1', '255', '255'], ['-128', '0', '1', '255']],
['-1', '-1', '0', '0', '-1', '-1', '-1', '-1'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ne', ['0x5555', '0xAA'], '-1', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.ne (i16x8) (i32x4)
case_data.append(['#', 'i16x8.ne (i16x8) (i32x4)'])
case_data.append(['ne', ['0xFFFF', '0xFFFFFFFF'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ne', ['65535', '4294967295'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ne', ['0', '0'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ne', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ne', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ne', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['0', '-1', '0', '0', '0', '-1', '0', '-1'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ne', ['0x5555', '0xAAAAAAAA'], '-1', ['i16x8', 'i32x4', 'i16x8']])
# lt_s
# i16x8.lt_s (i16x8) (i16x8)
case_data.append(['#', 'lt_s'])
case_data.append(['#', 'i16x8.lt_s (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['lt_s', ['0xFFFF', '0xFFFF'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0x0000', '0x0000'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0xF0F0', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0x0F0F', '0x0F0F'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0xFFFF', '65535'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0xFFFF', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0x8080', '32896'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0x8080', '-32640'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['-1', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['0', '0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['255', '255'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', ['65535', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['255', '0'], ['255', '0']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0', '255'], ['0', '255']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['255', '32767', '-0', '0', '1', '2', '65534', '65535'], ['255', '32767', '0', '0', '1', '2', '-2', '-1']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['lt_s', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['lt_s', ['0x0F0F', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['0', '0', '-1', '0', '0', '0', '0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['0x8081', '0x8283', '0xFDFE', '0xFF00', '0x0001', '0x027F', '0x80FD', '0xFEFF'],
['65279', '33021', '639', '1', '65280', '65022', '33411', '32897']], ['-1', '0', '-1', '-1', '0', '0', '-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_s', [['128', '129', '130', '131', '-0', '255', '32766', '32767'],
['32767', '32766', '255', '-0', '131', '130', '129', '28']], ['-1', '-1', '-1', '0', '-1', '0', '0', '0'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.lt_s (i16x8) (i8x16)
case_data.append(['#', 'i16x8.lt_s (i16x8) (i8x16)'])
case_data.append(['lt_s', ['0xFFFF', '0xFF'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_s', ['65535', '255'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_s', ['0', '0'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_s', [['-128', '-128', '0', '0', '1', '1', '255', '255'], ['-128', '0', '1', '255']],
['0', '0', '0', '0', '-1', '-1', '0', '0'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_s', ['0x5555', '0xAA'], '0', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.lt_s (i16x8) (i32x4)
case_data.append(['#', 'i16x8.lt_s (i16x8) (i32x4)'])
case_data.append(['lt_s', ['0xFFFF', '0xFFFFFFFF'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_s', ['65535', '4294967295'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_s', ['0', '0'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_s', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['0', '-1', '0', '0', '0', '0', '0', '0'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_s', ['0x5555', '0xAAAAAAAA'], '0', ['i16x8', 'i32x4', 'i16x8']])
# lt_u
# i16x8.lt_u (i16x8) (i16x8)
case_data.append(['#', 'lt_u'])
case_data.append(['#', 'i16x8.lt_u (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['lt_u', ['0xFFFF', '0xFFFF'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['0x0000', '0x0000'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['0xF0F0', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['0x0F0F', '0x0F0F'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '0', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['lt_u', ['0xFFFF', '65535'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['0xFFFF', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['0x8080', '32896'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['0x8080', '-32640'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '0', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['lt_u', ['-1', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['0', '0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['255', '255'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', ['65535', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['255', '0'], ['255', '0']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0', '255'], ['0', '255']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['255', '32767', '-0', '0', '1', '2', '65534', '65535'],
['255', '32767', '0', '0', '1', '2', '-2', '-1']], '0', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['lt_u', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['lt_u', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['lt_u', ['0x0F0F', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['-1', '-1', '-1', '0', '0', '0', '0', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['0x8081', '0x8283', '0xFDFE', '0xFF00', '0x0001', '0x027F', '0x80FD', '0xFEFF'],
['65279', '33021', '639', '1', '65280', '65022', '33411', '32897']], ['-1', '0', '0', '0', '-1', '-1', '-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['lt_u', [['128', '129', '130', '131', '-0', '255', '32766', '32767'],
['32767', '32766', '255', '-0', '131', '130', '129', '28']], ['-1', '-1', '-1', '0', '-1', '0', '0', '0'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.lt_u (i16x8) (i8x16)
case_data.append(['#', 'i16x8.lt_u (i16x8) (i8x16)'])
case_data.append(['lt_u', ['0xFFFF', '0xFF'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_u', ['65535', '255'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_u', ['0', '0'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_u', [['-128', '-128', '0', '0', '1', '1', '255', '255'], ['-128', '0', '1', '255']], ['0', '-1'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['lt_u', ['0x5555', '0xAA'], '-1', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.lt_u (i16x8) (i32x4)
case_data.append(['#', 'i16x8.lt_u (i16x8) (i32x4)'])
case_data.append(['lt_u', ['0xFFFF', '0xFFFFFFFF'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_u', ['65535', '4294967295'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_u', ['0', '0'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_u', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']],
['0', '-1', '0', '0', '0', '0', '0', '0'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['lt_u', ['0x5555', '0xAAAAAAAA'], '-1', ['i16x8', 'i32x4', 'i16x8']])
# le_s
# i16x8.le_s (i16x8) (i16x8)
case_data.append(['#', 'le_s'])
case_data.append(['#', 'i16x8.le_s (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['le_s', ['0xFFFF', '0xFFFF'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['0x0000', '0x0000'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['0xF0F0', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['0x0F0F', '0x0F0F'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['le_s', ['0xFFFF', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['0xFFFF', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['0x8080', '32896'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['0x8080', '-32640'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['le_s', ['-1', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['0', '0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['65535', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', ['65535', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['65535', '0'], ['65535', '0']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0', '65535'], ['0', '65535']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['-32768', '65534', '-1', '-0', '0', '1', '2', '65535'],
['32768', '-2', '-1', '-0', '0', '1', '2', '-1']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['le_s', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['le_s', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['le_s', ['0x0F0F', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['0', '0', '-1', '0', '0', '0', '0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['0x8000', '0x8001', '0x8002', '0x8003', '0x8004', '0x8005', '0x8006', '0x8007'],
['32775', '32774', '32773', '32772', '32771', '32770', '32769', '32768']], ['-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_s', [['32768', '32769', '65534', '65535', '0', '-1', '-32767', '-32768'],
['-32768', '-32767', '-1', '0', '65535', '65534', '32769', '32768']], ['-1', '-1', '-1', '-1', '0', '0', '-1', '-1'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.le_s (i16x8) (i8x16)
case_data.append(['#', 'i16x8.le_s (i16x8) (i8x16)'])
case_data.append(['le_s', ['0xFFFF', '0xFF'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_s', ['65535', '255'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_s', ['0', '0'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_s', [['-128', '-128', '0', '0', '1', '1', '255', '255'], ['-128', '0', '1', '255']],
['0', '0', '-1', '-1', '-1', '-1', '0', '0'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_s', ['0x5555', '0xAA'], '0', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.le_s (i16x8) (i32x4)
case_data.append(['#', 'i16x8.le_s (i16x8) (i32x4)'])
case_data.append(['le_s', ['0xFFFF', '0xFFFFFFFF'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_s', ['65535', '4294967295'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_s', ['0', '0'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_s', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['-1', '-1', '-1', '-1', '-1', '0', '-1', '0'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_s', ['0x5555', '0xAAAAAAAA'], '0', ['i16x8', 'i32x4', 'i16x8']])
# le_u
# i16x8.le_u (i16x8) (i16x8)
case_data.append(['#', 'le_u'])
case_data.append(['#', 'i16x8.le_u (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['le_u', ['0xFFFF', '0xFFFF'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['0x0000', '0x0000'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['0xF0F0', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['0x0F0F', '0x0F0F'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['le_u', ['0xFFFF', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['0xFFFF', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['0x8080', '32896'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['0x8080', '-32640'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['le_u', ['-1', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['0', '0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['65535', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', ['65535', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['65535', '0'], ['65535', '0']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0', '65535'], ['0', '65535']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['-32768', '65534', '-1', '-0', '0', '1', '2', '65535'], ['32768', '-2', '-1', '-0', '0', '1', '2', '-1']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['le_u', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['le_u', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['le_u', ['0x0F0F', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['-1', '-1', '-1', '0', '0', '0', '0', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['0x8000', '0x8001', '0x8002', '0x8003', '0x8004', '0x8005', '0x8006', '0x8007'],
['32775', '32774', '32773', '32772', '32771', '32770', '32769', '32768']], ['-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['le_u', [['32768', '32769', '65534', '65535', '0', '-1', '-32767', '-32768'],
['-32768', '-32767', '-1', '0', '65535', '65534', '32769', '32768']], ['-1', '-1', '-1', '0', '-1', '0', '-1', '-1'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.le_u (i16x8) (i8x16)
case_data.append(['#', 'i16x8.le_u (i16x8) (i8x16)'])
case_data.append(['le_u', ['0xFFFF', '0xFF'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_u', ['65535', '255'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_u', ['0', '0'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_u', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['0', '0', '-1', '-1', '-1', '-1', '-1', '-1'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['le_u', ['0x5555', '0xAA'], '-1', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.le_u (i16x8) (i32x4)
case_data.append(['#', 'i16x8.le_u (i16x8) (i32x4)'])
case_data.append(['le_u', ['0xFFFF', '0xFFFFFFFF'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_u', ['65535', '4294967295'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_u', ['0', '0'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_u', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['-1', '-1', '-1', '-1', '-1', '0', '-1', '0'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['le_u', ['0x5555', '0xAAAAAAAA'], '-1', ['i16x8', 'i32x4', 'i16x8']])
# gt_s
# i16x8.gt_s (i16x8) (i16x8)
case_data.append(['#', 'gt_s'])
case_data.append(['#', 'i16x8.gt_s (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['gt_s', ['0xFFFF', '0xFFFF'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['0x0000', '0x0000'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['0xF0F0', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['0x0F0F', '0x0F0F'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '0', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['gt_s', ['0xFFFF', '65535'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['0xFFFF', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['0x8080', '32896'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['0x8080', '-32640'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '0', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['gt_s', ['-1', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['0', '0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['65535', '65535'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', ['65535', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['65535', '0'], ['65535', '0']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0', '65535'], ['0', '65535']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['-32768', '65534', '-1', '-0', '0', '1', '2', '65535'], ['32768', '-2', '-1', '-0', '0', '1', '2', '-1']], '0', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['gt_s', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['gt_s', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['gt_s', ['0x0F0F', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['-1', '-1', '0', '-1', '-1', '-1', '-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['0x8000', '0x8001', '0x8002', '0x8003', '0x8004', '0x8005', '0x8006', '0x8007'],
['32775', '32774', '32773', '32772', '32771', '32770', '32769', '32768']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_s', [['32768', '32769', '65534', '65535', '0', '-1', '-32767', '-32768'],
['-32768', '-32767', '-1', '0', '65535', '65534', '32769', '32768']], ['0', '0', '0', '0', '-1', '-1', '0', '0'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.gt_s (i16x8) (i8x16)
case_data.append(['#', 'i16x8.gt_s (i16x8) (i8x16)'])
case_data.append(['gt_s', ['0xFFFF', '0xFF'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_s', ['65535', '255'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_s', ['0', '0'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_s', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['-1', '0', '0', '-1'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_s', ['0x5555', '0xAA'], '-1', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.gt_s (i16x8) (i32x4)
case_data.append(['#', 'i16x8.gt_s (i16x8) (i32x4)'])
case_data.append(['gt_s', ['0xFFFF', '0xFFFFFFFF'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_s', ['65535', '4294967295'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_s', ['0', '0'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_s', [['65535', '0', '1', '32768'], ['65535', '0', '1', '32768']], ['0', '0', '0', '0', '0', '-1', '0', '0'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_s', ['0x5555', '0xAAAAAAAA'], '-1', ['i16x8', 'i32x4', 'i16x8']])
# gt_u
# i16x8.gt_u (i16x8) (i16x8)
case_data.append(['#', 'gt_u'])
case_data.append(['#', 'i16x8.gt_u (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['gt_u', ['0xFFFF', '0xFFFF'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', ['0x0000', '0x0000'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', ['0xF0F0', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', ['0x0F0F', '0x0F0F'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '0', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['gt_u', ['0xFFFF', '65535'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', ['0xFFFF', '-1'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', ['0x8080', '32896'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', ['0x8080', '-32640'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '0', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['eq', ['-1', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['0', '0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['65535', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', ['65535', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['65535', '0'], ['65535', '0']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['0', '65535'], ['0', '65535']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['eq', [['-32768', '65534', '-1', '-0', '0', '1', '2', '65535'],
['32768', '-2', '-1', '-0', '0', '1', '2', '-1']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['gt_u', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['gt_u', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '0', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['gt_u', ['0x0F0F', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['0', '0', '0', '-1', '-1', '-1', '-1', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['0x8000', '0x8001', '0x8002', '0x8003', '0x8004', '0x8005', '0x8006', '0x8007'],
['32775', '32774', '32773', '32772', '32771', '32770', '32769', '32768']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['gt_u', [['32768', '32769', '65534', '65535', '0', '-1', '-32767', '-32768'],
['-32768', '-32767', '-1', '0', '65535', '65534', '32769', '32768']], ['0', '0', '0', '-1', '0', '-1', '0', '0'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.gt_u (i16x8) (i8x16)
case_data.append(['#', 'i16x8.gt_u (i16x8) (i8x16)'])
case_data.append(['gt_u', ['0xFFFF', '0xFF'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_u', ['65535', '255'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_u', ['0', '0'], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '0', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_u', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['-1', '0', '0', '0'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['gt_u', ['0x5555', '0xAA'], '0', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.gt_u (i16x8) (i32x4)
case_data.append(['#', 'i16x8.gt_u (i16x8) (i32x4)'])
case_data.append(['gt_u', ['0xFFFF', '0xFFFFFFFF'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_u', ['65535', '4294967295'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_u', ['0', '0'], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '0', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_u', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['0', '0', '0', '0', '0', '-1', '0', '-1'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['gt_u', ['0x5555', '0xAAAAAAAA'], '0', ['i16x8', 'i32x4', 'i16x8']])
# ge_s
# i16x8.ge_s (i16x8) (i16x8)
case_data.append(['#', 'ge_s'])
case_data.append(['#', 'i16x8.ge_s (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['ge_s', ['0xFFFF', '0xFFFF'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['0x0000', '0x0000'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['0xF0F0', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['0x0F0F', '0x0F0F'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['ge_s', ['0xFFFF', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['0xFFFF', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['0x8080', '32896'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['0x8080', '-32640'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['ge_s', ['-1', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['0', '0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['65535', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', ['65535', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['65535', '0'], ['65535', '0']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0', '65535'], ['0', '65535']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['-32768', '65534', '-1', '-0', '0', '1', '2', '65535'], ['32768', '-2', '-1', '-0', '0', '1', '2', '-1']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['ge_s', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['ge_s', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['ge_s', ['0x0F0F', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['-1', '-1', '0', '-1', '-1', '-1', '-1', '0'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['0x8000', '0x8001', '0x8002', '0x8003', '0x8004', '0x8005', '0x8006', '0x8007'],
['32775', '32774', '32773', '32772', '32771', '32770', '32769', '32768']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_s', [['32768', '32769', '65534', '65535', '0', '-1', '-32767', '-32768'],
['-32768', '-32767', '-1', '0', '65535', '65534', '32769', '32768']], ['-1', '-1', '0', '0', '-1', '-1', '-1', '-1'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.ge_s (i16x8) (i8x16)
case_data.append(['#', 'i16x8.ge_s (i16x8) (i8x16)'])
case_data.append(['ge_s', ['0xFFFF', '0xFF'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_s', ['65535', '255'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_s', ['0', '0'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_s', [['-128', '0', '1', '255'], ['-128', '0', '1', '255']], ['-1', '-1', '-1', '-1', '0', '0', '-1', '-1'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_s', ['0xAAAA', '0x55'], '0', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.ge_s (i16x8) (i32x4)
case_data.append(['#', 'i16x8.ge_s (i16x8) (i32x4)'])
case_data.append(['ge_s', ['0xFFFF', '0xFFFFFFFF'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_s', ['65535', '4294967295'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_s', ['0', '0'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_s', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_s', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_s', [['65535', '0', '1', '32768'], ['65535', '0', '1', '32768']], ['-1', '0', '-1', '-1', '-1', '-1', '-1', '0'], ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_s', ['0x5555', '0xAAAAAAAA'], '-1', ['i16x8', 'i32x4', 'i16x8']])
# ge_u
# i16x8.ge_u (i16x8) (i16x8)
case_data.append(['#', 'ge_u'])
case_data.append(['#', 'i16x8.ge_u (i16x8) (i16x8)'])
# hex vs hex
case_data.append(['#', 'hex vs hex'])
case_data.append(['ge_u', ['0xFFFF', '0xFFFF'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['0x0000', '0x0000'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['0xF0F0', '0xF0F0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['0x0F0F', '0x0F0F'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0xFFFF', '0x0000'], ['0xFFFF', '0x0000']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0x0000', '0xFFFF'], ['0x0000', '0xFFFF']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB'],
['0x0100', '0x0302', '0x0904', '0x1110', '0x0A12', '0x1A0B', '0xAA1B', '0xFFAB']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs dec
case_data.append(['#', 'hex vs dec'])
case_data.append(['ge_u', ['0xFFFF', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['0xFFFF', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['0x8080', '32896'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['0x8080', '-32640'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0x8180', '0x8382', '0xFEFD', '0x00FF', '0x0100', '0x7F02', '0xFD80', '0xFFFE'],
['33152', '33666', '65277', '255', '256', '32514', '64896', '65534']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# dec vs dec
case_data.append(['#', 'dec vs dec'])
case_data.append(['ge_u', ['-1', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['0', '0'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['65535', '65535'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', ['65535', '-1'], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['65535', '0'], ['65535', '0']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0', '65535'], ['0', '65535']], '-1', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['-32768', '65534', '-1', '-0', '0', '1', '2', '65535'], ['32768', '-2', '-1', '-0', '0', '1', '2', '-1']], '-1', ['i16x8', 'i16x8', 'i16x8']])
# hex vs float
case_data.append(['#', 'hex vs float'])
case_data.append(['ge_u', [['0x0000', '0xc300', '0x0000', '0xc2fe', '0x0000', '0xbf80', '0x0000', '0x0000'],
['-128.0', '-127.0', '-1.0', '0.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
case_data.append(['ge_u', [['0x0000', '0x3f80', '0x0000', '0x42fe', '0x0000', '0x4300', '0x0000', '0x437f'],
['1.0', '127.0', '128.0', '255.0']], '-1', ['i16x8', 'f32x4', 'i16x8']])
# not equal
case_data.append(['#', 'not equal'])
case_data.append(['ge_u', ['0x0F0F', '0xF0F0'], '0', ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0x0000', '0xFFFF'], ['0xFFFF', '0x0000']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0x0001', '0x0203', '0x0409', '0x1011', '0x120A', '0x0B1A', '0x1BAA', '0xABFF'],
['0xFFAB', '0xAA1B', '0x1A0B', '0x0A12', '0x1110', '0x0904', '0x0302', '0x0100']], ['0', '0', '0', '-1', '-1', '-1', '-1', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['0x8000', '0x8001', '0x8002', '0x8003', '0x8004', '0x8005', '0x8006', '0x8007'],
['32775', '32774', '32773', '32772', '32771', '32770', '32769', '32768']], ['0', '-1'], ['i16x8', 'i16x8', 'i16x8']])
case_data.append(['ge_u', [['32768', '32769', '65534', '65535', '0', '-1', '-32767', '-32768'],
['-32768', '-32767', '-1', '0', '65535', '65534', '32769', '32768']], ['-1', '-1', '0', '-1', '0', '-1', '-1', '-1'], ['i16x8', 'i16x8', 'i16x8']])
# i16x8.ge_u (i16x8) (i8x16)
case_data.append(['#', 'i16x8.ge_u (i16x8) (i8x16)'])
case_data.append(['ge_u', ['0xFFFF', '0xFF'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_u', ['65535', '255'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_u', ['0', '0'], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x00', '0x01', '0x02', '0x03', '0x04', '0x05', '0x06', '0x07', '0x08', '0x09', '0x0A', '0x0B', '0x0C', '0x0D', '0x0E', '0x0F']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['-128', '-127', '-126', '-125', '-3', '-2', '-1', '0', '0', '1', '2', '127', '128', '253', '254', '255']], '-1', ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_u', [['-128', '-128', '0', '0', '1', '1', '255', '255'], ['-128', '0', '1', '255']], ['-1', '0'], ['i16x8', 'i8x16', 'i16x8']])
case_data.append(['ge_u', ['0xAAAA', '0x55'], '-1', ['i16x8', 'i8x16', 'i16x8']])
# i16x8.ge_u (i16x8) (i32x4)
case_data.append(['#', 'i16x8.ge_u (i16x8) (i32x4)'])
case_data.append(['ge_u', ['0xFFFF', '0xFFFFFFFF'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_u', ['65535', '4294967295'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_u', ['0', '0'], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_u', [['0x0100', '0x0302', '0x0504', '0x0706', '0x0908', '0x0B0A', '0x0D0C', '0x0F0E'],
['0x03020100', '0x07060504', '0x0B0A0908', '0x0F0E0D0C']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_u', [['33152', '33666', '65277', '255', '256', '32514', '64896', '65534'],
['2206368128', '16776957', '2130837760', '4294901120']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_u', [['65535', '0', '1', '32768'], ['-128', '0', '1', '255']], '-1', ['i16x8', 'i32x4', 'i16x8']])
case_data.append(['ge_u', ['0x5555', '0xAAAAAAAA'], '0', ['i16x8', 'i32x4', 'i16x8']])
return case_data
def gen_test_cases():
    """Instantiate the i16x8 comparison case generator and emit its test cases."""
    generator = Simdi16x8CmpCase()
    generator.gen_test_cases()
if __name__ == '__main__':
    # Delegate to gen_test_cases() instead of duplicating its body here.
    gen_test_cases()
| 80.919154 | 192 | 0.466315 | 7,506 | 65,059 | 3.923794 | 0.025446 | 0.176558 | 0.231495 | 0.221275 | 0.986011 | 0.981054 | 0.971139 | 0.936609 | 0.886425 | 0.830538 | 0 | 0.277992 | 0.208995 | 65,059 | 803 | 193 | 81.019925 | 0.294314 | 0.025423 | 0 | 0.332795 | 0 | 0 | 0.359133 | 0 | 0 | 0 | 0.125103 | 0 | 0 | 1 | 0.003231 | false | 0 | 0.001616 | 0 | 0.009693 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
be0e55a8f683ca03529d540b1ec5ddbe6365e1bd | 2,006 | py | Python | classifier/predict.py | arkuhn/cvskifree | 506d04002105023f40d29cd915ac00d70ab64a9e | [
"MIT"
] | 1 | 2019-04-18T20:14:34.000Z | 2019-04-18T20:14:34.000Z | classifier/predict.py | arkuhn/cvskifree | 506d04002105023f40d29cd915ac00d70ab64a9e | [
"MIT"
] | null | null | null | classifier/predict.py | arkuhn/cvskifree | 506d04002105023f40d29cd915ac00d70ab64a9e | [
"MIT"
] | null | null | null | from keras.models import load_model
import numpy as np
from PIL import Image
from skimage import transform
model = load_model('model.h5')
np_image = Image.open('left1.png')
np_image = np.array(np_image).astype('float32')/255
np_image = transform.resize(np_image, (213, 180, 1))
np_image = np.expand_dims(np_image, axis=0)
result = model.predict(np_image)
print(str(result[0][0]))
if result[0][0] >= 0.5:
print('right')
else:
print('left')
np_image = Image.open('left2.png')
np_image = np.array(np_image).astype('float32')/255
np_image = transform.resize(np_image, (213, 180, 1))
np_image = np.expand_dims(np_image, axis=0)
result = model.predict(np_image)
print(str(result[0][0]))
if result[0][0] >= 0.5:
print('right')
else:
print('left')
np_image = Image.open('left3.png')
np_image = np.array(np_image).astype('float32')/255
np_image = transform.resize(np_image, (213, 180, 1))
np_image = np.expand_dims(np_image, axis=0)
result = model.predict(np_image)
print(str(result[0][0]))
if result[0][0] >= 0.5:
print('right')
else:
print('left')
np_image = Image.open('right1.png')
np_image = np.array(np_image).astype('float32')/255
np_image = transform.resize(np_image, (213, 180, 1))
np_image = np.expand_dims(np_image, axis=0)
result = model.predict(np_image)
print(str(result[0][0]))
if result[0][0] >= 0.5:
print('right')
else:
print('left')
np_image = Image.open('right2.png')
np_image = np.array(np_image).astype('float32')/255
np_image = transform.resize(np_image, (213, 180, 1))
np_image = np.expand_dims(np_image, axis=0)
result = model.predict(np_image)
print(str(result[0][0]))
if result[0][0] >= 0.5:
print('right')
else:
print('left')
np_image = Image.open('right3.png')
np_image = np.array(np_image).astype('float32')/255
np_image = transform.resize(np_image, (213, 180, 1))
np_image = np.expand_dims(np_image, axis=0)
result = model.predict(np_image)
print(str(result[0][0]))
if result[0][0] >= 0.5:
print('right')
else:
print('left')
| 26.051948 | 52 | 0.694417 | 347 | 2,006 | 3.853026 | 0.123919 | 0.251309 | 0.080778 | 0.071803 | 0.881077 | 0.881077 | 0.881077 | 0.881077 | 0.881077 | 0.881077 | 0 | 0.068516 | 0.119641 | 2,006 | 76 | 53 | 26.394737 | 0.688562 | 0 | 0 | 0.830769 | 0 | 0 | 0.080339 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.061538 | 0 | 0.061538 | 0.276923 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0782c8bdf543bf3cee5855430278ad48791e5a52 | 29,908 | py | Python | tests/integration_tests/fixtures/world_bank_dashboard.py | m-ajay/superset | 2cd80543581155225f2b538ad8cd5ebc7de5a9ff | [
"Apache-2.0"
] | 18,621 | 2017-06-19T09:57:44.000Z | 2021-01-05T06:28:21.000Z | tests/integration_tests/fixtures/world_bank_dashboard.py | m-ajay/superset | 2cd80543581155225f2b538ad8cd5ebc7de5a9ff | [
"Apache-2.0"
] | 9,043 | 2017-07-05T16:10:48.000Z | 2021-01-05T17:58:01.000Z | tests/integration_tests/fixtures/world_bank_dashboard.py | m-ajay/superset | 2cd80543581155225f2b538ad8cd5ebc7de5a9ff | [
"Apache-2.0"
] | 5,527 | 2017-07-06T01:39:43.000Z | 2021-01-05T06:01:11.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import string
from random import choice, randint, random, uniform
from typing import Any, Dict, List
import pandas as pd
import pytest
from pandas import DataFrame
from sqlalchemy import DateTime, String
from superset import db
from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.utils.core import get_example_database, get_example_default_schema
from tests.integration_tests.dashboard_utils import (
create_dashboard,
create_table_metadata,
)
from tests.integration_tests.test_app import app
WB_HEALTH_POPULATION = "wb_health_population"
@pytest.fixture(scope="session")
def load_world_bank_data():
    """Load the randomized world-bank table into the example database.

    Session-scoped: the table is created once, then dropped after the
    whole test session finishes.
    """
    with app.app_context():
        database = get_example_database()
        # Presto cannot take a native DateTime here, so fall back to text.
        column_types = {
            "year": DateTime if database.backend != "presto" else String(255),
            "country_code": String(3),
            "country_name": String(255),
            "region": String(255),
        }
        _get_dataframe(database).to_sql(
            WB_HEALTH_POPULATION,
            database.get_sqla_engine(),
            if_exists="replace",
            chunksize=500,
            dtype=column_types,
            index=False,
            method="multi",
            schema=get_example_default_schema(),
        )

    yield

    with app.app_context():
        get_example_database().get_sqla_engine().execute(
            "DROP TABLE IF EXISTS wb_health_population"
        )
@pytest.fixture()
def load_world_bank_dashboard_with_slices(load_world_bank_data):
    """Function-scoped world-bank dashboard built on the loaded data."""
    with app.app_context():
        dash_id, slice_ids = create_dashboard_for_loaded_data()

    yield

    # Remove the dashboard and its slices once the test is done.
    _cleanup(dash_id, slice_ids)
@pytest.fixture(scope="module")
def load_world_bank_dashboard_with_slices_module_scope(load_world_bank_data):
    """Module-scoped variant of load_world_bank_dashboard_with_slices."""
    with app.app_context():
        dash_id, slice_ids = create_dashboard_for_loaded_data()

    yield

    # Remove the dashboard and its slices once the module is done.
    _cleanup(dash_id, slice_ids)
def create_dashboard_for_loaded_data():
    """Create the world-bank dashboard plus its slices.

    Returns:
        Tuple of (dashboard id, list of slice ids) so the caller can
        clean them up after the test.
    """
    with app.app_context():
        table = create_table_metadata(WB_HEALTH_POPULATION, get_example_database())
        slices = _create_world_bank_slices(table)
        dash = _create_world_bank_dashboard(table, slices)
        # Fix: loop variable renamed from `slice`, which shadowed the builtin.
        slices_ids_to_delete = [slc.id for slc in slices]
        dash_id_to_delete = dash.id
        return dash_id_to_delete, slices_ids_to_delete
def _create_world_bank_slices(table: SqlaTable) -> List[Slice]:
    """Build the example world-bank slices for *table* and persist them."""
    from superset.examples.world_bank import create_slices

    created = create_slices(table)
    _commit_slices(created)
    return created
def _commit_slices(slices: List[Slice]) -> None:
    """Persist *slices*, replacing any existing slice with the same name.

    Each slice is committed individually, mirroring the original behavior.
    """
    # Fix: loop variable renamed from `slice`, which shadowed the builtin.
    for slc in slices:
        existing = (
            db.session.query(Slice).filter_by(slice_name=slc.slice_name).one_or_none()
        )
        if existing:
            db.session.delete(existing)
        db.session.add(slc)
        db.session.commit()
def _create_world_bank_dashboard(table: SqlaTable, slices: List[Slice]) -> Dashboard:
    """Assemble the dashboard from the example layout and the given slices."""
    from superset.examples.world_bank import dashboard_positions
    from superset.examples.helpers import update_slice_ids

    positions = dashboard_positions
    update_slice_ids(positions, slices)
    table.fetch_metadata()

    dash = create_dashboard(
        "world_health", "World Bank's Data", json.dumps(positions), slices
    )
    dash.json_metadata = '{"mock_key": "mock_value"}'
    db.session.commit()
    return dash
def _cleanup(dash_id: int, slices_ids: List[int]) -> None:
    """Delete the dashboard and slices created for the fixture.

    Args:
        dash_id: id of the dashboard to remove.
        slices_ids: ids of the slices to remove.
    """
    dash = db.session.query(Dashboard).filter_by(id=dash_id).first()
    # Robustness fix: `.first()` may return None if the dashboard was already
    # deleted by the test itself; deleting None would raise.
    if dash is not None:
        db.session.delete(dash)
    for slice_id in slices_ids:
        db.session.query(Slice).filter_by(id=slice_id).delete()
    db.session.commit()
def _get_dataframe(database: Database) -> DataFrame:
    """Return the randomized world-bank data typed for *database*.

    The ``year`` column is parsed to datetime; for presto it is then
    serialized back to a text timestamp, since the table uses a String
    column on that backend.
    """
    df = pd.DataFrame.from_dict(_get_world_bank_data())
    # Hoisted: both branches parsed the column identically.
    df.year = pd.to_datetime(df.year)
    if database.backend == "presto":
        # Bug fix: the format string was "%H:%M%:%S" (stray "%" before ":S"),
        # which strftime renders literally instead of as seconds.
        df.year = df.year.dt.strftime("%Y-%m-%d %H:%M:%S")
    return df
def _age_band_columns(band: str) -> tuple:
    """Female/male count and 5-year-share columns for one population age band."""
    return tuple(f"SP_POP_{band}_{suffix}" for suffix in ("FE", "FE_5Y", "MA", "MA_5Y"))


# Indicator columns filled with a random float (or None). The order matches the
# original hand-written record so the generated table keeps its column order.
_WB_FLOAT_COLUMNS = (
    "NY_GNP_PCAP_CD",
    "SE_ADT_1524_LT_FM_ZS", "SE_ADT_1524_LT_MA_ZS", "SE_ADT_1524_LT_ZS",
    "SE_ADT_LITR_FE_ZS", "SE_ADT_LITR_MA_ZS", "SE_ADT_LITR_ZS",
    "SE_ENR_ORPH",
    "SE_PRM_CMPT_FE_ZS", "SE_PRM_CMPT_MA_ZS", "SE_PRM_CMPT_ZS",
    "SE_PRM_ENRR", "SE_PRM_ENRR_FE", "SE_PRM_ENRR_MA",
    "SE_PRM_NENR", "SE_PRM_NENR_FE", "SE_PRM_NENR_MA",
    "SE_SEC_ENRR", "SE_SEC_ENRR_FE", "SE_SEC_ENRR_MA",
    "SE_SEC_NENR", "SE_SEC_NENR_FE", "SE_SEC_NENR_MA",
    "SE_TER_ENRR", "SE_TER_ENRR_FE",
    "SE_XPD_TOTL_GD_ZS",
    "SH_ANM_CHLD_ZS", "SH_ANM_NPRG_ZS",
    "SH_CON_1524_FE_ZS", "SH_CON_1524_MA_ZS",
    "SH_CON_AIDS_FE_ZS", "SH_CON_AIDS_MA_ZS",
    "SH_DTH_COMM_ZS", "SH_DTH_IMRT", "SH_DTH_INJR_ZS",
    "SH_DTH_MORT", "SH_DTH_NCOM_ZS", "SH_DTH_NMRT",
    "SH_DYN_AIDS", "SH_DYN_AIDS_DH", "SH_DYN_AIDS_FE_ZS", "SH_DYN_AIDS_ZS",
    "SH_DYN_MORT", "SH_DYN_MORT_FE", "SH_DYN_MORT_MA", "SH_DYN_NMRT",
    "SH_FPL_SATI_ZS",
    "SH_H2O_SAFE_RU_ZS", "SH_H2O_SAFE_UR_ZS", "SH_H2O_SAFE_ZS",
    "SH_HIV_0014", "SH_HIV_1524_FE_ZS", "SH_HIV_1524_KW_FE_ZS",
    "SH_HIV_1524_KW_MA_ZS", "SH_HIV_1524_MA_ZS", "SH_HIV_ARTC_ZS",
    "SH_HIV_KNOW_FE_ZS", "SH_HIV_KNOW_MA_ZS", "SH_HIV_ORPH", "SH_HIV_TOTL",
    "SH_IMM_HEPB", "SH_IMM_HIB3", "SH_IMM_IBCG", "SH_IMM_IDPT",
    "SH_IMM_MEAS", "SH_IMM_POL3",
    "SH_MED_BEDS_ZS", "SH_MED_CMHW_P3", "SH_MED_NUMW_P3", "SH_MED_PHYS_ZS",
    "SH_MLR_NETS_ZS", "SH_MLR_PREG_ZS", "SH_MLR_SPF2_ZS", "SH_MLR_TRET_ZS",
    "SH_MMR_DTHS", "SH_MMR_LEVE", "SH_MMR_RISK", "SH_MMR_RISK_ZS",
    "SH_MMR_WAGE_ZS",
    "SH_PRG_ANEM", "SH_PRG_ARTC_ZS", "SH_PRG_SYPH_ZS",
    "SH_PRV_SMOK_FE", "SH_PRV_SMOK_MA",
    "SH_STA_ACSN", "SH_STA_ACSN_RU", "SH_STA_ACSN_UR",
    "SH_STA_ANV4_ZS", "SH_STA_ANVC_ZS", "SH_STA_ARIC_ZS", "SH_STA_BFED_ZS",
    "SH_STA_BRTC_ZS", "SH_STA_BRTW_ZS", "SH_STA_DIAB_ZS", "SH_STA_IYCF_ZS",
    "SH_STA_MALN_FE_ZS", "SH_STA_MALN_MA_ZS", "SH_STA_MALN_ZS", "SH_STA_MALR",
    "SH_STA_MMRT", "SH_STA_MMRT_NE", "SH_STA_ORCF_ZS", "SH_STA_ORTH",
    "SH_STA_OW15_FE_ZS", "SH_STA_OW15_MA_ZS", "SH_STA_OW15_ZS",
    "SH_STA_OWGH_FE_ZS", "SH_STA_OWGH_MA_ZS", "SH_STA_OWGH_ZS",
    "SH_STA_PNVC_ZS",
    "SH_STA_STNT_FE_ZS", "SH_STA_STNT_MA_ZS", "SH_STA_STNT_ZS",
    "SH_STA_WAST_FE_ZS", "SH_STA_WAST_MA_ZS", "SH_STA_WAST_ZS",
    "SH_SVR_WAST_FE_ZS", "SH_SVR_WAST_MA_ZS", "SH_SVR_WAST_ZS",
    "SH_TBS_CURE_ZS", "SH_TBS_DTEC_ZS", "SH_TBS_INCD", "SH_TBS_MORT",
    "SH_TBS_PREV",
    "SH_VAC_TTNS_ZS",
    "SH_XPD_EXTR_ZS", "SH_XPD_OOPC_TO_ZS", "SH_XPD_OOPC_ZS",
    "SH_XPD_PCAP", "SH_XPD_PCAP_PP_KD", "SH_XPD_PRIV", "SH_XPD_PRIV_ZS",
    "SH_XPD_PUBL", "SH_XPD_PUBL_GX_ZS", "SH_XPD_PUBL_ZS",
    "SH_XPD_TOTL_CD", "SH_XPD_TOTL_ZS",
    "SI_POV_NAHC", "SI_POV_RUHC", "SI_POV_URHC",
    "SL_EMP_INSV_FE_ZS", "SL_TLF_TOTL_FE_ZS", "SL_TLF_TOTL_IN",
    "SL_UEM_TOTL_FE_ZS", "SL_UEM_TOTL_MA_ZS", "SL_UEM_TOTL_ZS",
    "SM_POP_NETM",
    "SN_ITK_DEFC", "SN_ITK_DEFC_ZS", "SN_ITK_SALT_ZS", "SN_ITK_VITA_ZS",
    "SP_ADO_TFRT",
    "SP_DYN_AMRT_FE", "SP_DYN_AMRT_MA", "SP_DYN_CBRT_IN", "SP_DYN_CDRT_IN",
    "SP_DYN_CONU_ZS",
    "SP_DYN_IMRT_FE_IN", "SP_DYN_IMRT_IN", "SP_DYN_IMRT_MA_IN",
    "SP_DYN_LE00_FE_IN", "SP_DYN_LE00_IN", "SP_DYN_LE00_MA_IN",
    "SP_DYN_SMAM_FE", "SP_DYN_SMAM_MA", "SP_DYN_TFRT_IN",
    "SP_DYN_TO65_FE_ZS", "SP_DYN_TO65_MA_ZS", "SP_DYN_WFRT",
    "SP_HOU_FEMA_ZS",
    "SP_MTR_1519_ZS",
    *_age_band_columns("0004"),
    "SP_POP_0014_FE_ZS", "SP_POP_0014_MA_ZS",
    "SP_POP_0014_TO", "SP_POP_0014_TO_ZS",
    *_age_band_columns("0509"),
    *_age_band_columns("1014"),
    *_age_band_columns("1519"),
    "SP_POP_1564_FE_ZS", "SP_POP_1564_MA_ZS",
    "SP_POP_1564_TO", "SP_POP_1564_TO_ZS",
    *_age_band_columns("2024"),
    *_age_band_columns("2529"),
    *_age_band_columns("3034"),
    *_age_band_columns("3539"),
    *_age_band_columns("4044"),
    *_age_band_columns("4549"),
    *_age_band_columns("5054"),
    *_age_band_columns("5559"),
    *_age_band_columns("6064"),
    *_age_band_columns("6569"),
    "SP_POP_65UP_FE_ZS", "SP_POP_65UP_MA_ZS",
    "SP_POP_65UP_TO", "SP_POP_65UP_TO_ZS",
    *_age_band_columns("7074"),
    *_age_band_columns("7579"),
    *_age_band_columns("80UP"),
    # Single-year-of-age populations SP_POP_AG00..SP_POP_AG25, female then male.
    *(f"SP_POP_AG{age:02d}_{sex}_IN" for age in range(26) for sex in ("FE", "MA")),
    "SP_POP_BRTH_MF",
    "SP_POP_DPND", "SP_POP_DPND_OL", "SP_POP_DPND_YG",
    "SP_POP_GROW", "SP_POP_TOTL",
    "SP_POP_TOTL_FE_IN", "SP_POP_TOTL_FE_ZS",
    "SP_POP_TOTL_MA_IN", "SP_POP_TOTL_MA_ZS",
    "SP_REG_BRTH_RU_ZS", "SP_REG_BRTH_UR_ZS", "SP_REG_BRTH_ZS",
    "SP_REG_DTHS_ZS",
    "SP_RUR_TOTL", "SP_RUR_TOTL_ZG", "SP_RUR_TOTL_ZS",
    "SP_URB_GROW", "SP_URB_TOTL", "SP_URB_TOTL_IN_ZS",
    "SP_UWT_TFRT",
)


def _get_world_bank_data() -> List[Dict[Any, Any]]:
    """Build 100 random records shaped like the world-bank health table.

    Each record has random text identity columns, a random "YYYY-M-D" year
    string, and every indicator column set via get_random_float_or_none
    (a float in [0, 100], or None with probability 0.3).

    Refactor: the original spelled out each of the ~324 indicator entries by
    hand; they are now driven from _WB_FLOAT_COLUMNS, preserving key order.
    """
    letters = string.ascii_uppercase + string.ascii_lowercase
    data: List[Dict[Any, Any]] = []
    for _ in range(100):
        row: Dict[Any, Any] = {
            "country_name": "".join(
                choice(letters + " ") for _ in range(randint(3, 10))
            ),
            "country_code": "".join(choice(letters) for _ in range(3)),
            "region": "".join(choice(letters) for _ in range(randint(3, 10))),
            "year": "-".join(
                [str(randint(1900, 2020)), str(randint(1, 12)), str(randint(1, 28))]
            ),
        }
        for column in _WB_FLOAT_COLUMNS:
            row[column] = get_random_float_or_none(0, 100, 0.3)
        data.append(row)
    return data
def get_random_float_or_none(min_value, max_value, none_probability):
    """Return None with probability *none_probability*, otherwise a uniform
    random float in [min_value, max_value]."""
    draw_none = random() < none_probability
    return None if draw_none else uniform(min_value, max_value)
| 59.223762 | 88 | 0.621506 | 5,088 | 29,908 | 3.146816 | 0.079403 | 0.122166 | 0.28418 | 0.324777 | 0.812879 | 0.798826 | 0.783836 | 0.776466 | 0.774343 | 0.774343 | 0 | 0.113197 | 0.262739 | 29,908 | 504 | 89 | 59.34127 | 0.612925 | 0.025144 | 0 | 0.059341 | 0 | 0 | 0.174543 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.024176 | false | 0 | 0.041758 | 0 | 0.081319 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
079df3d41ed3a95d1744ca3825086e5f9412d649 | 6,190 | py | Python | tests/unit_tests/mountcontrol/test_alignStar.py | mworion/MountWizzard4 | 4e06b29ec2ef70be40e114b911b7bdf2f858a4b1 | [
"Apache-2.0"
] | 16 | 2020-01-11T22:32:26.000Z | 2022-03-31T15:18:14.000Z | tests/unit_tests/mountcontrol/test_alignStar.py | mworion/MountWizzard4 | 4e06b29ec2ef70be40e114b911b7bdf2f858a4b1 | [
"Apache-2.0"
] | 196 | 2020-01-16T13:56:01.000Z | 2022-03-29T02:06:51.000Z | tests/unit_tests/mountcontrol/test_alignStar.py | mworion/MountWizzard4 | 4e06b29ec2ef70be40e114b911b7bdf2f858a4b1 | [
"Apache-2.0"
] | 6 | 2019-12-01T19:39:33.000Z | 2021-05-27T13:14:20.000Z | ############################################################
# -*- coding: utf-8 -*-
#
# # # # # # #
# ## ## # ## # #
# # # # # # # # # # #
# # ## # ## ## ######
# # # # # # #
#
# Python-based Tool for interaction with the 10micron mounts
# GUI with PyQT5 for python
#
# written in python3, (c) 2019-2021 by mworion
# Licence APL2.0
#
###########################################################
# standard libraries
import unittest
# external packages
import skyfield.api
from skyfield.api import wgs84
# local imports
from mountcontrol.model import AlignStar
from mountcontrol import obsSite
# Anchor the shared observation site at (0 deg N, 0 deg E, 0 m) so the
# AlignStar tests below are independent of any real observer location.
obsSite.location = wgs84.latlon(latitude_degrees=0,
                                longitude_degrees=0,
                                elevation_m=0)
class TestConfigData(unittest.TestCase):
    """Exercise AlignStar coordinate parsing and pierside/sidereal validation."""

    def setUp(self):
        pass

    def _verify_coord(self, coord, hms, dms):
        # Compare a parsed coordinate against expected RA (h, m, s)
        # and Dec (d, m, s) values, to 6 places each.
        for got, want in zip(coord.ra.hms(), hms):
            self.assertAlmostEqual(got, want, 6)
        for got, want in zip(coord.dec.dms(), dms):
            self.assertAlmostEqual(got, want, 6)

    def test_APoint_mCoord_1(self):
        # Sexagesimal strings for both mount and solved coordinates.
        ra = '12:45:33.01'
        dec = '+56*30:00.5'
        sidereal = skyfield.api.Angle(hours=12.5)
        aPoint = AlignStar(mCoord=(ra, dec),
                           pierside='W',
                           sCoord=(ra, dec),
                           sidereal=sidereal)
        self._verify_coord(aPoint.mCoord, (12, 45, 33.01), (56, 30, 0.5))
        self._verify_coord(aPoint.sCoord, (12, 45, 33.01), (56, 30, 0.5))

    def test_APoint_mCoord_2(self):
        # Plain floats: 12.5 hours RA, 56.5 degrees Dec.
        sidereal = skyfield.api.Angle(hours=12.5)
        aPoint = AlignStar(mCoord=(12.5, 56.5),
                           pierside='W',
                           sCoord=(12.5, 56.5),
                           sidereal=sidereal)
        self._verify_coord(aPoint.mCoord, (12, 30, 0), (56, 30, 0))
        self._verify_coord(aPoint.sCoord, (12, 30, 0), (56, 30, 0))

    def test_APoint_mCoord_3(self):
        # A skyfield Star object is accepted directly.
        sidereal = skyfield.api.Angle(hours=12.5)
        star = skyfield.api.Star(ra_hours=12.55, dec_degrees=56.55)
        aPoint = AlignStar(mCoord=star,
                           pierside='W',
                           sCoord=star,
                           sidereal=sidereal)
        self._verify_coord(aPoint.mCoord, (12, 33, 0), (56, 33, 0))
        self._verify_coord(aPoint.sCoord, (12, 33, 0), (56, 33, 0))

    def test_APoint_mCoord_4(self):
        # A 3-element tuple is invalid: mCoord stays None.
        aPoint = AlignStar(mCoord=('12:45:33.01', '+56*30:00.5', '1234.5'))
        self.assertEqual(None, aPoint.mCoord)

    def test_APoint_mCoord_5(self):
        # A 3-element list is invalid as well.
        aPoint = AlignStar(mCoord=['12:45:33.01', '+56*30:00.5', '1234.5'])
        self.assertEqual(None, aPoint.mCoord)

    def test_APoint_mCoord_6(self):
        # A bare number cannot be a coordinate pair.
        aPoint = AlignStar(mCoord=56)
        self.assertEqual(None, aPoint.mCoord)

    def test_APoint_mCoord_7(self):
        # Malformed RA string is rejected.
        aPoint = AlignStar(mCoord=('12:45:EE.01', 67))
        self.assertEqual(None, aPoint.mCoord)

    def test_APoint_sCoord_1(self):
        # Same invalid-input handling applies to the solved coordinate.
        aPoint = AlignStar(sCoord=('12:45:33.01', '+56*30:00.5', '1234.5'))
        self.assertEqual(None, aPoint.sCoord)

    def test_APoint_sCoord_2(self):
        aPoint = AlignStar(sCoord=['12:45:33.01', '+56*30:00.5', '1234.5'])
        self.assertEqual(None, aPoint.sCoord)

    def test_APoint_sCoord_3(self):
        aPoint = AlignStar(sCoord=56)
        self.assertEqual(None, aPoint.sCoord)

    def test_APoint_sCoord_4(self):
        aPoint = AlignStar(sCoord=('12:45:EE.01', 67))
        self.assertEqual(None, aPoint.sCoord)

    def test_APoint_pierside_1(self):
        aPoint = AlignStar()
        aPoint.pierside = 'E'
        self.assertEqual('E', aPoint.pierside)

    def test_APoint_pierside_2(self):
        # An unknown pierside value resets to None.
        aPoint = AlignStar()
        aPoint.pierside = 'x'
        self.assertEqual(None, aPoint.pierside)

    def test_APoint_sidereal_1(self):
        # A non-numeric sidereal time resets to None.
        aPoint = AlignStar()
        aPoint.sidereal = 'E'
        self.assertEqual(None, aPoint.sidereal)

    def test_APoint_sidereal_2(self):
        aPoint = AlignStar()
        aPoint.sidereal = 12.5
        self.assertEqual(aPoint.sidereal.hours, 12.5)
| 36.19883 | 67 | 0.565913 | 762 | 6,190 | 4.531496 | 0.124672 | 0.21894 | 0.281494 | 0.267593 | 0.761367 | 0.714741 | 0.711555 | 0.704025 | 0.637417 | 0.602664 | 0 | 0.081141 | 0.269305 | 6,190 | 170 | 68 | 36.411765 | 0.682291 | 0.050565 | 0 | 0.511811 | 0 | 0 | 0.028622 | 0 | 0 | 0 | 0 | 0 | 0.377953 | 1 | 0.125984 | false | 0.007874 | 0.03937 | 0 | 0.173228 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6af67372c3219c2f6fa233f42b1c3bd1e5875a73 | 24,195 | py | Python | Packs/IntegrationsAndIncidentsHealthCheck/Scripts/InstancesCheck_FailedCategories/test_data/constants.py | diCagri/content | c532c50b213e6dddb8ae6a378d6d09198e08fc9f | [
"MIT"
] | 799 | 2016-08-02T06:43:14.000Z | 2022-03-31T11:10:11.000Z | Packs/IntegrationsAndIncidentsHealthCheck/Scripts/InstancesCheck_FailedCategories/test_data/constants.py | diCagri/content | c532c50b213e6dddb8ae6a378d6d09198e08fc9f | [
"MIT"
] | 9,317 | 2016-08-07T19:00:51.000Z | 2022-03-31T21:56:04.000Z | Packs/IntegrationsAndIncidentsHealthCheck/Scripts/InstancesCheck_FailedCategories/test_data/constants.py | diCagri/content | c532c50b213e6dddb8ae6a378d6d09198e08fc9f | [
"MIT"
] | 1,297 | 2016-08-04T13:59:00.000Z | 2022-03-31T23:43:06.000Z | INCIDENTS_RESULTS = [
{'ModuleName': 'InnerServicesModule', 'Brand': 'Builtin', 'Category': 'Builtin', 'ID': '', 'Version': 0, 'Type': 1,
'Contents': {'ErrorsPrivateDoNotUse': None, 'data': [{'CustomFields': {
'dbotpredictionprobability': 0,
'detectionsla': {'accumulatedPause': 0,
'breachTriggered': False,
'dueDate': '0001-01-01T00:00:00Z',
'endDate': '0001-01-01T00:00:00Z',
'lastPauseDate': '0001-01-01T00:00:00Z',
'runStatus': 'idle',
'sla': 20, 'slaStatus': -1,
'startDate': '0001-01-01T00:00:00Z',
'totalDuration': 0},
'integrationscategories': ['Endpoint',
'Utilities',
'Utilities',
'Utilities',
'Utilities',
'Endpoint',
'Messaging',
'Data Enrichment & Threat Intelligence'],
'integrationsfailedcategories': [
'Data Enrichment & Threat Intelligence',
'Vulnerability Management', 'Endpoint',
'Forensics & Malware Analysis',
'Data Enrichment & Threat Intelligence',
'Endpoint',
'Forensics & Malware Analysis'],
'integrationstestgrid': [{
'analystnote': '(2020-09-30) Need to get the new password',
'brand': 'Active Directory Query v2',
'category': 'Data Enrichment & Threat Intelligence',
'information': 'Failed to access LDAP server. Please validate the server host and port are configured correctly (85)',
'instance': 'Active Directory Query v2_instance_1'},
{'brand': 'BigFix',
'category': 'Vulnerability Management',
'information': "Invalid URL '\\xd7\\x91\\xd7\\x94\\xd7\\xa0/api/help': No schema supplied. Perhaps you meant http://בהנ/api/help? (85)",
'instance': 'BigFix_instance_1'},
{'analystnote': '',
'brand': 'Tanium Threat Response',
'category': 'Endpoint',
'information': "Error in Tanium Threat Response Integration: Invalid URL 'sfgdfg/api/v2/session/login': No schema supplied. Perhaps you meant http://sfgdfg/api/v2/session/login? (85)",
'instance': 'Tanium Threat Response_instance_1'},
{
'analystnote': 'IT maintenance work',
'brand': 'Threat Grid',
'category': 'Forensics & Malware Analysis',
'information': 'Error in API call to Threat Grid service api/v3/session/whoami - {"api_version":3,"id":3964620,"error":{"message":"Unauthorized","code":401,"errors":[{"code":401,"message":"Unauthorized","help":"/doc/main/index.html","report":"support@threatgrid.com"}]}} (85)',
'instance': 'Threat Grid_instance_1'},
{
'analystnote': '(2020-09-30) Creating a new API key',
'brand': 'VirusTotal',
'category': 'Data Enrichment & Threat Intelligence',
'information': '403 Forbidden - The API key is not valid (85)',
'instance': 'VirusTotal_instance_1'},
{
'brand': 'remoteaccess',
'category': 'Endpoint',
'information': 'ssh: handshake failed: ssh: unable to authenticate, attempted methods [none], no supported methods remain',
'instance': 'remoteaccess_instance_1'},
{
'brand': 'Threat Grid',
'category': 'Forensics & Malware Analysis',
'information': 'Error in API call to Threat Grid service api/v3/session/whoami - {"api_version":3,"id":2359938,"error":{"message":"Unauthorized","code":401,"errors":[{"code":401,"message":"Unauthorized","help":"/doc/main/index.html","report":"support@threatgrid.com"}]}} (85)',
'instance': 'Threat Grid_instance_1'}],
'numberofentriesiderrors': 8,
'numberoffailedincidents': 4,
'playbooknameswithfailedtasks': [
'AutoFocusPolling',
'JOB - Integrations and Playbooks Health Check',
'JOB - Integrations and Playbooks Health Check',
'Account Enrichment - Generic v2.1'],
'playbooksfailedcommands': [
'RunPollingCommand', 'SetGridField',
'SetGridField', 'ad-get-user'],
'playbooktaskserrors': [{
'analystnote': '(2020-09-30) John checking',
'commandname': 'RunPollingCommand',
'creationdate': '2020-09-29 16:48:30.261438285Z',
'incidentid': '7',
'numberoferrors': 2,
'owner': '',
'playbookname': 'AutoFocusPolling',
'task_id': None,
'taskid': '3',
'taskname': 'RunPollingCommand'},
{
'commandname': 'SetGridField',
'creationdate': '2020-09-29 14:02:45.82647067Z',
'incidentid': '3',
'numberoferrors': 2,
'owner': 'admin',
'playbookname': 'JOB - Integrations and Playbooks Health Check',
'task_id': None,
'taskid': '132',
'taskname': 'Creates failed Integrations grid'},
{
'commandname': 'SetGridField',
'creationdate': '2020-09-29 14:02:45.82647067Z',
'incidentid': '3',
'numberoferrors': 2,
'owner': 'admin',
'playbookname': 'JOB - Integrations and Playbooks Health Check',
'task_id': None,
'taskid': '131',
'taskname': 'Set empty fields to incident grid'},
{
'analystnote': '(2020-09-30) Need to fix integration',
'commandname': 'ad-get-user',
'creationdate': '2020-09-30 15:44:06.930751906Z',
'incidentid': '48',
'numberoferrors': 2,
'owner': 'admin',
'playbookname': 'Account Enrichment - Generic v2.1',
'task_id': None,
'taskid': '5',
'taskname': 'Get account info from Active Directory'}],
'remediationsla': {'accumulatedPause': 0,
'breachTriggered': False,
'dueDate': '0001-01-01T00:00:00Z',
'endDate': '0001-01-01T00:00:00Z',
'lastPauseDate': '0001-01-01T00:00:00Z',
'runStatus': 'idle',
'sla': 7200,
'slaStatus': -1,
'startDate': '0001-01-01T00:00:00Z',
'totalDuration': 0},
'similarincident': ['49'],
'timetoassignment': {'accumulatedPause': 0,
'breachTriggered': False,
'dueDate': '0001-01-01T00:00:00Z',
'endDate': '0001-01-01T00:00:00Z',
'lastPauseDate': '0001-01-01T00:00:00Z',
'runStatus': 'idle',
'sla': 0,
'slaStatus': -1,
'startDate': '0001-01-01T00:00:00Z',
'totalDuration': 0},
'totalfailedinstances': 6,
'totalgoodinstances': 8,
'totalinstances': 14,
'unassignedincidents': ['7'],
'urlsslverification': []}, 'ShardID': 0,
'account': '', 'activated': '0001-01-01T00:00:00Z',
'allRead': False, 'allReadWrite': False, 'attachment': None,
'autime': 1601481099025455400, 'canvases': None,
'category': '', 'closeNotes': 'Created a new incident type.',
'closeReason': '',
'closed': '2020-10-03T12:11:14.655131155Z',
'closingUserId': 'DBot',
'created': '2020-09-30T15:51:39.025455427Z',
'dbotCreatedBy': 'admin', 'dbotCurrentDirtyFields': None,
'dbotDirtyFields': None, 'dbotMirrorDirection': '',
'dbotMirrorId': '', 'dbotMirrorInstance': '',
'dbotMirrorLastSync': '0001-01-01T00:00:00Z',
'dbotMirrorTags': None, 'details': '', 'droppedCount': 0,
'dueDate': '0001-01-01T00:00:00Z', 'feedBased': False,
'hasRole': False, 'id': '50', 'investigationId': '50',
'isPlayground': False,
'labels': [{'type': 'Instance', 'value': 'admin'},
{'type': 'Brand', 'value': 'Manual'}],
'lastJobRunTime': '0001-01-01T00:00:00Z',
'lastOpen': '0001-01-01T00:00:00Z', 'linkedCount': 0,
'linkedIncidents': None,
'modified': '2020-10-03T12:11:14.66586887Z',
'name': 'Integrations and Incidents Health Check',
'notifyTime': '2020-09-30T16:03:26.267646986Z',
'occurred': '2020-09-30T15:51:39.02545521Z',
'openDuration': 245975, 'owner': 'admin', 'parent': '',
'phase': '',
'playbookId': 'JOB - Integrations and Playbooks Health Check',
'previousAllRead': False, 'previousAllReadWrite': False,
'previousRoles': None, 'rawCategory': '',
'rawCloseReason': '', 'rawJSON': '',
'rawName': 'Integrations and Incidents Health Check',
'rawPhase': '',
'rawType': 'Integrations and Incidents Health Check',
'reason': '', 'reminder': '0001-01-01T00:00:00Z',
'roles': None, 'runStatus': 'waiting', 'severity': 0,
'sla': 0, 'sortValues': ['_score'], 'sourceBrand': 'Manual',
'sourceInstance': 'admin', 'status': 2,
'type': 'Integrations and Incidents Health Check',
'version': 33}], 'total': 1}, 'HumanReadable': None,
'ImportantEntryContext': None, 'EntryContext': None, 'IgnoreAutoExtract': False, 'ReadableContentsFormat': '',
'ContentsFormat': 'json', 'File': '', 'FileID': '', 'FileMetadata': None, 'System': '', 'Note': False,
'Evidence': False, 'EvidenceID': '', 'Tags': None,
'Metadata': {'id': '', 'version': 0, 'modified': '0001-01-01T00:00:00Z', 'sortValues': None, 'roles': None,
'allRead': False, 'allReadWrite': False, 'previousRoles': None, 'previousAllRead': False,
'previousAllReadWrite': False, 'hasRole': False, 'dbotCreatedBy': '', 'ShardID': 0, 'type': 1,
'created': '2020-10-03T13:25:21.989620639Z', 'retryTime': '0001-01-01T00:00:00Z', 'user': '',
'errorSource': '', 'contents': '', 'format': 'json', 'investigationId': '51', 'file': '',
'fileID': '', 'parentId': '166@51', 'pinned': False, 'fileMetadata': None,
'parentContent': '!getIncidents id="50"', 'parentEntryTruncated': False, 'system': '',
'reputations': None, 'category': '', 'note': False, 'isTodo': False, 'tags': None, 'tagsRaw': None,
'startDate': '0001-01-01T00:00:00Z', 'times': 0, 'recurrent': False,
'endingDate': '0001-01-01T00:00:00Z', 'timezoneOffset': 0, 'cronView': False, 'scheduled': False,
'entryTask': None, 'taskId': '', 'playbookId': '', 'reputationSize': 0, 'contentsSize': 0,
'brand': 'Builtin', 'instance': 'Builtin', 'IndicatorTimeline': None, 'mirrored': False},
'IndicatorTimeline': None}]
INCIDENTS_RESULTS_EXPECTED = {'Contents': {'params': {'layout': 'horizontal'},
'stats': [{'color': '#0003e8',
'data': [1],
'groups': None,
'label': '0',
'name': '0'}]},
'ContentsFormat': 'pie',
'Type': 17}
INCIDENTS_RESULTS_NO_FAILED = [
{'ModuleName': 'InnerServicesModule', 'Brand': 'Builtin', 'Category': 'Builtin', 'ID': '', 'Version': 0, 'Type': 1,
'Contents': {'ErrorsPrivateDoNotUse': None, 'data': [{'CustomFields': {
'dbotpredictionprobability': 0,
'detectionsla': {'accumulatedPause': 0,
'breachTriggered': False,
'dueDate': '0001-01-01T00:00:00Z',
'endDate': '0001-01-01T00:00:00Z',
'lastPauseDate': '0001-01-01T00:00:00Z',
'runStatus': 'idle',
'sla': 20, 'slaStatus': -1,
'startDate': '0001-01-01T00:00:00Z',
'totalDuration': 0},
'integrationscategories': ['Endpoint',
'Utilities',
'Utilities',
'Utilities',
'Utilities',
'Endpoint',
'Messaging',
'Data Enrichment & Threat Intelligence'],
'integrationstestgrid': [{
'analystnote': '(2020-09-30) Need to get the new password',
'brand': 'Active Directory Query v2',
'category': 'Data Enrichment & Threat Intelligence',
'information': 'Failed to access LDAP server. Please validate the server host and port are configured correctly (85)',
'instance': 'Active Directory Query v2_instance_1'},
{'brand': 'BigFix',
'category': 'Vulnerability Management',
'information': "Invalid URL '\\xd7\\x91\\xd7\\x94\\xd7\\xa0/api/help': No schema supplied. Perhaps you meant http://בהנ/api/help? (85)",
'instance': 'BigFix_instance_1'},
{'analystnote': '',
'brand': 'Tanium Threat Response',
'category': 'Endpoint',
'information': "Error in Tanium Threat Response Integration: Invalid URL 'sfgdfg/api/v2/session/login': No schema supplied. Perhaps you meant http://sfgdfg/api/v2/session/login? (85)",
'instance': 'Tanium Threat Response_instance_1'},
{
'analystnote': 'IT maintenance work',
'brand': 'Threat Grid',
'category': 'Forensics & Malware Analysis',
'information': 'Error in API call to Threat Grid service api/v3/session/whoami - {"api_version":3,"id":3964620,"error":{"message":"Unauthorized","code":401,"errors":[{"code":401,"message":"Unauthorized","help":"/doc/main/index.html","report":"support@threatgrid.com"}]}} (85)',
'instance': 'Threat Grid_instance_1'},
{
'analystnote': '(2020-09-30) Creating a new API key',
'brand': 'VirusTotal',
'category': 'Data Enrichment & Threat Intelligence',
'information': '403 Forbidden - The API key is not valid (85)',
'instance': 'VirusTotal_instance_1'},
{
'brand': 'remoteaccess',
'category': 'Endpoint',
'information': 'ssh: handshake failed: ssh: unable to authenticate, attempted methods [none], no supported methods remain',
'instance': 'remoteaccess_instance_1'},
{
'brand': 'Threat Grid',
'category': 'Forensics & Malware Analysis',
'information': 'Error in API call to Threat Grid service api/v3/session/whoami - {"api_version":3,"id":2359938,"error":{"message":"Unauthorized","code":401,"errors":[{"code":401,"message":"Unauthorized","help":"/doc/main/index.html","report":"support@threatgrid.com"}]}} (85)',
'instance': 'Threat Grid_instance_1'}],
'numberofentriesiderrors': 8,
'numberoffailedincidents': 4,
'playbooknameswithfailedtasks': [
'AutoFocusPolling',
'JOB - Integrations and Playbooks Health Check',
'JOB - Integrations and Playbooks Health Check',
'Account Enrichment - Generic v2.1'],
'playbooksfailedcommands': [
'RunPollingCommand', 'SetGridField',
'SetGridField', 'ad-get-user'],
'playbooktaskserrors': [{
'analystnote': '(2020-09-30) John checking',
'commandname': 'RunPollingCommand',
'creationdate': '2020-09-29 16:48:30.261438285Z',
'incidentid': '7',
'numberoferrors': 2,
'owner': '',
'playbookname': 'AutoFocusPolling',
'task_id': None,
'taskid': '3',
'taskname': 'RunPollingCommand'},
{
'commandname': 'SetGridField',
'creationdate': '2020-09-29 14:02:45.82647067Z',
'incidentid': '3',
'numberoferrors': 2,
'owner': 'admin',
'playbookname': 'JOB - Integrations and Playbooks Health Check',
'task_id': None,
'taskid': '132',
'taskname': 'Creates failed Integrations grid'},
{
'commandname': 'SetGridField',
'creationdate': '2020-09-29 14:02:45.82647067Z',
'incidentid': '3',
'numberoferrors': 2,
'owner': 'admin',
'playbookname': 'JOB - Integrations and Playbooks Health Check',
'task_id': None,
'taskid': '131',
'taskname': 'Set empty fields to incident grid'},
{
'analystnote': '(2020-09-30) Need to fix integration',
'commandname': 'ad-get-user',
'creationdate': '2020-09-30 15:44:06.930751906Z',
'incidentid': '48',
'numberoferrors': 2,
'owner': 'admin',
'playbookname': 'Account Enrichment - Generic v2.1',
'task_id': None,
'taskid': '5',
'taskname': 'Get account info from Active Directory'}],
'remediationsla': {'accumulatedPause': 0,
'breachTriggered': False,
'dueDate': '0001-01-01T00:00:00Z',
'endDate': '0001-01-01T00:00:00Z',
'lastPauseDate': '0001-01-01T00:00:00Z',
'runStatus': 'idle',
'sla': 7200,
'slaStatus': -1,
'startDate': '0001-01-01T00:00:00Z',
'totalDuration': 0},
'similarincident': ['49'],
'timetoassignment': {'accumulatedPause': 0,
'breachTriggered': False,
'dueDate': '0001-01-01T00:00:00Z',
'endDate': '0001-01-01T00:00:00Z',
'lastPauseDate': '0001-01-01T00:00:00Z',
'runStatus': 'idle',
'sla': 0,
'slaStatus': -1,
'startDate': '0001-01-01T00:00:00Z',
'totalDuration': 0},
'totalfailedinstances': 6,
'totalgoodinstances': 8,
'totalinstances': 14,
'unassignedincidents': ['7'],
'urlsslverification': []}, 'ShardID': 0,
'account': '', 'activated': '0001-01-01T00:00:00Z',
'allRead': False, 'allReadWrite': False, 'attachment': None,
'autime': 1601481099025455400, 'canvases': None,
'category': '', 'closeNotes': 'Created a new incident type.',
'closeReason': '',
'closed': '2020-10-03T12:11:14.655131155Z',
'closingUserId': 'DBot',
'created': '2020-09-30T15:51:39.025455427Z',
'dbotCreatedBy': 'admin', 'dbotCurrentDirtyFields': None,
'dbotDirtyFields': None, 'dbotMirrorDirection': '',
'dbotMirrorId': '', 'dbotMirrorInstance': '',
'dbotMirrorLastSync': '0001-01-01T00:00:00Z',
'dbotMirrorTags': None, 'details': '', 'droppedCount': 0,
'dueDate': '0001-01-01T00:00:00Z', 'feedBased': False,
'hasRole': False, 'id': '50', 'investigationId': '50',
'isPlayground': False,
'labels': [{'type': 'Instance', 'value': 'admin'},
{'type': 'Brand', 'value': 'Manual'}],
'lastJobRunTime': '0001-01-01T00:00:00Z',
'lastOpen': '0001-01-01T00:00:00Z', 'linkedCount': 0,
'linkedIncidents': None,
'modified': '2020-10-03T12:11:14.66586887Z',
'name': 'Integrations and Incidents Health Check',
'notifyTime': '2020-09-30T16:03:26.267646986Z',
'occurred': '2020-09-30T15:51:39.02545521Z',
'openDuration': 245975, 'owner': 'admin', 'parent': '',
'phase': '',
'playbookId': 'JOB - Integrations and Playbooks Health Check',
'previousAllRead': False, 'previousAllReadWrite': False,
'previousRoles': None, 'rawCategory': '',
'rawCloseReason': '', 'rawJSON': '',
'rawName': 'Integrations and Incidents Health Check',
'rawPhase': '',
'rawType': 'Integrations and Incidents Health Check',
'reason': '', 'reminder': '0001-01-01T00:00:00Z',
'roles': None, 'runStatus': 'waiting', 'severity': 0,
'sla': 0, 'sortValues': ['_score'], 'sourceBrand': 'Manual',
'sourceInstance': 'admin', 'status': 2,
'type': 'Integrations and Incidents Health Check',
'version': 33}], 'total': 1}, 'HumanReadable': None,
'ImportantEntryContext': None, 'EntryContext': None, 'IgnoreAutoExtract': False, 'ReadableContentsFormat': '',
'ContentsFormat': 'json', 'File': '', 'FileID': '', 'FileMetadata': None, 'System': '', 'Note': False,
'Evidence': False, 'EvidenceID': '', 'Tags': None,
'Metadata': {'id': '', 'version': 0, 'modified': '0001-01-01T00:00:00Z', 'sortValues': None, 'roles': None,
'allRead': False, 'allReadWrite': False, 'previousRoles': None, 'previousAllRead': False,
'previousAllReadWrite': False, 'hasRole': False, 'dbotCreatedBy': '', 'ShardID': 0, 'type': 1,
'created': '2020-10-03T13:25:21.989620639Z', 'retryTime': '0001-01-01T00:00:00Z', 'user': '',
'errorSource': '', 'contents': '', 'format': 'json', 'investigationId': '51', 'file': '',
'fileID': '', 'parentId': '166@51', 'pinned': False, 'fileMetadata': None,
'parentContent': '!getIncidents id="50"', 'parentEntryTruncated': False, 'system': '',
'reputations': None, 'category': '', 'note': False, 'isTodo': False, 'tags': None, 'tagsRaw': None,
'startDate': '0001-01-01T00:00:00Z', 'times': 0, 'recurrent': False,
'endingDate': '0001-01-01T00:00:00Z', 'timezoneOffset': 0, 'cronView': False, 'scheduled': False,
'entryTask': None, 'taskId': '', 'playbookId': '', 'reputationSize': 0, 'contentsSize': 0,
'brand': 'Builtin', 'instance': 'Builtin', 'IndicatorTimeline': None, 'mirrored': False},
'IndicatorTimeline': None}]
INCIDENTS_RESULTS_NO_FAILED_EXPECTED = {'Contents': {'params': {'layout': 'horizontal'},
'stats': [{'color': '#0003e8',
'data': [1],
'groups': None,
'label': '0',
'name': '0'}]},
'ContentsFormat': 'pie',
'Type': 17}
| 58.868613 | 294 | 0.497499 | 1,947 | 24,195 | 6.155624 | 0.178737 | 0.022028 | 0.040384 | 0.047726 | 0.98106 | 0.98106 | 0.98106 | 0.98106 | 0.98106 | 0.98106 | 0 | 0.093653 | 0.344203 | 24,195 | 410 | 295 | 59.012195 | 0.661688 | 0 | 0 | 0.945946 | 0 | 0.019656 | 0.515478 | 0.07919 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.004914 | 0.004914 | 0 | 0.004914 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed1b4a59f16eb2ffae23dfedcaf8c20b78bccc91 | 26,428 | py | Python | tools/work_dirs/vit-b16-bs64x4/vit-b16-bs64x4.py | TanZheling/mmclassification | 1c3ff80f4f8a0b57a57eb08f2325a0c4befb7201 | [
"Apache-2.0"
] | null | null | null | tools/work_dirs/vit-b16-bs64x4/vit-b16-bs64x4.py | TanZheling/mmclassification | 1c3ff80f4f8a0b57a57eb08f2325a0c4befb7201 | [
"Apache-2.0"
] | null | null | null | tools/work_dirs/vit-b16-bs64x4/vit-b16-bs64x4.py | TanZheling/mmclassification | 1c3ff80f4f8a0b57a57eb08f2325a0c4befb7201 | [
"Apache-2.0"
] | null | null | null | model = dict(
type='ImageClassifier',
backbone=dict(
type='VisionTransformer',
arch='b',
img_size=224,
patch_size=16,
drop_rate=0.1,
init_cfg=[
dict(
type='Kaiming',
layer='Conv2d',
mode='fan_in',
nonlinearity='linear')
]),
neck=None,
head=dict(
type='VisionTransformerClsHead',
num_classes=1000,
in_channels=768,
loss=dict(
type='LabelSmoothLoss', label_smooth_val=0.1,
mode='classy_vision'),
hidden_dim=3072),
train_cfg=dict(
augments=dict(
type='BatchMixup', alpha=0.2, num_classes=1000, prob=1.0)))
policy_imagenet = [[{
'type': 'Posterize',
'bits': 4,
'prob': 0.4
}, {
'type': 'Rotate',
'angle': 30.0,
'prob': 0.6
}],
[{
'type': 'Solarize',
'thr': 113.77777777777777,
'prob': 0.6
}, {
'type': 'AutoContrast',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Posterize',
'bits': 5,
'prob': 0.6
}, {
'type': 'Posterize',
'bits': 5,
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Solarize',
'thr': 142.22222222222223,
'prob': 0.2
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}],
[{
'type': 'Solarize',
'thr': 170.66666666666666,
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Posterize',
'bits': 6,
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'Rotate',
'angle': 10.0,
'prob': 0.2
}, {
'type': 'Solarize',
'thr': 28.444444444444443,
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.6
}, {
'type': 'Posterize',
'bits': 5,
'prob': 0.4
}],
[{
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}, {
'type': 'ColorTransform',
'magnitude': 0.0,
'prob': 0.4
}],
[{
'type': 'Rotate',
'angle': 30.0,
'prob': 0.4
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.0
}, {
'type': 'Equalize',
'prob': 0.8
}],
[{
'type': 'Invert',
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.4,
'prob': 0.6
}, {
'type': 'Contrast',
'magnitude': 0.8,
'prob': 1.0
}],
[{
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}, {
'type': 'ColorTransform',
'magnitude': 0.2,
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.8,
'prob': 0.8
}, {
'type': 'Solarize',
'thr': 56.888888888888886,
'prob': 0.8
}],
[{
'type': 'Sharpness',
'magnitude': 0.7,
'prob': 0.4
}, {
'type': 'Invert',
'prob': 0.6
}],
[{
'type': 'Shear',
'magnitude': 0.16666666666666666,
'prob': 0.6,
'direction': 'horizontal'
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.0,
'prob': 0.4
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Solarize',
'thr': 142.22222222222223,
'prob': 0.2
}],
[{
'type': 'Solarize',
'thr': 113.77777777777777,
'prob': 0.6
}, {
'type': 'AutoContrast',
'prob': 0.6
}],
[{
'type': 'Invert',
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.4,
'prob': 0.6
}, {
'type': 'Contrast',
'magnitude': 0.8,
'prob': 1.0
}],
[{
'type': 'Equalize',
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 0.6
}]]
dataset_type = 'ImageNet'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='RandomResizedCrop', size=224, backend='pillow'),
dict(type='RandomFlip', flip_prob=0.5, direction='horizontal'),
dict(
type='AutoAugment',
policies=[[{
'type': 'Posterize',
'bits': 4,
'prob': 0.4
}, {
'type': 'Rotate',
'angle': 30.0,
'prob': 0.6
}],
[{
'type': 'Solarize',
'thr': 113.77777777777777,
'prob': 0.6
}, {
'type': 'AutoContrast',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Posterize',
'bits': 5,
'prob': 0.6
}, {
'type': 'Posterize',
'bits': 5,
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Solarize',
'thr': 142.22222222222223,
'prob': 0.2
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}],
[{
'type': 'Solarize',
'thr': 170.66666666666666,
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Posterize',
'bits': 6,
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'Rotate',
'angle': 10.0,
'prob': 0.2
}, {
'type': 'Solarize',
'thr': 28.444444444444443,
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.6
}, {
'type': 'Posterize',
'bits': 5,
'prob': 0.4
}],
[{
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}, {
'type': 'ColorTransform',
'magnitude': 0.0,
'prob': 0.4
}],
[{
'type': 'Rotate',
'angle': 30.0,
'prob': 0.4
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.0
}, {
'type': 'Equalize',
'prob': 0.8
}],
[{
'type': 'Invert',
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.4,
'prob': 0.6
}, {
'type': 'Contrast',
'magnitude': 0.8,
'prob': 1.0
}],
[{
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}, {
'type': 'ColorTransform',
'magnitude': 0.2,
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.8,
'prob': 0.8
}, {
'type': 'Solarize',
'thr': 56.888888888888886,
'prob': 0.8
}],
[{
'type': 'Sharpness',
'magnitude': 0.7,
'prob': 0.4
}, {
'type': 'Invert',
'prob': 0.6
}],
[{
'type': 'Shear',
'magnitude': 0.16666666666666666,
'prob': 0.6,
'direction': 'horizontal'
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.0,
'prob': 0.4
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Solarize',
'thr': 142.22222222222223,
'prob': 0.2
}],
[{
'type': 'Solarize',
'thr': 113.77777777777777,
'prob': 0.6
}, {
'type': 'AutoContrast',
'prob': 0.6
}],
[{
'type': 'Invert',
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.4,
'prob': 0.6
}, {
'type': 'Contrast',
'magnitude': 0.8,
'prob': 1.0
}],
[{
'type': 'Equalize',
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 0.6
}]]),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='ToTensor', keys=['gt_label']),
dict(type='Collect', keys=['img', 'gt_label'])
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='Resize', size=(256, -1), backend='pillow'),
dict(type='CenterCrop', crop_size=224),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img'])
]
data = dict(
samples_per_gpu=16,
workers_per_gpu=1,
train=dict(
type='ImageNet',
data_prefix='/datasets/imagenet/train',
pipeline=[
dict(type='LoadImageFromFile'),
dict(type='RandomResizedCrop', size=224, backend='pillow'),
dict(type='RandomFlip', flip_prob=0.5, direction='horizontal'),
dict(
type='AutoAugment',
policies=[[{
'type': 'Posterize',
'bits': 4,
'prob': 0.4
}, {
'type': 'Rotate',
'angle': 30.0,
'prob': 0.6
}],
[{
'type': 'Solarize',
'thr': 113.77777777777777,
'prob': 0.6
}, {
'type': 'AutoContrast',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Posterize',
'bits': 5,
'prob': 0.6
}, {
'type': 'Posterize',
'bits': 5,
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Solarize',
'thr': 142.22222222222223,
'prob': 0.2
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}],
[{
'type': 'Solarize',
'thr': 170.66666666666666,
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Posterize',
'bits': 6,
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'Rotate',
'angle': 10.0,
'prob': 0.2
}, {
'type': 'Solarize',
'thr': 28.444444444444443,
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.6
}, {
'type': 'Posterize',
'bits': 5,
'prob': 0.4
}],
[{
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}, {
'type': 'ColorTransform',
'magnitude': 0.0,
'prob': 0.4
}],
[{
'type': 'Rotate',
'angle': 30.0,
'prob': 0.4
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.0
}, {
'type': 'Equalize',
'prob': 0.8
}],
[{
'type': 'Invert',
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.4,
'prob': 0.6
}, {
'type': 'Contrast',
'magnitude': 0.8,
'prob': 1.0
}],
[{
'type': 'Rotate',
'angle': 26.666666666666668,
'prob': 0.8
}, {
'type': 'ColorTransform',
'magnitude': 0.2,
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.8,
'prob': 0.8
}, {
'type': 'Solarize',
'thr': 56.888888888888886,
'prob': 0.8
}],
[{
'type': 'Sharpness',
'magnitude': 0.7,
'prob': 0.4
}, {
'type': 'Invert',
'prob': 0.6
}],
[{
'type': 'Shear',
'magnitude': 0.16666666666666666,
'prob': 0.6,
'direction': 'horizontal'
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.0,
'prob': 0.4
}, {
'type': 'Equalize',
'prob': 0.6
}],
[{
'type': 'Equalize',
'prob': 0.4
}, {
'type': 'Solarize',
'thr': 142.22222222222223,
'prob': 0.2
}],
[{
'type': 'Solarize',
'thr': 113.77777777777777,
'prob': 0.6
}, {
'type': 'AutoContrast',
'prob': 0.6
}],
[{
'type': 'Invert',
'prob': 0.6
}, {
'type': 'Equalize',
'prob': 1.0
}],
[{
'type': 'ColorTransform',
'magnitude': 0.4,
'prob': 0.6
}, {
'type': 'Contrast',
'magnitude': 0.8,
'prob': 1.0
}],
[{
'type': 'Equalize',
'prob': 0.8
}, {
'type': 'Equalize',
'prob': 0.6
}]]),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='ToTensor', keys=['gt_label']),
dict(type='Collect', keys=['img', 'gt_label'])
]),
val=dict(
type='ImageNet',
data_prefix='/datasets/imagenet/val',
ann_file='/datasets/imagenet/meta/val.txt',
pipeline=[
dict(type='LoadImageFromFile'),
dict(type='Resize', size=(256, -1), backend='pillow'),
dict(type='CenterCrop', crop_size=224),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img'])
]),
test=dict(
type='ImageNet',
data_prefix='/datasets/imagenet/val',
ann_file='/datasets/imagenet/meta/val.txt',
pipeline=[
dict(type='LoadImageFromFile'),
dict(type='Resize', size=(256, -1), backend='pillow'),
dict(type='CenterCrop', crop_size=224),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img'])
]))
evaluation = dict(interval=1, metric='accuracy')
optimizer = dict(type='AdamW', lr=0.003, weight_decay=0.3)
optimizer_config = dict(grad_clip=dict(max_norm=1.0))
paramwise_cfg = dict(
custom_keys=dict({
'.backbone.cls_token': dict(decay_mult=0.0),
'.backbone.pos_embed': dict(decay_mult=0.0)
}))
lr_config = dict(
policy='CosineAnnealing',
min_lr=0,
warmup='linear',
warmup_iters=10000,
warmup_ratio=0.0001)
runner = dict(type='EpochBasedRunner', max_epochs=300)
checkpoint_config = dict(interval=1)
log_config = dict(
interval=100,
hooks=[
dict(type='TextLoggerHook'),
dict(
type='WandbLoggerHook',
init_kwargs=dict(
project='transformer',
entity='zlt',
name='mmcls-vit-b-p16bs256x4-imagenet'))
])
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
work_dir = './work_dirs/vit-b16-bs64x4'
gpu_ids = range(0, 4)
| 34.911493 | 77 | 0.250832 | 1,547 | 26,428 | 4.241112 | 0.130575 | 0.099832 | 0.057613 | 0.086877 | 0.838134 | 0.833562 | 0.833562 | 0.82716 | 0.82716 | 0.82716 | 0 | 0.125389 | 0.623392 | 26,428 | 756 | 78 | 34.957672 | 0.533809 | 0 | 0 | 0.900794 | 0 | 0 | 0.151052 | 0.007984 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ed207231bc6f57f6b3fbed1c383ad301ff9c420f | 6,845 | py | Python | tests/components/humidifier/test_intent.py | liangleslie/core | cc807b4d597daaaadc92df4a93c6e30da4f570c6 | [
"Apache-2.0"
] | 30,023 | 2016-04-13T10:17:53.000Z | 2020-03-02T12:56:31.000Z | tests/components/humidifier/test_intent.py | liangleslie/core | cc807b4d597daaaadc92df4a93c6e30da4f570c6 | [
"Apache-2.0"
] | 24,710 | 2016-04-13T08:27:26.000Z | 2020-03-02T12:59:13.000Z | tests/components/humidifier/test_intent.py | liangleslie/core | cc807b4d597daaaadc92df4a93c6e30da4f570c6 | [
"Apache-2.0"
] | 11,956 | 2016-04-13T18:42:31.000Z | 2020-03-02T09:32:12.000Z | """Tests for the humidifier intents."""
from homeassistant.components.humidifier import (
ATTR_AVAILABLE_MODES,
ATTR_HUMIDITY,
DOMAIN,
SERVICE_SET_HUMIDITY,
SERVICE_SET_MODE,
intent,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
ATTR_SUPPORTED_FEATURES,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers.intent import IntentHandleError, async_handle
from tests.common import async_mock_service
async def test_intent_set_humidity(hass):
    """Test the set humidity intent."""
    # Humidifier is already ON, so no turn_on call is expected.
    hass.states.async_set(
        "humidifier.bedroom_humidifier", STATE_ON, {ATTR_HUMIDITY: 40}
    )
    set_humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
    power_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
    await intent.async_setup_intents(hass)

    slots = {"name": {"value": "Bedroom humidifier"}, "humidity": {"value": "50"}}
    result = await async_handle(hass, "test", intent.INTENT_HUMIDITY, slots)
    await hass.async_block_till_done()

    assert result.speech["plain"]["speech"] == "The bedroom humidifier is set to 50%"
    assert not power_on_calls
    # Exactly one set_humidity call, targeting the humidifier entity.
    assert len(set_humidity_calls) == 1
    service_call = set_humidity_calls[0]
    assert service_call.domain == DOMAIN
    assert service_call.service == SERVICE_SET_HUMIDITY
    assert service_call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
    assert service_call.data.get(ATTR_HUMIDITY) == 50
async def test_intent_set_humidity_and_turn_on(hass):
    """Test the set humidity intent for turned off humidifier."""
    # Humidifier starts OFF, so the intent must first turn it on.
    hass.states.async_set(
        "humidifier.bedroom_humidifier", STATE_OFF, {ATTR_HUMIDITY: 40}
    )
    humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
    turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
    await intent.async_setup_intents(hass)
    result = await async_handle(
        hass,
        "test",
        intent.INTENT_HUMIDITY,
        {"name": {"value": "Bedroom humidifier"}, "humidity": {"value": "50"}},
    )
    await hass.async_block_till_done()
    assert (
        result.speech["plain"]["speech"]
        == "Turned bedroom humidifier on and set humidity to 50%"
    )
    # Exactly one turn_on call targeting the humidifier...
    assert len(turn_on_calls) == 1
    call = turn_on_calls[0]
    assert call.domain == DOMAIN
    assert call.service == SERVICE_TURN_ON
    assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
    # ...followed by exactly one set_humidity call with the requested value.
    assert len(humidity_calls) == 1
    call = humidity_calls[0]
    assert call.domain == DOMAIN
    assert call.service == SERVICE_SET_HUMIDITY
    assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
    assert call.data.get(ATTR_HUMIDITY) == 50
async def test_intent_set_mode(hass):
    """Test the set mode intent."""
    # Humidifier is ON and advertises mode support (feature bit 1), so the
    # intent should only issue a set_mode call — no turn_on.
    hass.states.async_set(
        "humidifier.bedroom_humidifier",
        STATE_ON,
        {
            ATTR_HUMIDITY: 40,
            ATTR_SUPPORTED_FEATURES: 1,
            ATTR_AVAILABLE_MODES: ["home", "away"],
            ATTR_MODE: "home",
        },
    )
    mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
    turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
    await intent.async_setup_intents(hass)
    result = await async_handle(
        hass,
        "test",
        intent.INTENT_MODE,
        {"name": {"value": "Bedroom humidifier"}, "mode": {"value": "away"}},
    )
    await hass.async_block_till_done()
    assert (
        result.speech["plain"]["speech"]
        == "The mode for bedroom humidifier is set to away"
    )
    assert len(turn_on_calls) == 0
    # Exactly one set_mode call targeting the humidifier with the new mode.
    assert len(mode_calls) == 1
    call = mode_calls[0]
    assert call.domain == DOMAIN
    assert call.service == SERVICE_SET_MODE
    assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
    assert call.data.get(ATTR_MODE) == "away"
async def test_intent_set_mode_and_turn_on(hass):
    """Test the set mode intent."""
    # Humidifier starts OFF but supports modes; the intent should turn it on
    # and then apply the requested mode.
    attributes = {
        ATTR_HUMIDITY: 40,
        ATTR_SUPPORTED_FEATURES: 1,
        ATTR_AVAILABLE_MODES: ["home", "away"],
        ATTR_MODE: "home",
    }
    hass.states.async_set("humidifier.bedroom_humidifier", STATE_OFF, attributes)
    set_mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
    power_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
    await intent.async_setup_intents(hass)

    result = await async_handle(
        hass,
        "test",
        intent.INTENT_MODE,
        {"name": {"value": "Bedroom humidifier"}, "mode": {"value": "away"}},
    )
    await hass.async_block_till_done()

    expected_speech = "Turned bedroom humidifier on and set away mode"
    assert result.speech["plain"]["speech"] == expected_speech

    # One turn_on call for the entity...
    assert len(power_on_calls) == 1
    on_call = power_on_calls[0]
    assert on_call.domain == DOMAIN
    assert on_call.service == SERVICE_TURN_ON
    assert on_call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"

    # ...and one set_mode call with the requested mode.
    assert len(set_mode_calls) == 1
    mode_call = set_mode_calls[0]
    assert mode_call.domain == DOMAIN
    assert mode_call.service == SERVICE_SET_MODE
    assert mode_call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
    assert mode_call.data.get(ATTR_MODE) == "away"
async def test_intent_set_mode_tests_feature(hass):
    """Test the set mode intent where modes are not supported."""
    # No ATTR_SUPPORTED_FEATURES / available modes: handling must fail.
    hass.states.async_set(
        "humidifier.bedroom_humidifier", STATE_ON, {ATTR_HUMIDITY: 40}
    )
    mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
    await intent.async_setup_intents(hass)
    try:
        await async_handle(
            hass,
            "test",
            intent.INTENT_MODE,
            {"name": {"value": "Bedroom humidifier"}, "mode": {"value": "away"}},
        )
    except IntentHandleError as err:
        assert str(err) == "Entity bedroom humidifier does not support modes"
    else:
        assert False, "handling intent should have raised"
    # No service call may have been issued.
    assert not mode_calls
async def test_intent_set_unknown_mode(hass):
    """Test the set mode intent for unsupported mode."""
    hass.states.async_set(
        "humidifier.bedroom_humidifier",
        STATE_ON,
        {
            ATTR_HUMIDITY: 40,
            ATTR_SUPPORTED_FEATURES: 1,
            ATTR_AVAILABLE_MODES: ["home", "away"],
            ATTR_MODE: "home",
        },
    )
    mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
    await intent.async_setup_intents(hass)
    try:
        # "eco" is not among the entity's available modes, so this must raise.
        await async_handle(
            hass,
            "test",
            intent.INTENT_MODE,
            {"name": {"value": "Bedroom humidifier"}, "mode": {"value": "eco"}},
        )
        assert False, "handling intent should have raised"
    except IntentHandleError as err:
        assert str(err) == "Entity bedroom humidifier does not support eco mode"
    # No service call may have been issued.
    assert len(mode_calls) == 0
| 31.837209 | 85 | 0.655077 | 842 | 6,845 | 5.051069 | 0.098575 | 0.095932 | 0.076182 | 0.049377 | 0.889725 | 0.865036 | 0.842229 | 0.8253 | 0.815424 | 0.815424 | 0 | 0.00822 | 0.235793 | 6,845 | 214 | 86 | 31.985981 | 0.804817 | 0.004821 | 0 | 0.713483 | 0 | 0 | 0.160673 | 0.053252 | 0 | 0 | 0 | 0 | 0.224719 | 1 | 0 | false | 0 | 0.022472 | 0 | 0.022472 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed2438d430f4b45df9915934f878078839c684ea | 7,372 | py | Python | franka_insertion_prediction/utils/solver.py | uenian33/Franka_Panda_IK_Sensor | c9956fb7a7f1d570104296af72aa2a600085ae6e | [
"MIT"
] | null | null | null | franka_insertion_prediction/utils/solver.py | uenian33/Franka_Panda_IK_Sensor | c9956fb7a7f1d570104296af72aa2a600085ae6e | [
"MIT"
] | null | null | null | franka_insertion_prediction/utils/solver.py | uenian33/Franka_Panda_IK_Sensor | c9956fb7a7f1d570104296af72aa2a600085ae6e | [
"MIT"
] | 1 | 2021-12-07T11:47:03.000Z | 2021-12-07T11:47:03.000Z | import torch
import torch.nn as nn
import os
from torch import optim
import numpy as np
import torch_optimizer as torch_optim
from sklearn.metrics import confusion_matrix, accuracy_score
class Solver(object):
    """Training/evaluation harness for a regression model.

    Wraps a model plus train/test loaders, trains with the Yogi optimizer
    under a cosine-annealing LR schedule, and checkpoints the weights with
    the lowest test MSE observed so far.
    """

    def __init__(self, args, model, train_loader, test_loader):
        """Store model, loaders and checkpoint paths; optionally load weights.

        args is a dict-like config with keys 'pretrain_model_path',
        'online_model_path', 'load_model', 'resume', 'lr', 'epochs'.
        """
        self.args = args
        self.train_loader, self.test_loader = train_loader, test_loader
        self.model = model  # e.g. a Transformer; moving to GPU is the caller's job
        self.mse = nn.MSELoss()
        self.pretrain_model_save_path = os.path.join(self.args['pretrain_model_path'])
        self.online_model_save_path = os.path.join(self.args['online_model_path'])
        if args['load_model']:
            print("Using pretrained model")
            self.load_model()

    def test_dataset(self, db='test'):
        """Return the summed MSE of the model over the given split.

        db : 'train' | 'test' — which loader to evaluate on.
        """
        self.model.eval()
        mse_loss = 0
        if db.lower() == 'train':
            loader = self.train_loader
        elif db.lower() == 'test':
            loader = self.test_loader
        for (inputs, labels) in loader:
            with torch.no_grad():
                preds = self.model(inputs)
            mse_loss += self.mse(preds, labels)
        return mse_loss

    def test(self):
        """Evaluate on both splits, print the losses, return (train, test)."""
        train_acc = self.test_dataset('train')
        print("Tr Acc: %.2f" % (train_acc))
        test_acc = self.test_dataset('test')
        print("Te Acc: %.2f" % (test_acc))
        return train_acc, test_acc

    def load_model(self):
        """Load the online checkpoint when resuming, else the pretrain one."""
        if self.args['resume']:
            self.model.load_state_dict(torch.load(self.online_model_save_path))
        else:
            self.model.load_state_dict(torch.load(self.pretrain_model_save_path))
        return

    def save_model(self, path):
        """Persist the model's state dict to `path`."""
        torch.save(self.model.state_dict(), path)
        return

    def train(self, epochs=None, log_epoch=50):
        """Train for `epochs` epochs (default: args['epochs']).

        Every `log_epoch` epochs the model is evaluated on the test split and
        checkpointed if it achieves a new lowest test MSE.
        """
        total_iters = 0
        # BUG FIX: the original initialised the best value to 0 and saved when
        # the test MSE *exceeded* it (`mse > best_acc`), i.e. it checkpointed
        # the worst model. Track the lowest loss instead.
        best_loss = float('inf')
        iter_per_epoch = len(self.train_loader)
        optimizer = torch_optim.Yogi(self.model.parameters(),
                                     self.args['lr'], weight_decay=1e-5)
        cos_decay = optim.lr_scheduler.CosineAnnealingLR(optimizer, self.args['epochs'])
        if epochs is None:
            epochs = self.args['epochs']
        for epoch in range(epochs):
            self.model.train()
            for i, (inputs, labels) in enumerate(self.train_loader):
                total_iters += 1
                preds = self.model(inputs)
                clf_loss = self.mse(preds, labels)
                optimizer.zero_grad()
                clf_loss.backward()
                optimizer.step()
                if epoch % log_epoch == 0:
                    print('Ep: %d/%d, it: %d/%d, total_iters: %d, err: %.4f'
                          % (epoch + 1, self.args['epochs'], i + 1, iter_per_epoch,
                             total_iters, clf_loss))
            if (epoch + 1) % log_epoch == 0:
                mse = self.test_dataset('test')
                print("Test acc: %0.2f" % (mse))
                if mse < best_loss:
                    best_loss = mse
                    # Checkpoint to the path matching the current mode.
                    if self.args['resume']:
                        self.save_model(self.online_model_save_path)
                    else:
                        self.save_model(self.pretrain_model_save_path)
            cos_decay.step()
class SequenceSolver(object):
    """Training/evaluation harness for a model consuming (inputs, force) pairs.

    Mirrors `Solver`, with an extra `force` input stream fed to the model.
    """

    def __init__(self, args, model, train_loader, test_loader):
        """Store model, loaders and checkpoint paths; optionally load weights."""
        self.args = args
        self.train_loader, self.test_loader = train_loader, test_loader
        self.model = model  # e.g. a Transformer; moving to GPU is the caller's job
        self.mse = nn.MSELoss()
        self.pretrain_model_save_path = os.path.join(self.args['pretrain_model_path'])
        self.online_model_save_path = os.path.join(self.args['online_model_path'])
        print('--------Network--------')
        print(self.model)
        if args['load_model']:
            print("Using pretrained model")
            self.load_model()

    def test_dataset(self, db='test'):
        """Return the summed MSE over the given split ('train' or 'test')."""
        self.model.eval()
        mse_loss = 0
        if db.lower() == 'train':
            loader = self.train_loader
        elif db.lower() == 'test':
            loader = self.test_loader
        for (inputs, force, labels) in loader:
            with torch.no_grad():
                preds = self.model(inputs, force)
            mse_loss += self.mse(preds, labels)
            # Debug print of per-sample scaled error (kept from the original).
            print((preds - labels) / np.array([1000, 1000, 1]))
        return mse_loss

    def test(self):
        """Evaluate on both splits, print the losses, return (train, test)."""
        train_acc = self.test_dataset('train')
        print("Tr Acc: %.2f" % (train_acc))
        test_acc = self.test_dataset('test')
        print("Te Acc: %.2f" % (test_acc))
        return train_acc, test_acc

    def load_model(self):
        """Load a checkpoint into the model.

        BUG FIX: the original read `self.finalmodel_save_path`, an attribute
        never defined anywhere in this class (AttributeError at call time).
        Use the configured online/pretrain paths, as `Solver.load_model` does.
        """
        if self.args['resume']:
            self.model.load_state_dict(torch.load(self.online_model_save_path))
        else:
            self.model.load_state_dict(torch.load(self.pretrain_model_save_path))
        return

    def save_model(self, path):
        """Persist the model's state dict to `path` (parity with `Solver`)."""
        torch.save(self.model.state_dict(), path)
        return

    def train(self):
        """Train for args['epochs'] epochs, checkpointing the best weights.

        BUG FIXES vs. the original:
        * best-model tracking started at 0 and tested `mse < best`, so no
          checkpoint was ever written; the best loss now starts at +inf;
        * checkpoints went to `self.model_save_path` and
          `self.finalmodel_save_path`, neither of which is ever defined
          (AttributeError); the configured online/pretrain path is used
          instead, consistent with `Solver`.
        """
        total_iters = 0
        best_loss = float('inf')
        iter_per_epoch = len(self.train_loader)
        save_path = (self.online_model_save_path if self.args['resume']
                     else self.pretrain_model_save_path)
        optimizer = torch_optim.Yogi(self.model.parameters(),
                                     self.args['lr'], weight_decay=1e-5)
        cos_decay = optim.lr_scheduler.CosineAnnealingLR(optimizer, self.args['epochs'])
        for epoch in range(self.args['epochs']):
            self.model.train()
            for i, (inputs, force, labels) in enumerate(self.train_loader):
                total_iters += 1
                preds = self.model(inputs, force)
                clf_loss = self.mse(preds, labels)
                optimizer.zero_grad()
                clf_loss.backward()
                optimizer.step()
                if epoch % 100 == 0:
                    print('Ep: %d/%d, it: %d/%d, total_iters: %d, err: %.4f'
                          % (epoch + 1, self.args['epochs'], i + 1, iter_per_epoch,
                             total_iters, clf_loss))
            if (epoch + 1) % 100 == 0:
                mse = self.test_dataset('test')
                print("Test acc: %0.2f" % (mse))
                if mse < best_loss:
                    best_loss = mse
                    self.save_model(save_path)
            cos_decay.step()
        # Ensure at least one checkpoint exists even if no eval epoch fired.
        if best_loss == float('inf'):
            self.save_model(save_path)
| 32.910714 | 107 | 0.557379 | 895 | 7,372 | 4.389944 | 0.126257 | 0.05294 | 0.035632 | 0.021379 | 0.89972 | 0.881649 | 0.848816 | 0.792823 | 0.766098 | 0.766098 | 0 | 0.013878 | 0.315789 | 7,372 | 224 | 108 | 32.910714 | 0.765067 | 0.121541 | 0 | 0.743056 | 0 | 0.013889 | 0.069558 | 0.003563 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076389 | false | 0 | 0.048611 | 0 | 0.1875 | 0.090278 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed3c873321b1ce3f3ff419d426fdc5dd2b4e153b | 28,805 | py | Python | orthoexon/table.py | jessicalettes/orthoexon | 463ad1908364c602cf75dbddb0b16a42f4100a36 | [
"BSD-3-Clause"
] | null | null | null | orthoexon/table.py | jessicalettes/orthoexon | 463ad1908364c602cf75dbddb0b16a42f4100a36 | [
"BSD-3-Clause"
] | null | null | null | orthoexon/table.py | jessicalettes/orthoexon | 463ad1908364c602cf75dbddb0b16a42f4100a36 | [
"BSD-3-Clause"
] | null | null | null |
__author__ = 'rhythmicstar'
import copy
import gffutils
import pandas as pd
import numpy as np
from .util import separate, splitstart, splitend
# Pre-built gffutils databases over the two species' GTF annotations
# (human GENCODE v19, mouse GENCODE vM5 — restricted to RBFOX2/FMR1/SNAP25).
# NOTE(review): absolute, user-specific paths, opened at import time —
# consider making these configurable.
species1DB = gffutils.FeatureDB('/Users/rhythmicstar/projects/exon_evolution//'
                                'gencode.v19.annotation.humanrbfox2and'
                                'fmr1andsnap25.gtf.db', keep_order=True)
species2DB = gffutils.FeatureDB('/Users/rhythmicstar/projects/exon_evolution//'
                                'gencode.vM5.annotation.mouserbfox2andfmr1and'
                                'snap25.gtf.db', keep_order=True)
class OrthologyTable(object):
    """Build exon-orthology tables from BLAST nucleotide/protein alignments.

    Parses blastn/blastp output tables and merges them into a single
    per-exon-pair orthology table.
    """

    # Column names used for the two compared exon IDs in parsed tables.
    exon1 = 'exon1'
    exon2 = 'exon2'

    def __init__(self, nucleotide_table, protein_table, species1_name,
                 species1_version, species2_name, species2_version):
        # NOTE(review): the species name/version arguments are accepted but
        # never stored or used by this class — confirm whether they are needed.
        self.nucleotide = self.read_blast_table(nucleotide_table, 'nucleotide')
        self.protein = self.read_blast_table(protein_table, 'protein')

    def read_blast_table(self, filename, sequence='protein'):
        """Parse blast alignment table, with only non-identical comparisons

        Parameters
        ----------
        filename : str
            Path of the blast output
        sequence : "protein" | "nucleotide"
            Specifies whether the table being read is from nucleotide or
            protein comparisons, which modifies the column names of the output
            pandas DataFrame

        Output
        ------
        table : pandas.DataFrame
            Parsed table of the exon alignments
        """
        abbrev = 'prot' if sequence == 'protein' else "nuc"
        columns = [self.exon1, self.exon2, '{}_length_of_overlap'.format(abbrev),
                   '{}_PID'.format(abbrev)]
        table = pd.read_table(filename, names=columns)
        # Get only rows where exon1 and exon2 don't have the same ID
        different = table[self.exon1] != table[self.exon2]
        table = table.loc[different]
        return table

    def remove_duplicate_comparisons(self, table):
        """Check for the same comparison in the opposite order

        E.g. (exonA, exonB) is the same as (exonB, exonA), so we want to
        remove that

        Parameters
        ----------
        table : pandas.DataFrame
            Modified blast orthology table

        Output
        ------
        table: pandas.DataFrame
            Parsed table with same comparison in opposite order removed
        """
        seen = set([])
        rows_to_use = []
        for i, row in table.iterrows():
            exon1 = row[self.exon1]
            exon2 = row[self.exon2]
            # Sort the pair so (A, B) and (B, A) hash identically.
            pair = tuple(sorted([exon1, exon2]))
            if pair not in seen:
                seen.update({pair})
                rows_to_use.append(i)
        table = table.loc[rows_to_use]
        table = table.reset_index(drop=True)
        return table

    def blast_table_setup(self, ortho_table):
        """Collect the exon IDs of both comparison columns into a
        de-duplicated one-column table.

        NOTE(review): assumes a 7-column input with an 'Exon' column; the
        tables built by ``read_blast_table`` use 'exon1'/'exon2', so this
        appears to expect the module-level table layout — confirm before use.
        Also drops only columns 1-3 where the module-level variant drops 1-6,
        and relies on the long-deprecated ``DataFrame.ix`` indexer.
        """
        ref_blast_table = copy.deepcopy(ortho_table)
        rows = int(ref_blast_table.size/7)
        # Append the second-column IDs below the first-column IDs...
        for row in range (0, rows):
            ref_blast_table.ix[(row + rows), 0] = ref_blast_table.ix[row, 1]
        # ...then drop the other columns and de-duplicate the IDs.
        cols = [1,2,3]
        ref_blast_table.drop(ref_blast_table.columns[cols],axis=1,inplace=True)
        ref_blast_table = ref_blast_table.drop_duplicates('Exon')
        ref_blast_table = ref_blast_table.reset_index(drop=True)
        return ref_blast_table

    def add_ortho_table_columns(self, nucleotide_table, protein_table,
                                rows, protrow):
        """add Prot_PID, Prot_Length_of_Overlap to ortho_table

        Parameters
        ----------
        nucleotide_table : pandas.DataFrame
            Modified blastn orthology table
        protein_table : pandas.DataFrame
            Modified blastp orthology table

        Output
        ------
        ortho_table: pandas.DataFrame
            Orthology table with rows from both protein and nucleotide blast
        """
        ortholog_columns = pd.DataFrame(columns=['Prot_PID'])
        nucleotide_table.insert(3, 'Prot_Length_of_Overlap', 'Below Threshold of ...')
        nucleotide_table = pd.concat([nucleotide_table, ortholog_columns], axis=1)
        nucleotide_table = nucleotide_table.replace(np.nan,'Below Threshold of ...', regex=True)
        for rowN in range (0, rows):
            for rowP in range (0, protrow):
                exon1n = nucleotide_table.iat[rowN, 0]
                exon1p = protein_table.iat[rowP, 0]
                exon2n = nucleotide_table.iat[rowN, 1]
                exon2p = protein_table.iat[rowP, 1]
                # NOTE(review): '&' binds tighter than '==', so this parses as
                # exon1n == (exon1p & exon2n) == exon2p. The module-level
                # create_ortho_table() uses the parenthesised form
                # ((exon1n == exon1p) & (exon2n == exon2p)) — likely the intent.
                if exon1n == exon1p & exon2n == exon2p:
                    # Copy the protein PID/overlap into the nucleotide row...
                    nucleotide_table.iat[rowN, 5] = protein_table.iat[rowP, 3]
                    nucleotide_table.iat[rowN, 3] = protein_table.iat[rowP, 2]
                    # ...and mark the matched protein row for removal.
                    protein_table.ix[rowP, 0] = np.nan
                    break
        protein_table.dropna(axis=0,inplace=True)
        protein_table = protein_table.reset_index(drop=True)
        # Protein-only rows get placeholder nucleotide columns.
        protein_table.insert(2, 'Nuc_Length_of_Overlap', 'Below Threshold of ...')
        protein_table.insert(4, 'Nuc_PID', 'Below Threshold of ...')
        ortho_table = pd.concat([nucleotide_table, protein_table])
        ortho_table = ortho_table.reset_index(drop=True)
        return ortho_table

    def fill_in_blast_table(self, exon, speciesname, speciesversion):
        """Build one BLAST-table row [id, species(version), location, gene,
        transcript, length] for ``exon``.

        NOTE(review): ``exon_ID`` and ``exon_ID2`` are not defined in this
        scope or at module level, so calling this raises NameError; it looks
        like a partial extraction of the lookup loop in create_ortho_table().
        Also returns None when the IDs do not match.
        """
        GFFUtilsExonId = str(exon['exon_id'])  # gffutils id
        GFFUtilsExonId = separate(GFFUtilsExonId)
        if (GFFUtilsExonId == exon_ID2):
            exon_geneid = str(exon.attributes['gene_id'])
            exon_geneid = separate(exon_geneid)
            exon_transcriptid = str(exon.attributes['transcript_id'])
            exon_transcriptid = separate(exon_transcriptid)
            new_blast_row = [exon_ID, "{}{}{}{}".format(
                speciesname, '(', speciesversion, ')'), (
                "{}:{}-{}:{}:{}".format(exon.chrom, exon.start, exon.stop,
                                        exon.strand, exon.frame)), exon_geneid,
                exon_transcriptid, exon.stop - exon.start + 1]
            return new_blast_row

    # figure out how to loop and get location
    # def location(self):
    #     ("{}:{}-{}:{}:{}".format(exon.chrom, exon.start, exon.stop,
    #                              exon.strand, exon.frame)

    # fix this!!! get rid of blast and call location
    # def orthology(self, ortho_table, blast_table, start1 = '0', start2 = '1',
    #               end1 = '0', end2 = '1'):
    #     """add orthology classification to ortho_table
    #
    #     Parameters
    #     ----------
    #     ortho_table : pandas.DataFrame
    #         Modified blast orthology table
    #     blast_table :
    #         Table with
    #
    #     Output
    #     ------
    #     table: pandas.DataFrame
    #         Orthology table with added columns for protein pid and overlap
    #     """
    #
    #     if (((ortho_table.iat[row, 0])[0:7]) != ((ortho_table.iat[row, 1])[0:7])):
    #         ortho_table.ix[row, 'Relationship'] = 'Orthologous'
    #     else:
    #         for inrow in range (0, blastRows):
    #             if (ortho_table.iat[row, 0] == blast_table.iat[inrow, 0]):
    #                 start1 = splitstart(blast_table.iat[inrow, 2])
    #                 end1 = splitend(blast_table.iat[inrow, 2])
    #             if (ortho_table.iat[row, 1] == blast_table.iat[inrow, 0]):
    #                 start2 = splitstart(blast_table.iat[inrow, 2])
    #                 end2 = splitend(blast_table.iat[inrow, 2])
    #         if (((start1 >= start2) and (end2 >= end1)) or
    #                 ((start2 >= start1) and (end1 >= end2)) or
    #                 ((start2 >= start1) and (end2 >= end1) and
    #                  (start2 <= end1)) or
    #                 ((start1 >= start2) and (end1 >= end2) and
    #                  (start1 <= end2))):
    #             ortho_table.iat[row, 6] = 'Overlapping Genomic Loci'
    #             if (blast_table.iat[inrow, 2] == blast_table.iat[inrow, 2]):
    #                 ortho_table.iat[row, 6] = 'Identical Genomic Loci'
    #         else:
    #             ortho_table.iat[row, 6] = 'Paralogous'
    #     return ortho_table

    def save_to_csv(self, table, table_type):
        """Save table to csv format

        Parameters
        ----------
        table_type : type of table being saved
            Either ortho or blast
        """
        blast_columns = ['Exon', 'Species(Version)',
                         'Chrom:Start-Stop:Strand:Offset', 'Gene', 'Transcript',
                         'Exon_Length']
        ortho_columns = ['Exon', 'Exon2', 'Nuc_Length_of_Overlap',
                         'Prot_Length_of_Overlap', 'Nuc_PID', 'Prot_PID',
                         'Relationship']
        # Output filename is fixed by table type, written to the working dir.
        if table_type == 'blast':
            filename = "BLAST_Table.csv"
            column = blast_columns
        else:
            filename = "Ortho_Table.csv"
            column = ortho_columns
        table.to_csv(filename, columns= column, index=False)
def create_ortho_table(species1name, species2name, species1version,
                       species2version, blastnucfilename, blastprotfilename):
    """Build and save the exon orthology table and BLAST reference table.

    NOTE(review): this function is not runnable as written:
    * ``read_blast_table`` / ``remove_duplicate_comparisons`` are
      OrthologyTable methods and ``self`` is undefined at module scope, so
      the first four calls raise NameError; their return values are also
      discarded;
    * ``ortho_table`` and ``prot_table`` are then used before ever being
      assigned in this scope.
    It appears to be a module-level draft of the OrthologyTable workflow;
    the comments below document the apparent intended flow. Depends on the
    module globals ``species1DB``/``species2DB`` and the helpers
    ``separate``/``splitstart``/``splitend`` from .util.
    """
    read_blast_table(self, blastnucfilename, sequence='nucleotide')
    read_blast_table(self, blastprotfilename, sequence='protein')
    remove_duplicate_comparisons(self, ortho_table)
    remove_duplicate_comparisons(self, prot_table)

    # add the other columns to the ortho_table
    ortholog_columns = pd.DataFrame(columns=['Prot_PID'])
    # Overlap_Column = pd.DataFrame(columns=['Prot_Length_of_Overlap'])
    ortho_table.insert(3, 'Prot_Length_of_Overlap', 'Below Threshold of ...')
    # append new dataframe to ortho_table
    ortho_table = pd.concat([ortho_table, ortholog_columns], axis=1)
    ortho_table = ortho_table.replace(np.nan,'Below Threshold of ...', regex=True)
    BLAST_Table = []
    # to drop a row that has the same two exons
    rows = int(ortho_table.size/6)
    # loop through each row
    for row in range (0, rows):
        if ortho_table.iat[row, 0] == ortho_table.iat[row, 1]:
            ortho_table.ix[row, 0] = np.nan
    # remove each duplicate
    ortho_table.dropna(axis=0,inplace=True)
    ortho_table = ortho_table.reset_index(drop=True)
    protrow = int(prot_table.size/4)
    # loop through each row
    for row in range (0, protrow):
        if prot_table.iat[row, 0] == prot_table.iat[row, 1]:
            prot_table.ix[row, 0] = np.nan
    # remove each duplicate
    prot_table.dropna(axis=0,inplace=True)
    prot_table = prot_table.reset_index(drop=True)
    # to remove rows in nuc table that have the same two exons in the opposite order
    seen = set([])
    rows_to_use = []
    for i, row in ortho_table.iterrows():
        exon1 = row['Exon']
        exon2 = row['Exon2']
        # Sort the pair so (A, B) and (B, A) count as the same comparison.
        pair = tuple(sorted([exon1, exon2]))
        if pair not in seen:
            seen.update({pair})
            rows_to_use.append(i)
    ortho_table_no_duplicate_comparisons = ortho_table.loc[rows_to_use]
    ortho_table_no_duplicate_comparisons = ortho_table_no_duplicate_comparisons.reset_index(drop=True)
    ortho_table = ortho_table_no_duplicate_comparisons
    # to remove rows in protein table that have the same two exons in the opposite order
    seen = set([])
    rows_to_use = []
    for i, row in prot_table.iterrows():
        exon1 = row['Exon']
        exon2 = row['Exon2']
        pair = tuple(sorted([exon1, exon2]))
        if pair not in seen:
            seen.update({pair})
            rows_to_use.append(i)
    prot_table_no_duplicate_comparisons = prot_table.loc[rows_to_use]
    prot_table_no_duplicate_comparisons = prot_table_no_duplicate_comparisons.reset_index(drop=True)
    prot_table = prot_table_no_duplicate_comparisons
    rows = int(ortho_table.size/6)
    protrow = int(prot_table.size/4)
    # Merge the protein PID/overlap into the matching nucleotide rows.
    for rowN in range (0, rows):
        for rowP in range (0,protrow):
            exon1n = ortho_table.iat[rowN, 0]
            exon1p = prot_table.iat[rowP, 0]
            exon2n = ortho_table.iat[rowN, 1]
            exon2p = prot_table.iat[rowP, 1]
            if ((exon1n == exon1p) & (exon2n == exon2p)):
                # put protein pid in nuc table
                ortho_table.iat[rowN, 5] = prot_table.iat[rowP, 3]
                ortho_table.iat[rowN, 3] = prot_table.iat[rowP, 2]
                # put na into protein table
                prot_table.ix[rowP, 0] = np.nan
                break
    # remove each duplicate
    prot_table.dropna(axis=0,inplace=True)
    prot_table = prot_table.reset_index(drop=True)
    # in prot_table, add col with 'Nuc_PID' between 'Length_of_Overlap' and 'Prot_PID'
    prot_table.insert(2, 'Nuc_Length_of_Overlap', 'Below Threshold of ...')
    prot_table.insert(4, 'Nuc_PID', 'Below Threshold of ...')
    # add protein table at end
    ortho_table = pd.concat([ortho_table, prot_table])
    ortho_table = ortho_table.reset_index(drop=True)
    # create new dataframe
    Relationship_Column = pd.DataFrame(columns=['Relationship'])
    # append new dataframe to ortho_table
    ortho_table = pd.concat([ortho_table, Relationship_Column], axis=1)
    # create BLAST_Table dataframe with exon ids
    Ref_BLAST_Table = copy.deepcopy(ortho_table)
    rows = int(Ref_BLAST_Table.size/7)
    for row in range(0, rows):
        Ref_BLAST_Table.ix[(row + rows), 0] = Ref_BLAST_Table.ix[row, 1]
    cols = [1,2,3,4,5,6]
    Ref_BLAST_Table.drop(Ref_BLAST_Table.columns[cols],axis=1,inplace=True)
    Ref_BLAST_Table = Ref_BLAST_Table.drop_duplicates('Exon')
    Ref_BLAST_Table = Ref_BLAST_Table.reset_index(drop=True)
    # in form to do
    # get data to fill in table
    rows = int(Ref_BLAST_Table.size)
    # Grab the 7-character prefix of the first species-1 exon ID, used below
    # to decide which annotation database a given exon ID belongs to.
    for exonCode in species1DB.features_of_type('CDS'):
        Species1Code = str(exonCode['exon_id'])
        Species1Code = separate(Species1Code)
        Species1Code = Species1Code[0:7]
        break
    # loop through each exon and get its gene and length
    for row in range(0, rows):
        exon_ID = Ref_BLAST_Table.ix[row, 'Exon']
        exon_ID2 = separate(exon_ID)
        # determine which file to look in
        if exon_ID2[0:7] == Species1Code:
            for exon in species1DB.features_of_type('CDS'):
                gffutilsexonid = str(exon['exon_id'])  # gffutils id
                gffutilsexonid = separate(gffutilsexonid)
                if gffutilsexonid == exon_ID2:
                    exon_geneid = str(exon.attributes['gene_id'])
                    exon_geneid = separate(exon_geneid)
                    exon_transcriptid = str(exon.attributes['transcript_id'])
                    exon_transcriptid = separate(exon_transcriptid)
                    New_BLAST_Row = [exon_ID, ("{}{}{}{}").format(species1name, '(', species1version, ')'),
                                     ("{}:{}-{}:{}:{}".format(exon.chrom, exon.start, exon.stop,
                                                              exon.strand, exon.frame)), exon_geneid,
                                     exon_transcriptid, exon.stop - exon.start + 1]
                    BLAST_Table.append(New_BLAST_Row)
        else:
            # if exon is mouse
            for exon in species2DB.features_of_type('CDS'):
                gffutilsexonid = str(exon['exon_id'])  # gffutils id
                gffutilsexonid = separate(gffutilsexonid)
                if gffutilsexonid == exon_ID2:
                    exon_geneid = str(exon.attributes['gene_id'])
                    exon_geneid = separate(exon_geneid)
                    exon_transcriptid = str(exon.attributes['transcript_id'])
                    exon_transcriptid = separate(exon_transcriptid)
                    New_BLAST_Row = [exon_ID, ("{}{}{}{}").format(species2name, '(', species2version, ')'),
                                     ("{}:{}-{}:{}:{}".format(exon.chrom, exon.start, exon.stop,
                                                              exon.strand, exon.frame)), exon_geneid,
                                     exon_transcriptid, exon.stop - exon.start + 1]
                    BLAST_Table.append(New_BLAST_Row)
    BLAST_Table = pd.DataFrame(BLAST_Table)
    BLAST_Table.columns = ['Exon', 'Species(Version)', 'Chrom:Start-Stop:Strand:Offset', 'Gene', 'Transcript', 'Exon_Length']
    # fill in row for paralogous or orthologous
    rows = int(ortho_table.size/7)
    blastRows = int(BLAST_Table.size/6)
    for row in range (0, rows):
        # NOTE(review): the closing parenthesis is misplaced — this slices the
        # boolean comparison result rather than the second exon ID; the intent
        # is (ortho_table.iat[row, 0])[0:7] != (ortho_table.iat[row, 1])[0:7],
        # as in the commented-out orthology() method above.
        # Different species prefixes -> orthologous.
        if ((ortho_table.iat[row, 0])[0:7] != ortho_table.iat[row, 1])[0:7]:
            ortho_table.ix[row, 'Relationship'] = 'Orthologous'
        # Same species prefix: classify by genomic-coordinate overlap.
        else:
            exonOne = ortho_table.iat[row, 0]
            exonTwo = ortho_table.iat[row, 1]
            location_one = 0
            location_two = 1
            start1 = '0'
            start2 = '1'
            end1 = '0'
            end2 = '1'
            # Look up both exons' genomic locations in the BLAST table.
            for innerrow in range (0, blastRows):
                if exonOne == BLAST_Table.iat[innerrow, 0]:
                    location_one = BLAST_Table.iat[innerrow, 2]
                    start1 = splitstart(location_one)
                    end1 = splitend(location_one)
                if exonTwo == BLAST_Table.iat[innerrow, 0]:
                    location_two = BLAST_Table.iat[innerrow, 2]
                    start2 = splitstart(location_two)
                    end2 = splitend(location_two)
            # Any overlap of [start1, end1] with [start2, end2]?
            if (((start1 >= start2) and (end2 >= end1)) or
                    ((start2 >= start1) and (end1 >= end2)) or
                    ((start2 >= start1) and (end2 >= end1) and
                     (start2 <= end1)) or
                    ((start1 >= start2) and (end1 >= end2) and
                     (start1 <= end2))):
                ortho_table.iat[row, 6] = 'Overlapping Genomic Loci'
                if location_one == location_two:
                    ortho_table.iat[row, 6] = 'Identical Genomic Loci'
            else:
                ortho_table.iat[row, 6] = 'Paralogous'
    # to save BLAST_Table
    BLAST_Table.to_csv("BLAST_Table.csv", columns=['Exon', 'Species(Version)', 'Chrom:Start-Stop:Strand:Offset',
                                                   'Gene', 'Transcript', 'Exon_Length'], index=False)
    # to save ortho_table
    ortho_table.to_csv("ortho_table.csv", columns=['Exon', 'Exon2', 'Nuc_Length_of_Overlap', 'Prot_Length_of_Overlap',
                                                   'Nuc_PID', 'Prot_PID', 'Relationship'], index=False)
def _drop_mirrored_pairs(table):
    """Return *table* keeping only the first of each (A, B)/(B, A) pair.

    The BLAST output lists every comparison twice, once in each direction;
    only the first occurrence of each unordered exon pair is retained.
    """
    seen = set()
    rows_to_use = []
    for i, row in table.iterrows():
        pair = tuple(sorted([row['Exon'], row['Exon2']]))
        if pair not in seen:
            seen.add(pair)
            rows_to_use.append(i)
    return table.loc[rows_to_use].reset_index(drop=True)


def create_blast_table(species1name, species2name, species1version, species2version,
                       nuc_table_path='/Users/rhythmicstar/blast/db//nuctable.html',
                       prot_table_path='/Users/rhythmicstar/blast/db//protable.html'):
    """Merge nucleotide and protein BLAST hit tables into one ortholog table.

    Reads the tab-separated nucleotide and protein BLAST result files,
    removes self hits and mirror-image duplicates, merges protein scores
    into the matching nucleotide rows, classifies each exon pair as
    Orthologous / Paralogous / Overlapping / Identical, and writes
    ``BLAST_Table.csv`` and ``ortho_table.csv`` to the working directory.

    Parameters
    ----------
    species1name, species2name : str
        Display names of the two species, used in the BLAST table output.
    species1version, species2version : str
        Assembly/annotation versions, shown as ``name(version)``.
    nuc_table_path, prot_table_path : str, optional
        Locations of the BLAST result tables; the defaults preserve the
        previously hard-coded paths, so existing callers are unaffected.

    Notes
    -----
    Relies on module-level names defined elsewhere in this file:
    ``species1DB`` / ``species2DB`` (gffutils databases) and the helpers
    ``separate``, ``splitstart`` and ``splitend``.
    """
    Ortho_Table = pd.read_table(nuc_table_path,
                                names=['Exon', 'Exon2', 'Nuc_Length_of_Overlap', 'Nuc_PID'])
    Prot_Table = pd.read_table(prot_table_path,
                               names=['Exon', 'Exon2', 'Prot_Length_of_Overlap', 'Prot_PID'])
    # Add the protein columns to the nucleotide table.  They start out as
    # the "below threshold" placeholder and are overwritten wherever a
    # protein hit exists for the same exon pair.
    Ortholog_Columns = pd.DataFrame(columns=['Prot_PID'])
    Ortho_Table.insert(3, 'Prot_Length_of_Overlap', 'Below Threshold of ...')
    Ortho_Table = pd.concat([Ortho_Table, Ortholog_Columns], axis=1)
    Ortho_Table = Ortho_Table.replace(np.nan, 'Below Threshold of ...', regex=True)
    BLAST_Table = []

    # Drop self hits (an exon compared against itself) from both tables.
    # Bug fix: the original read "ifOrtho_Table..." / "ifProt_Table..."
    # (a SyntaxError) and assigned through the removed .ix indexer.
    rows = int(Ortho_Table.size / 6)
    for row in range(0, rows):
        if Ortho_Table.iat[row, 0] == Ortho_Table.iat[row, 1]:
            Ortho_Table.iat[row, 0] = np.nan
    Ortho_Table.dropna(axis=0, inplace=True)
    Ortho_Table = Ortho_Table.reset_index(drop=True)

    protrow = int(Prot_Table.size / 4)
    for row in range(0, protrow):
        if Prot_Table.iat[row, 0] == Prot_Table.iat[row, 1]:
            Prot_Table.iat[row, 0] = np.nan
    Prot_Table.dropna(axis=0, inplace=True)
    Prot_Table = Prot_Table.reset_index(drop=True)

    # Remove rows that repeat the same two exons in the opposite order.
    Ortho_Table = _drop_mirrored_pairs(Ortho_Table)
    Prot_Table = _drop_mirrored_pairs(Prot_Table)

    # Merge each protein hit into the nucleotide row for the same exon
    # pair; rows merged this way are removed from Prot_Table so that only
    # protein-only hits remain there.
    rows = int(Ortho_Table.size / 6)
    protrow = int(Prot_Table.size / 4)
    for rowN in range(0, rows):
        for rowP in range(0, protrow):
            exon1n = Ortho_Table.iat[rowN, 0]
            exon1p = Prot_Table.iat[rowP, 0]
            exon2n = Ortho_Table.iat[rowN, 1]
            exon2p = Prot_Table.iat[rowP, 1]
            # Bug fix: the original used "exon1n == exon1p & exon2n == exon2p";
            # "&" binds tighter than "==", so the wrong values were compared.
            if exon1n == exon1p and exon2n == exon2p:
                # Copy the protein PID and overlap length into the nuc row.
                Ortho_Table.iat[rowN, 5] = Prot_Table.iat[rowP, 3]
                Ortho_Table.iat[rowN, 3] = Prot_Table.iat[rowP, 2]
                # Mark the protein row as consumed.
                Prot_Table.iat[rowP, 0] = np.nan
                break
    Prot_Table.dropna(axis=0, inplace=True)
    Prot_Table = Prot_Table.reset_index(drop=True)

    # Give the remaining protein-only rows the same column layout as
    # Ortho_Table, then append them.
    Prot_Table.insert(2, 'Nuc_Length_of_Overlap', 'Below Threshold of ...')
    Prot_Table.insert(4, 'Nuc_PID', 'Below Threshold of ...')
    Ortho_Table = pd.concat([Ortho_Table, Prot_Table])
    Ortho_Table = Ortho_Table.reset_index(drop=True)

    # Empty Relationship column, filled in during classification below.
    Relationship_Column = pd.DataFrame(columns=['Relationship'])
    Ortho_Table = pd.concat([Ortho_Table, Relationship_Column], axis=1)

    # Build a one-column frame holding every exon id seen in either the
    # 'Exon' or 'Exon2' column.  .loc with a new label enlarges the frame
    # (replacement for the removed .ix behaviour).
    Ref_BLAST_Table = copy.deepcopy(Ortho_Table)
    rows = int(Ref_BLAST_Table.size / 7)
    for row in range(0, rows):
        Ref_BLAST_Table.loc[row + rows, 'Exon'] = Ref_BLAST_Table.loc[row, 'Exon2']
    cols = [1, 2, 3, 4, 5, 6]
    Ref_BLAST_Table.drop(Ref_BLAST_Table.columns[cols], axis=1, inplace=True)
    Ref_BLAST_Table = Ref_BLAST_Table.drop_duplicates('Exon')
    Ref_BLAST_Table = Ref_BLAST_Table.reset_index(drop=True)

    # First 7 characters of species 1's exon ids, used to decide which
    # gffutils database an exon belongs to.
    rows = int(Ref_BLAST_Table.size)
    for exonCode in species1DB.features_of_type('CDS'):
        Species1Code = separate(str(exonCode['exon_id']))[0:7]
        break

    # Look up each exon's gene, transcript and coordinates in the matching
    # species database and record one BLAST_Table row per matching CDS.
    for row in range(0, rows):
        exon_ID = Ref_BLAST_Table.loc[row, 'Exon']
        exon_ID2 = separate(exon_ID)
        if exon_ID2[0:7] == Species1Code:
            database, name, version = species1DB, species1name, species1version
        else:
            database, name, version = species2DB, species2name, species2version
        for exon in database.features_of_type('CDS'):
            gffutilsexonid = separate(str(exon['exon_id']))
            if gffutilsexonid == exon_ID2:
                exon_geneid = separate(str(exon.attributes['gene_id']))
                exon_transcriptid = separate(str(exon.attributes['transcript_id']))
                BLAST_Table.append([
                    exon_ID,
                    '{}({})'.format(name, version),
                    '{}:{}-{}:{}:{}'.format(exon.chrom, exon.start, exon.stop,
                                            exon.strand, exon.frame),
                    exon_geneid,
                    exon_transcriptid,
                    exon.stop - exon.start + 1,
                ])
    BLAST_Table = pd.DataFrame(BLAST_Table)
    BLAST_Table.columns = ['Exon', 'Species(Version)',
                           'Chrom:Start-Stop:Strand:Offset', 'Gene',
                           'Transcript', 'Exon_Length']

    # Classify every exon pair.  Bug fixes: the original condition had an
    # unbalanced parenthesis, and the "# paralogous"/"# orthologous"
    # comments were attached to the wrong branches.
    rows = int(Ortho_Table.size / 7)
    blastRows = int(BLAST_Table.size / 6)
    for row in range(0, rows):
        if (Ortho_Table.iat[row, 0])[0:7] != (Ortho_Table.iat[row, 1])[0:7]:
            # Different 7-character id prefixes -> different species ->
            # orthologous.
            Ortho_Table.loc[row, 'Relationship'] = 'Orthologous'
        else:
            # Same species -> paralogous, unless the genomic loci overlap.
            exonOne = Ortho_Table.iat[row, 0]
            exonTwo = Ortho_Table.iat[row, 1]
            locationOne = 0
            locationTwo = 1
            start1 = '0'
            start2 = '1'
            end1 = '0'
            end2 = '1'
            for innerrow in range(0, blastRows):
                if exonOne == BLAST_Table.iat[innerrow, 0]:
                    locationOne = BLAST_Table.iat[innerrow, 2]
                    start1 = splitstart(locationOne)
                    end1 = splitend(locationOne)
                if exonTwo == BLAST_Table.iat[innerrow, 0]:
                    locationTwo = BLAST_Table.iat[innerrow, 2]
                    start2 = splitstart(locationTwo)
                    end2 = splitend(locationTwo)
            # NOTE(review): the sentinel values above are strings, which
            # suggests splitstart/splitend return strings -- if so these
            # >= comparisons are lexicographic, not numeric.  Confirm and
            # convert to int if coordinates can differ in digit count.
            if (((start1 >= start2) and (end2 >= end1)) or
                    ((start2 >= start1) and (end1 >= end2)) or
                    ((start2 >= start1) and (end2 >= end1) and
                     (start2 <= end1)) or
                    ((start1 >= start2) and (end1 >= end2) and
                     (start1 <= end2))):
                Ortho_Table.iat[row, 6] = 'Overlapping Genomic Loci'
                if locationOne == locationTwo:
                    Ortho_Table.iat[row, 6] = 'Identical Genomic Loci'
            else:
                Ortho_Table.iat[row, 6] = 'Paralogous'

    # Save both result tables in the working directory.
    BLAST_Table.to_csv("BLAST_Table.csv",
                       columns=['Exon', 'Species(Version)',
                                'Chrom:Start-Stop:Strand:Offset',
                                'Gene', 'Transcript', 'Exon_Length'],
                       index=False)
    Ortho_Table.to_csv("ortho_table.csv",
                       columns=['Exon', 'Exon2', 'Nuc_Length_of_Overlap',
                                'Prot_Length_of_Overlap', 'Nuc_PID',
                                'Prot_PID', 'Relationship'],
                       index=False)
ed493407897cce23c303815d343cc2b6b9102810 | 1,207 | py | Python | xadmin/demo_app/app/migrations/0011_auto_20201117_1427.py | HelloN1co/A-Detection-Tool-for-Traffic-Objects | ead815d3968559dd640257ca946f86ad390495b6 | [
"MIT"
] | null | null | null | xadmin/demo_app/app/migrations/0011_auto_20201117_1427.py | HelloN1co/A-Detection-Tool-for-Traffic-Objects | ead815d3968559dd640257ca946f86ad390495b6 | [
"MIT"
] | null | null | null | xadmin/demo_app/app/migrations/0011_auto_20201117_1427.py | HelloN1co/A-Detection-Tool-for-Traffic-Objects | ead815d3968559dd640257ca946f86ad390495b6 | [
"MIT"
] | null | null | null | # Generated by Django 2.1 on 2020-11-17 14:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen the file-path columns on Collect and History.

    Each ``destFilePath``/``srcFilePath`` becomes a plain
    ``CharField(max_length=256)`` and each ``thumnail`` column gets the
    same width plus an empty-string default.
    """

    dependencies = [
        ('app', '0010_auto_20201117_1425'),
    ]

    # Both models receive the same three field alterations, so build the
    # operation list from the cross product instead of spelling out six
    # near-identical AlterField blocks (same order: collect first, then
    # history; destFilePath, srcFilePath, thumnail within each).
    operations = [
        migrations.AlterField(
            model_name=model,
            name=field,
            field=(models.CharField(default='', max_length=256)
                   if field == 'thumnail'
                   else models.CharField(max_length=256)),
        )
        for model in ('collect', 'history')
        for field in ('destFilePath', 'srcFilePath', 'thumnail')
    ]
| 27.431818 | 63 | 0.553438 | 108 | 1,207 | 6.046296 | 0.361111 | 0.183767 | 0.229709 | 0.266462 | 0.777948 | 0.777948 | 0.722818 | 0.722818 | 0.722818 | 0.523737 | 0 | 0.059333 | 0.329743 | 1,207 | 43 | 64 | 28.069767 | 0.747837 | 0.035626 | 0 | 0.810811 | 1 | 0 | 0.111876 | 0.019793 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.027027 | 0 | 0.108108 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ed64e0b624129c25328b28083c4e03e1850184ea | 2,726 | py | Python | baquet/sql/directory.py | calebglawson/baquet | 9b28ed9bb2f0247b392b2fa5cafed7d0840b45e6 | [
"MIT"
] | null | null | null | baquet/sql/directory.py | calebglawson/baquet | 9b28ed9bb2f0247b392b2fa5cafed7d0840b45e6 | [
"MIT"
] | 46 | 2020-06-11T00:54:56.000Z | 2020-10-11T22:55:48.000Z | baquet/sql/directory.py | calebglawson/baquet | 9b28ed9bb2f0247b392b2fa5cafed7d0840b45e6 | [
"MIT"
] | null | null | null | '''
Directory of users in the users folder, used for quick lookups.
'''
from sqlalchemy import Column, Integer, String, Boolean, DateTime
from sqlalchemy.ext.declarative import declarative_base
BASE = declarative_base()
class DirectorySQL(BASE):
    '''
    Stores the top level info for a user in the user folder.

    Column names mirror Twitter user-object fields.  ``user_id`` is the
    primary key (stored as a string) and ``last_updated`` records when
    this row was last refreshed.
    '''
    __tablename__ = 'directory'

    contributors_enabled = Column(Boolean)
    created_at = Column(DateTime)
    default_profile = Column(Boolean)
    default_profile_image = Column(Boolean)
    description = Column(String)
    entities = Column(String)
    favorites_count = Column(Integer)
    followers_count = Column(Integer)
    friends_count = Column(Integer)
    geo_enabled = Column(Boolean)
    has_extended_profile = Column(Boolean)
    user_id = Column(String, primary_key=True)  # Twitter user id, kept as a string
    is_translation_enabled = Column(Boolean)
    is_translator = Column(Boolean)
    lang = Column(String)
    listed_count = Column(Integer)
    location = Column(String)
    name = Column(String)
    needs_phone_verification = Column(Boolean)
    profile_banner_url = Column(String)
    profile_image_url = Column(String)
    protected = Column(Boolean)
    screen_name = Column(String)
    statuses_count = Column(Integer)
    suspended = Column(Boolean)
    url = Column(String)
    verified = Column(Boolean)
    last_updated = Column(DateTime)  # timestamp of the last refresh of this row
class CacheSQL(BASE):
    '''
    Ephemeral store of every user ever encountered.

    Same column layout as ``DirectorySQL`` (Twitter user-object fields),
    but backed by the 'cache' table rather than the curated directory.
    '''
    __tablename__ = 'cache'

    contributors_enabled = Column(Boolean)
    created_at = Column(DateTime)
    default_profile = Column(Boolean)
    default_profile_image = Column(Boolean)
    description = Column(String)
    entities = Column(String)
    favorites_count = Column(Integer)
    followers_count = Column(Integer)
    friends_count = Column(Integer)
    geo_enabled = Column(Boolean)
    has_extended_profile = Column(Boolean)
    user_id = Column(String, primary_key=True)  # Twitter user id, kept as a string
    is_translation_enabled = Column(Boolean)
    is_translator = Column(Boolean)
    lang = Column(String)
    listed_count = Column(Integer)
    location = Column(String)
    name = Column(String)
    needs_phone_verification = Column(Boolean)
    profile_banner_url = Column(String)
    profile_image_url = Column(String)
    protected = Column(Boolean)
    screen_name = Column(String)
    statuses_count = Column(Integer)
    suspended = Column(Boolean)
    url = Column(String)
    verified = Column(Boolean)
    last_updated = Column(DateTime)  # timestamp of the last refresh of this row
class TempJoinSQL(BASE):
    '''
    Table to temporarily join data, rather than using in_().
    '''
    # Composite primary key: (join_id, join_on) identifies one member
    # value within one temporary join set.
    __tablename__ = 'temp_joins'
    join_id = Column(String, primary_key=True)
    join_on = Column(String, primary_key=True)
| 30.977273 | 65 | 0.712399 | 311 | 2,726 | 6.012862 | 0.282958 | 0.152941 | 0.096257 | 0.047059 | 0.781818 | 0.767914 | 0.752941 | 0.752941 | 0.752941 | 0.752941 | 0 | 0 | 0.199193 | 2,726 | 87 | 66 | 31.333333 | 0.856619 | 0.082539 | 0 | 0.835821 | 0 | 0 | 0.009804 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.029851 | 0 | 0.985075 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
ed72b0e2b6a5ff579848eaecd9b60ad323500674 | 1,674 | py | Python | src/common/validator.py | BIBSYSDEV/data-persistence | 672c1aad916cc60e8585cfc6fd29e818ca5824c1 | [
"MIT-0"
] | null | null | null | src/common/validator.py | BIBSYSDEV/data-persistence | 672c1aad916cc60e8585cfc6fd29e818ca5824c1 | [
"MIT-0"
] | null | null | null | src/common/validator.py | BIBSYSDEV/data-persistence | 672c1aad916cc60e8585cfc6fd29e818ca5824c1 | [
"MIT-0"
] | null | null | null | from common.constants import Constants
def validate_resource(operation, resource):
    """Validate that *resource* carries the fields required by *operation*.

    Raises ValueError with a descriptive message at the first failing
    check; returns None when the resource is valid.  MODIFY additionally
    requires a resource identifier, which is echoed in the messages.
    Operations other than MODIFY/INSERT are accepted without checks.
    """
    if operation == Constants.OPERATION_MODIFY:
        identifier = resource.resource_identifier
        if identifier is None:
            raise ValueError('Resource has no identifier')
        # Guard-clause style: each failed check raises immediately, so the
        # checks below run in the same order as before.
        if resource.metadata is None:
            raise ValueError('Resource with identifier ' + identifier + ' has no metadata')
        if resource.files is None:
            raise ValueError('Resource with identifier ' + identifier + ' has no files')
        if resource.owner is None:
            raise ValueError('Resource with identifier ' + identifier + ' has no owner')
        if not isinstance(resource.metadata, dict):
            raise ValueError(
                'Resource with identifier ' + identifier + ' has invalid attribute type for metadata')
        if not isinstance(resource.files, dict):
            raise ValueError(
                'Resource with identifier ' + identifier + ' has invalid attribute type for files')
    elif operation == Constants.OPERATION_INSERT:
        if resource.metadata is None:
            raise ValueError('Resource has no metadata')
        if resource.files is None:
            raise ValueError('Resource has no files')
        if resource.owner is None:
            raise ValueError('Resource has no owner')
        if not isinstance(resource.metadata, dict):
            raise ValueError('Resource has invalid attribute type for metadata')
        if not isinstance(resource.files, dict):
            raise ValueError('Resource has invalid attribute type for files')
| 54 | 120 | 0.676822 | 183 | 1,674 | 6.142077 | 0.163934 | 0.146797 | 0.225089 | 0.130783 | 0.83452 | 0.83452 | 0.825623 | 0.741103 | 0.741103 | 0.717972 | 0 | 0 | 0.253883 | 1,674 | 30 | 121 | 55.8 | 0.89992 | 0 | 0 | 0.357143 | 0 | 0 | 0.256272 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035714 | false | 0 | 0.035714 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
71e93f9faab7b85c5bdc60f97579592e02e6b528 | 4,635 | py | Python | tests/web/test_assets.py | ldmberman/bigchaindb | d2685052a676bf7bb274135a559b501c4cb17db7 | [
"Apache-2.0"
] | null | null | null | tests/web/test_assets.py | ldmberman/bigchaindb | d2685052a676bf7bb274135a559b501c4cb17db7 | [
"Apache-2.0"
] | null | null | null | tests/web/test_assets.py | ldmberman/bigchaindb | d2685052a676bf7bb274135a559b501c4cb17db7 | [
"Apache-2.0"
] | null | null | null | import pytest
ASSETS_ENDPOINT = '/api/v1/assets/'
@pytest.mark.tendermint
def test_get_assets_with_empty_text_search(client):
    """An empty ``?search=`` parameter is rejected with a 400 and a message."""
    response = client.get(ASSETS_ENDPOINT + '?search=')
    assert response.json == {'status': 400,
                             'message': 'text_search cannot be empty'}
    assert response.status_code == 400
@pytest.mark.tendermint
def test_get_assets_with_missing_text_search(client):
    """Omitting the ``search`` parameter entirely is also a 400."""
    response = client.get(ASSETS_ENDPOINT)
    assert response.status_code == 400
@pytest.mark.genesis
def test_get_assets(client, b):
    """Asset text search works on MongoDB and fails cleanly elsewhere."""
    from bigchaindb.models import Transaction
    from bigchaindb.backend.mongodb.connection import MongoDBConnection
    if isinstance(b.connection, MongoDBConnection):
        # test returns empty list when no assets are found
        res = client.get(ASSETS_ENDPOINT + '?search=abc')
        assert res.json == []
        assert res.status_code == 200
        # create asset
        asset = {'msg': 'abc'}
        tx = Transaction.create([b.me], [([b.me], 1)],
                                asset=asset).sign([b.me_private])
        # create block
        block = b.create_block([tx])
        b.write_block(block)
        # vote valid (required before the asset becomes searchable)
        vote = b.vote(block.id, b.get_last_voted_block().id, True)
        b.write_vote(vote)
        # test that asset is returned
        res = client.get(ASSETS_ENDPOINT + '?search=abc')
        assert res.status_code == 200
        assert len(res.json) == 1
        assert res.json[0] == {
            'data': {'msg': 'abc'},
            'id': tx.id
        }
    else:
        # test that the correct error is returned if not running MongoDB
        res = client.get(ASSETS_ENDPOINT + '?search=abc')
        assert res.status_code == 400
        assert res.json['message'].startswith('(OperationError)')
@pytest.mark.genesis
def test_get_assets_limit(client, b):
    """On MongoDB, the ``limit`` query parameter caps text-search results."""
    from bigchaindb.models import Transaction
    from bigchaindb.backend.mongodb.connection import MongoDBConnection
    # NOTE(review): unlike test_get_assets there is no non-MongoDB branch,
    # so the test silently passes on other backends.
    if isinstance(b.connection, MongoDBConnection):
        # create two assets, both matching the search term 'abc'
        asset1 = {'msg': 'abc 1'}
        asset2 = {'msg': 'abc 2'}
        tx1 = Transaction.create([b.me], [([b.me], 1)],
                                 asset=asset1).sign([b.me_private])
        tx2 = Transaction.create([b.me], [([b.me], 1)],
                                 asset=asset2).sign([b.me_private])
        # create block
        block = b.create_block([tx1, tx2])
        b.write_block(block)
        # vote valid (required before the assets become searchable)
        vote = b.vote(block.id, b.get_last_voted_block().id, True)
        b.write_vote(vote)
        # test that both assets are returned without limit
        res = client.get(ASSETS_ENDPOINT + '?search=abc')
        assert res.status_code == 200
        assert len(res.json) == 2
        # test that only one asset is returned when using limit=1
        res = client.get(ASSETS_ENDPOINT + '?search=abc&limit=1')
        assert res.status_code == 200
        assert len(res.json) == 1
@pytest.mark.bdb
@pytest.mark.tendermint
@pytest.mark.localmongodb
def test_get_assets_tendermint(client, tb):
    """A stored asset is retrievable through the text-search endpoint."""
    from bigchaindb.models import Transaction

    # No assets stored yet: the search must come back empty but OK.
    empty = client.get(ASSETS_ENDPOINT + '?search=abc')
    assert empty.json == []
    assert empty.status_code == 200

    # Store a single asset matching the search term...
    payload = {'msg': 'abc'}
    signed_tx = Transaction.create(
        [tb.me], [([tb.me], 1)], asset=payload).sign([tb.me_private])
    tb.store_transaction(signed_tx)

    # ...and expect exactly that asset back.
    found = client.get(ASSETS_ENDPOINT + '?search=abc')
    assert found.status_code == 200
    assert len(found.json) == 1
    assert found.json[0] == {'data': {'msg': 'abc'}, 'id': signed_tx.id}
@pytest.mark.bdb
@pytest.mark.tendermint
@pytest.mark.localmongodb
def test_get_assets_limit_tendermint(client, tb):
    """The ``limit`` query parameter caps the number of search results."""
    from bigchaindb.models import Transaction

    # Store two assets that both match the search term 'abc',
    # in the same order as before ('abc 1' first, then 'abc 2').
    for message in ('abc 1', 'abc 2'):
        signed_tx = Transaction.create(
            [tb.me], [([tb.me], 1)],
            asset={'msg': message}).sign([tb.me_private])
        tb.store_transaction(signed_tx)

    # Without a limit, both matching assets come back.
    unlimited = client.get(ASSETS_ENDPOINT + '?search=abc')
    assert unlimited.status_code == 200
    assert len(unlimited.json) == 2

    # With limit=1, only a single asset is returned.
    limited = client.get(ASSETS_ENDPOINT + '?search=abc&limit=1')
    assert limited.status_code == 200
    assert len(limited.json) == 1
| 32.412587 | 72 | 0.61165 | 600 | 4,635 | 4.61 | 0.153333 | 0.055315 | 0.047722 | 0.071584 | 0.895517 | 0.882863 | 0.880694 | 0.840926 | 0.737527 | 0.737527 | 0 | 0.021904 | 0.261273 | 4,635 | 142 | 73 | 32.640845 | 0.785923 | 0.115642 | 0 | 0.734694 | 0 | 0 | 0.065915 | 0 | 0 | 0 | 0 | 0 | 0.234694 | 1 | 0.061224 | false | 0 | 0.071429 | 0 | 0.132653 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9c1f72cd6342b5f5f015ecdfb782bb93738b0c5f | 11,407 | py | Python | dojo/unittests/test_vcg_parser.py | dr3dd589/django-DefectDojo | 7744cc4b54f294a300405db4f8e16c5408892a41 | [
"BSD-3-Clause"
] | 3 | 2020-07-15T12:57:14.000Z | 2020-10-14T14:32:40.000Z | dojo/unittests/test_vcg_parser.py | dr3dd589/django-DefectDojo | 7744cc4b54f294a300405db4f8e16c5408892a41 | [
"BSD-3-Clause"
] | 173 | 2020-12-17T20:29:09.000Z | 2022-03-23T12:54:32.000Z | dojo/unittests/test_vcg_parser.py | dr3dd589/django-DefectDojo | 7744cc4b54f294a300405db4f8e16c5408892a41 | [
"BSD-3-Clause"
] | 2 | 2022-02-07T09:57:28.000Z | 2022-03-11T08:42:59.000Z | import io
import csv
from defusedxml import ElementTree
from django.test import TestCase
from dojo.models import Test
from dojo.tools.vcg.parser import VCGCsvParser
from dojo.tools.vcg.parser import VCGParser
from dojo.tools.vcg.parser import VCGXmlParser
class TestFile(object):
    """Minimal stand-in for an uploaded file: a name plus readable content."""

    def __init__(self, name, content):
        self.name = name
        self.content = content

    def read(self):
        """Return the full content, mimicking a file object's read()."""
        return self.content
class TestVCGXmlParser(TestCase):
    """Tests for ``VCGXmlParser``: turning VCG XML reports into findings."""

    def setUp(self):
        # Fresh parser instance for every test case.
        self.parser = VCGXmlParser()

    def test_parse_no_content_no_findings(self):
        """Parsing ``None`` content yields zero findings rather than an error."""
        results = self.parser.parse(None, Test())
        self.assertEqual(0, len(results))

    def test_parse_single_finding(self):
        """A report with one <CodeIssue> element produces exactly one finding."""
        single_finding = """<?xml version="1.0" encoding="utf-8"?>
<!--XML Export of VCG Results for directory: C:\Projects\WebGoat.Net. Scanned for C# security issues.-->
<CodeIssueCollection>
<CodeIssue>
<Priority>6</Priority>
<Severity>Suspicious Comment</Severity>
<Title>Comment Indicates Potentially Unfinished Code</Title>
<Description>The comment includes some wording which indicates that the developer regards
it as unfinished or does not trust it to work correctly.</Description>
<FileName>Findings.xml</FileName>
<Line>21</Line>
<CodeLine>TODO: Check the Code</CodeLine>
<Checked>False</Checked>
<CheckColour>LawnGreen</CheckColour>
</CodeIssue>
</CodeIssueCollection>"""
        results = self.parser.parse(single_finding, Test())
        self.assertEqual(1, len(results))

    def test_parse_multiple_findings(self):
        """Two <CodeIssue> elements on different lines yield two findings."""
        findings = """<?xml version="1.0" encoding="utf-8"?>
<!--XML Export of VCG Results for directory: C:\Projects\WebGoat.Net. Scanned for C# security issues.-->
<CodeIssueCollection>
<CodeIssue>
<Priority>6</Priority>
<Severity>Suspicious Comment</Severity>
<Title>Comment Indicates Potentially Unfinished Code</Title>
<Description>The comment includes some wording which indicates that the developer regards
it as unfinished or does not trust it to work correctly.</Description>
<FileName>Findings.xml</FileName>
<Line>21</Line>
<CodeLine>TODO: Check the Code</CodeLine>
<Checked>False</Checked>
<CheckColour>LawnGreen</CheckColour>
</CodeIssue>
<CodeIssue>
<Priority>6</Priority>
<Severity>Suspicious Comment</Severity>
<Title>Comment Indicates Potentially Unfinished Code</Title>
<Description>The comment includes some wording which indicates that the developer regards
it as unfinished or does not trust it to work correctly.</Description>
<FileName>Findings.xml</FileName>
<Line>62</Line>
<CodeLine>TODO: Check the Code</CodeLine>
<Checked>False</Checked>
<CheckColour>LawnGreen</CheckColour>
</CodeIssue>
</CodeIssueCollection>"""
        results = self.parser.parse(findings, Test())
        self.assertEqual(2, len(results))

    def test_parse_duplicate_findings_dedupes(self):
        """Two byte-identical <CodeIssue> elements collapse into one finding."""
        duplicate_finding = """<?xml version="1.0" encoding="utf-8"?>
<!--XML Export of VCG Results for directory: C:\Projects\WebGoat.Net. Scanned for C# security issues.-->
<CodeIssueCollection>
<CodeIssue>
<Priority>6</Priority>
<Severity>Suspicious Comment</Severity>
<Title>Comment Indicates Potentially Unfinished Code</Title>
<Description>The comment includes some wording which indicates that the developer regards
it as unfinished or does not trust it to work correctly.</Description>
<FileName>Findings.xml</FileName>
<Line>21</Line>
<CodeLine>TODO: Check the Code</CodeLine>
<Checked>False</Checked>
<CheckColour>LawnGreen</CheckColour>
</CodeIssue>
<CodeIssue>
<Priority>6</Priority>
<Severity>Suspicious Comment</Severity>
<Title>Comment Indicates Potentially Unfinished Code</Title>
<Description>The comment includes some wording which indicates that the developer regards
it as unfinished or does not trust it to work correctly.</Description>
<FileName>Findings.xml</FileName>
<Line>21</Line>
<CodeLine>TODO: Check the Code</CodeLine>
<Checked>False</Checked>
<CheckColour>LawnGreen</CheckColour>
</CodeIssue>
</CodeIssueCollection>"""
        results = self.parser.parse(duplicate_finding, Test())
        self.assertEqual(1, len(results))

    def test_parseissuexml_with_no_issue_has_no_finding(self):
        """``parse_issue`` returns None when handed no XML element."""
        self.assertIsNone(self.parser.parse_issue(None, Test()))

    def test_parseissuexml_with_issue_has_finding(self):
        """``parse_issue`` maps a single <CodeIssue> element to a finding."""
        single_finding = """<?xml version="1.0" encoding="utf-8"?>
<!--XML Export of VCG Results for directory: C:\Projects\WebGoat.Net. Scanned for C# security issues.-->
<CodeIssueCollection>
<CodeIssue>
<Priority>6</Priority>
<Severity>Suspicious Comment</Severity>
<Title>Comment Indicates Potentially Unfinished Code</Title>
<Description>The comment includes some wording which indicates that the developer regards
it as unfinished or does not trust it to work correctly.</Description>
<FileName>Findings.xml</FileName>
<Line>21</Line>
<CodeLine>TODO: Check the Code</CodeLine>
<Checked>False</Checked>
<CheckColour>LawnGreen</CheckColour>
</CodeIssue>
</CodeIssueCollection>"""
        vcgscan = ElementTree.fromstring(single_finding)
        finding = self.parser.parse_issue(vcgscan.findall('CodeIssue')[0],
                                          Test())
        # VCG's "Suspicious Comment" priority maps to the lowest severity
        # bucket ('Info' / 'S4') in this parser.
        self.assertEqual('Info', finding.severity)
        self.assertEqual('S4', finding.numerical_severity)
        self.assertEqual('Comment Indicates Potentially Unfinished Code',
                         finding.title)
class TestVCGCsvParser(TestCase):
    """Tests for ``VCGCsvParser``: turning VCG CSV exports into findings."""

    def setUp(self):
        # Fresh parser instance for every test case.
        self.parser = VCGCsvParser()

    def test_parse_no_csv_content_no_findings(self):
        """Empty CSV content yields zero findings."""
        findings = ""
        results = self.parser.parse(findings, Test())
        self.assertEqual(0, len(results))

    def test_parse_single_finding_single_result(self):
        """A single CSV row becomes a single finding."""
        findings = """6,Suspicious Comment,"Comment Indicates Potentially Unfinished Code","The comment includes some wording which indicates that the developer regards it as unfinished or does not trust it to work correctly.",C:\Projects\WebGoat.Net\Core\Cart.cs,16,"TODO: Refactor this. Use LINQ with aggregation to get SUM.",False,"LawnGreen"""""
        results = self.parser.parse(findings, Test())
        self.assertEqual(1, len(results))

    def test_parse_multiple_findings_multiple_results(self):
        """Two rows differing in line number yield two findings."""
        findings = """6,Suspicious Comment,"Comment Indicates Potentially Unfinished Code","The comment includes some wording which indicates that the developer regards it as unfinished or does not trust it to work correctly.",C:\Projects\WebGoat.Net\Core\Cart.cs,16,"TODO: Refactor this. Use LINQ with aggregation to get SUM.",False,"LawnGreen"
6,Suspicious Comment,"Comment Indicates Potentially Unfinished Code","The comment includes some wording which indicates that the developer regards it as unfinished or does not trust it to work correctly.",C:\Projects\WebGoat.Net\Core\Cart.cs,41,"TODO: Add ability to delete an orderDetail and to change quantities.",False,"LawnGreen"""""
        results = self.parser.parse(findings, Test())
        self.assertEqual(2, len(results))

    def test_parse_duplicate_findings_deduped_results(self):
        """Two identical rows collapse into one finding."""
        findings = """6,Suspicious Comment,"Comment Indicates Potentially Unfinished Code","The comment includes some wording which indicates that the developer regards it as unfinished or does not trust it to work correctly.",C:\Projects\WebGoat.Net\Core\Cart.cs,16,"TODO: Refactor this. Use LINQ with aggregation to get SUM.",False,"LawnGreen"
6,Suspicious Comment,"Comment Indicates Potentially Unfinished Code","The comment includes some wording which indicates that the developer regards it as unfinished or does not trust it to work correctly.",C:\Projects\WebGoat.Net\Core\Cart.cs,16,"TODO: Refactor this. Use LINQ with aggregation to get SUM.",False,"LawnGreen"""""
        results = self.parser.parse(findings, Test())
        self.assertEqual(1, len(results))

    def test_parseissuerow_with_no_row_has_no_finding(self):
        """``parse_issue`` returns None for a missing row."""
        finding = self.parser.parse_issue(None, Test())
        self.assertIsNone(finding)

    def test_parseissuerow_with_empty_row_has_no_finding(self):
        """``parse_issue`` returns None for an empty row."""
        row = dict()
        finding = self.parser.parse_issue(row, Test())
        self.assertIsNone(finding)

    def test_parseissuerow_with_row_has_finding(self):
        """A well-formed CSV row parses into a finding with mapped severity."""
        findings = """6,Suspicious Comment,"Comment Indicates Potentially Unfinished Code","The comment includes some wording which indicates that the developer regards it as unfinished or does not trust it to work correctly.",C:\Projects\WebGoat.Net\Core\Cart.cs,16,"TODO: Refactor this. Use LINQ with aggregation to get SUM.",False,"LawnGreen"""""
        reader = csv.reader(io.StringIO(findings), delimiter=',',
                            quotechar='"')
        finding = None
        # The sample contains a single row; feed each row to parse_issue.
        for row in reader:
            finding = self.parser.parse_issue(row, Test())
        self.assertIsNotNone(finding)
        self.assertEqual('Info', finding.severity)
        self.assertEqual('S4', finding.numerical_severity)
        self.assertEqual('Comment Indicates Potentially Unfinished Code',
                         finding.title)
class TestVCGImport(TestCase):
    """End-to-end tests for ``VCGParser`` fed whole report files."""

    def setUp(self):
        # A parser constructed with no file must still initialize cleanly.
        self.parser = VCGParser(None, Test())

    def test_caninitialize(self):
        """The no-file constructor produces a usable VCGParser."""
        self.assertIsInstance(self.parser, VCGParser)

    def test_can_parse_xml(self):
        """An XML report file parses to the expected single finding."""
        content = """<?xml version="1.0" encoding="utf-8"?>
<!--XML Export of VCG Results for directory: C:\Projects\WebGoat.Net. Scanned for C# security issues.-->
<CodeIssueCollection>
<CodeIssue>
<Priority>6</Priority>
<Severity>Suspicious Comment</Severity>
<Title>Comment Indicates Potentially Unfinished Code</Title>
<Description>The comment includes some wording which indicates that the developer regards
it as unfinished or does not trust it to work correctly.</Description>
<FileName>Findings.xml</FileName>
<Line>21</Line>
<CodeLine>TODO: Check the Code</CodeLine>
<Checked>False</Checked>
<CheckColour>LawnGreen</CheckColour>
</CodeIssue>
</CodeIssueCollection>"""
        # presumably the '.xml' extension selects the XML sub-parser --
        # TODO confirm against VCGParser's implementation
        filename = TestFile('data.xml', content)
        self.parser = VCGParser(filename, Test())
        self.assertEqual(1, len(self.parser.items))

    def test_can_parse_csv(self):
        """A CSV report file parses to the expected single finding."""
        content = """6,Suspicious Comment,"Comment Indicates Potentially Unfinished Code","The comment includes some wording which indicates that the developer regards it as unfinished or does not trust it to work correctly.",C:\Projects\WebGoat.Net\Core\Cart.cs,16,"TODO: Refactor this. Use LINQ with aggregation to get SUM.",False,"LawnGreen"""""
        filename = TestFile('data.csv', content)
        self.parser = VCGParser(filename, Test())
        self.assertEqual(1, len(self.parser.items))
| 48.747863 | 349 | 0.686771 | 1,351 | 11,407 | 5.73131 | 0.111769 | 0.027121 | 0.055792 | 0.076456 | 0.866977 | 0.857678 | 0.82849 | 0.82849 | 0.80266 | 0.796074 | 0 | 0.007755 | 0.208731 | 11,407 | 233 | 350 | 48.957082 | 0.8501 | 0 | 0 | 0.690355 | 0 | 0.060914 | 0.646708 | 0.164548 | 0 | 0 | 0 | 0 | 0.106599 | 1 | 0.106599 | false | 0 | 0.045685 | 0.005076 | 0.177665 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9c262920e1ea806c0983986652d3266e614a9dd0 | 9,772 | py | Python | authenticator/tests/test_application.py | SamanthaFeidFischer/arxiv-auth | 08df4e0196a04a06eac1d26477b3ad56ebf56f08 | [
"MIT"
] | 1 | 2018-12-11T05:02:22.000Z | 2018-12-11T05:02:22.000Z | authenticator/tests/test_application.py | SamanthaFeidFischer/arxiv-auth | 08df4e0196a04a06eac1d26477b3ad56ebf56f08 | [
"MIT"
] | 14 | 2019-06-20T13:36:13.000Z | 2021-06-25T15:19:59.000Z | authenticator/tests/test_application.py | SamanthaFeidFischer/arxiv-auth | 08df4e0196a04a06eac1d26477b3ad56ebf56f08 | [
"MIT"
] | null | null | null | """API tests for the authenticator service."""
from unittest import TestCase, mock
import json
from datetime import datetime, timedelta
from pytz import timezone
import jwt
import arxiv.users.auth.sessions.store
from arxiv import status
from authenticator.factory import create_app
EASTERN = timezone('US/Eastern')
class TestAuthorizeWithCookie(TestCase):
    def setUp(self):
        """Build the app with a known session-cookie name and a test client."""
        self.app = create_app()
        # Fixed cookie name so the tests can set/read the auth cookie.
        self.app.config['AUTH_SESSION_COOKIE_NAME'] = 'foocookie'
        self.client = self.app.test_client()
def test_no_auth_data(self):
"""Neither an authorization token nor cookie are passed."""
response = self.client.get('/auth')
self.assertEqual(response.status_code, status.HTTP_200_OK)
@mock.patch('authenticator.services.sessions')
def test_not_a_token(self, mock_sessions):
"""Something other than a JWT is passed."""
self.client.set_cookie('', self.app.config['AUTH_SESSION_COOKIE_NAME'],
'definitelynotatoken')
response = self.client.get('/auth')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    @mock.patch('authenticator.services.sessions')
    def test_malformed_token(self, mock_sessions):
        """A cookie with missing claims is passed."""
        required_claims = ['session_id', 'user_id', 'nonce']
        # Drop each required claim in turn; every such token must be
        # rejected with 401 and a 'reason' in the JSON body.
        for exc in required_claims:
            claims = {claim: '' for claim in required_claims if claim != exc}
            bad_token = jwt.encode(claims, self.app.config['JWT_SECRET']) \
                .decode('utf-8')
            self.client.set_cookie('', self.app.config['AUTH_SESSION_COOKIE_NAME'],
                                   bad_token)
            response = self.client.get('/auth')
            self.assertEqual(response.status_code,
                             status.HTTP_401_UNAUTHORIZED)
            data = json.loads(response.data)
            self.assertIn('reason', data, 'Response includes failure reason')
@mock.patch('authenticator.services.sessions')
def test_token_with_bad_encryption(self, mock_sessions):
"""A cookie produced with a different secret is passed."""
claims = {
'user_id': '1234',
'session_id': 'ajx9043jjx00s',
'nonce': '0039299290099'
}
bad_token = jwt.encode(claims, 'nottherightsecret')
self.client.set_cookie('', self.app.config['AUTH_SESSION_COOKIE_NAME'],
bad_token)
response = self.client.get('/auth')
self.assertEqual(response.status_code,
status.HTTP_401_UNAUTHORIZED)
data = json.loads(response.data)
self.assertIn('reason', data, 'Response includes failure reason')
@mock.patch('authenticator.services.sessions.load')
def test_expired_token(self, mock_load):
"""The session is expired."""
mock_load.side_effect = arxiv.users.auth.sessions.store.ExpiredToken
claims = {
'user_id': '1234',
'session_id': 'ajx9043jjx00s',
'nonce': '0039299290099'
}
expired_token = jwt.encode(claims, self.app.config['JWT_SECRET']) \
.decode('utf-8')
self.client.set_cookie('', self.app.config['AUTH_SESSION_COOKIE_NAME'],
expired_token)
response = self.client.get('/auth')
print(response.data)
self.assertEqual(response.status_code,
status.HTTP_401_UNAUTHORIZED)
data = json.loads(response.data)
self.assertIn('reason', data, 'Response includes failure reason')
@mock.patch('authenticator.services.sessions.load')
def test_other_forged_token(self, mock_load):
"""An invalid cookie is passed."""
mock_load.side_effect = arxiv.users.auth.sessions.store.InvalidToken
claims = {
'user_id': '1234',
'session_id': 'ajx9043jjx00s',
'nonce': '0039299290099'
}
forged_token = jwt.encode(claims, self.app.config['JWT_SECRET']) \
.decode('utf-8')
self.client.set_cookie('', self.app.config['AUTH_SESSION_COOKIE_NAME'],
forged_token)
response = self.client.get('/auth')
self.assertEqual(response.status_code,
status.HTTP_401_UNAUTHORIZED)
data = json.loads(response.data)
self.assertIn('reason', data, 'Response includes failure reason')
@mock.patch('authenticator.services.sessions.load')
def test_empty_session(self, mock_load):
"""Session has been removed, or may never have existed."""
mock_load.side_effect = arxiv.users.auth.sessions.store.UnknownSession
claims = {
'user_id': '1234',
'session_id': 'ajx9043jjx00s',
'nonce': '0039299290099'
}
token = jwt.encode(claims, self.app.config['JWT_SECRET']) \
.decode('utf-8')
self.client.set_cookie('', self.app.config['AUTH_SESSION_COOKIE_NAME'],
token)
response = self.client.get('/auth')
self.assertEqual(response.status_code,
status.HTTP_401_UNAUTHORIZED)
data = json.loads(response.data)
self.assertIn('reason', data, 'Response includes failure reason')
@mock.patch('authenticator.services.sessions.load')
def test_valid_token(self, mock_load):
"""A valid cookie is passed."""
session = arxiv.users.domain.Session(
user=arxiv.users.domain.User(
user_id='1234',
username='foouser',
email='foo@bar.com'
),
start_time=datetime.now().isoformat(),
session_id='ajx9043jjx00s',
nonce='0039299290098'
)
mock_load.return_value = session
claims = {
'user_id': '1234',
'session_id': 'ajx9043jjx00s',
'nonce': '0039299290098'
}
token = jwt.encode(claims, self.app.config['JWT_SECRET']) \
.decode('utf-8')
self.client.set_cookie('', self.app.config['AUTH_SESSION_COOKIE_NAME'],
token)
response = self.client.get('/auth')
self.assertEqual(response.status_code,
status.HTTP_200_OK)
self.assertIn('Token', response.headers,
'Token header is set in response')
expected_jwt = jwt.encode(
arxiv.users.domain.to_dict(session),
self.app.config['JWT_SECRET']
).decode('utf-8')
self.assertEqual(response.headers['Token'], expected_jwt)
class TestAuthorizeWithHeader(TestCase):
    """Tests for :func:`session_store.get_token_session`."""

    def setUp(self):
        """Instantiate the authenticator app for testing."""
        self.app = create_app()
        self.app.config['AUTH_SESSION_COOKIE_NAME'] = 'foocookie'
        self.client = self.app.test_client()

    def _get_with_token(self, token):
        """Request /auth with ``token`` in the Authorization header."""
        headers = {'Authorization': 'Bearer %s' % token}
        return self.client.get('/auth', headers=headers)

    def _assert_unauthorized_with_reason(self, response):
        """Assert a 401 response whose JSON body includes a failure reason."""
        self.assertEqual(response.status_code,
                         status.HTTP_401_UNAUTHORIZED)
        data = json.loads(response.data)
        self.assertIn('reason', data, 'Response includes failure reason')

    @mock.patch('authenticator.services.sessions.load_by_id')
    def test_not_a_token(self, mock_load):
        """Something other than a token is passed."""
        mock_load.side_effect = arxiv.users.auth.sessions.store.UnknownSession
        response = self._get_with_token('notthetokenyouarelookingfor')
        self._assert_unauthorized_with_reason(response)

    @mock.patch('authenticator.services.sessions.load_by_id')
    def test_expired_token(self, mock_load):
        """An expired token is passed."""
        mock_load.side_effect = arxiv.users.auth.sessions.store.ExpiredToken
        response = self._get_with_token('foo')
        self._assert_unauthorized_with_reason(response)

    @mock.patch('authenticator.services.sessions.load_by_id')
    def test_invalid_token(self, mock_load):
        """An invalid token is passed."""
        mock_load.side_effect = arxiv.users.auth.sessions.store.InvalidToken
        response = self._get_with_token('foo')
        self._assert_unauthorized_with_reason(response)

    @mock.patch('authenticator.services.sessions.load_by_id')
    def test_valid_token(self, mock_load):
        """A valid token is passed; the session JWT is returned in a header."""
        session = arxiv.users.domain.Session(
            user=arxiv.users.domain.User(
                user_id='1234',
                username='foouser',
                email='foo@bar.com'
            ),
            start_time=datetime.now().isoformat(),
            session_id='foo',
            nonce='0039299290098'
        )
        mock_load.return_value = session
        response = self._get_with_token('foo')
        self.assertEqual(response.status_code,
                         status.HTTP_200_OK)
        self.assertIn('Token', response.headers,
                      'Token header is set in response')
        expected_jwt = jwt.encode(
            arxiv.users.domain.to_dict(session),
            self.app.config['JWT_SECRET']
        ).decode('utf-8')
        self.assertEqual(response.headers['Token'], expected_jwt)
| 42.859649 | 83 | 0.612874 | 1,064 | 9,772 | 5.454887 | 0.135338 | 0.036182 | 0.035837 | 0.043418 | 0.829083 | 0.807202 | 0.793074 | 0.747071 | 0.739662 | 0.69142 | 0 | 0.027669 | 0.267704 | 9,772 | 227 | 84 | 43.048458 | 0.783399 | 0.058944 | 0 | 0.776042 | 0 | 0 | 0.188596 | 0.071053 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.072917 | false | 0 | 0.041667 | 0 | 0.125 | 0.005208 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
92c70c8378c23afe7909f7215beb9798a6fde549 | 201 | py | Python | demoproject/templatetags/demo_tags.py | wangsure/czxs | e45e2ffde1597cb5a41d5fb46818805a1fa0d848 | [
"BSD-3-Clause"
] | null | null | null | demoproject/templatetags/demo_tags.py | wangsure/czxs | e45e2ffde1597cb5a41d5fb46818805a1fa0d848 | [
"BSD-3-Clause"
] | null | null | null | demoproject/templatetags/demo_tags.py | wangsure/czxs | e45e2ffde1597cb5a41d5fb46818805a1fa0d848 | [
"BSD-3-Clause"
] | null | null | null | #from django import template
from django.template.defaultfilters import register
from django import template
register = template.Library()


@register.filter
def demo(value):
    """Template filter: append the literal suffix ``demo`` to *value*."""
    suffix = 'demo'
    return value + suffix
| 20.1 | 51 | 0.78607 | 25 | 201 | 6.32 | 0.48 | 0.189873 | 0.202532 | 0.303797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139303 | 201 | 9 | 52 | 22.333333 | 0.913295 | 0.134328 | 0 | 0 | 0 | 0 | 0.023121 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0.166667 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
1314b47f718ce91d0170fe83bc6a69b1a7488712 | 176 | py | Python | zvt/recorders/emquantapi/holder/__init__.py | markqiu/zvt | 1bcfb71279f2652c3600f0f8e45d941f98ceaa10 | [
"MIT"
] | 6 | 2020-09-03T10:02:00.000Z | 2021-02-04T02:51:47.000Z | zvt/recorders/emquantapi/holder/__init__.py | wlwd13303/zvt | 23105a5bfdc3a5080c6c22d11e9e53d216688dea | [
"MIT"
] | null | null | null | zvt/recorders/emquantapi/holder/__init__.py | wlwd13303/zvt | 23105a5bfdc3a5080c6c22d11e9e53d216688dea | [
"MIT"
] | 2 | 2020-07-08T04:15:40.000Z | 2021-06-08T08:51:31.000Z | # -*- coding: utf-8 -*-
from zvt.recorders.emquantapi.holder.em_holdr_trade_detail_recorder import *
from zvt.recorders.emquantapi.holder.em_holdr_trade_plan_recorder import *
| 44 | 76 | 0.818182 | 25 | 176 | 5.44 | 0.6 | 0.102941 | 0.235294 | 0.382353 | 0.647059 | 0.647059 | 0.647059 | 0.647059 | 0 | 0 | 0 | 0.006135 | 0.073864 | 176 | 3 | 77 | 58.666667 | 0.828221 | 0.119318 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
133527ffc2fd076d5ee22f6739db530adf10aea1 | 20,238 | py | Python | test/messages/test_messages_incoming.py | kikinteractive/kik-python | 2902b2167608a28c7756beade874c8ae5d7b643c | [
"MIT"
] | 75 | 2016-04-05T15:59:45.000Z | 2022-01-24T08:22:09.000Z | test/messages/test_messages_incoming.py | kikinteractive/kik-python | 2902b2167608a28c7756beade874c8ae5d7b643c | [
"MIT"
] | 37 | 2016-04-07T03:15:52.000Z | 2021-06-23T23:17:06.000Z | test/messages/test_messages_incoming.py | kikinteractive/kik-python | 2902b2167608a28c7756beade874c8ae5d7b643c | [
"MIT"
] | 60 | 2016-04-06T05:45:59.000Z | 2021-12-29T06:09:24.000Z | from unittest import TestCase
from kik.messages import VideoMessage, UnknownMessage, TextMessage, StartChattingMessage, StickerMessage, \
ScanDataMessage, PictureMessage, LinkMessage, IsTypingMessage, ReadReceiptMessage, DeliveryReceiptMessage, \
SuggestedResponseKeyboard, TextResponse, FriendPickerResponse, FriendPickerMessage, PictureResponse
class KikBotMessagesIncomingTest(TestCase):
    """Tests that incoming message JSON deserializes into message objects."""

    # Common values shared by every incoming-message fixture.
    CHAT_ID = 'c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2'
    MESSAGE_ID = '8e7fc0ad-36aa-43dd-8c5f-e72f5f2ed7e0'
    TIMESTAMP = 1458336131

    def _incoming_json(self, read_receipt_requested, chat_type='direct',
                       **extra):
        """Build incoming-message JSON with the common fields filled in.

        Pass ``chat_type=None`` to omit the ``chatType`` field (receipt and
        unknown messages don't carry one).  Extra keyword arguments are
        merged into the returned dict as message-type-specific fields.
        """
        message_json = {
            'from': 'aleem',
            'participants': ['aleem'],
            'mention': None,
            'chatId': self.CHAT_ID,
            'id': self.MESSAGE_ID,
            'timestamp': self.TIMESTAMP,
            'readReceiptRequested': read_receipt_requested
        }
        if chat_type is not None:
            message_json['chatType'] = chat_type
        message_json.update(extra)
        return message_json

    def _assert_common_fields(self, message, read_receipt_requested,
                              chat_type='direct'):
        """Assert the fields shared by every incoming message type."""
        self.assertEqual(message.from_user, 'aleem')
        self.assertEqual(message.participants, ['aleem'])
        self.assertIsNone(message.mention)
        self.assertEqual(message.chat_id, self.CHAT_ID)
        self.assertEqual(message.id, self.MESSAGE_ID)
        self.assertEqual(message.timestamp, self.TIMESTAMP)
        self.assertIs(read_receipt_requested, message.read_receipt_requested)
        if chat_type is not None:
            self.assertEqual(message.chat_type, chat_type)

    def _keyboard_message_json(self, keyboards):
        """Build outgoing-style text-message JSON carrying ``keyboards``."""
        return {
            'to': 'aleem',
            'participants': ['aleem'],
            'chatId': self.CHAT_ID,
            'body': 'Some text',
            'id': self.MESSAGE_ID,
            'timestamp': self.TIMESTAMP,
            'readReceiptRequested': True,
            'keyboards': keyboards
        }

    def _assert_keyboard_message_fields(self, message):
        """Assert the common fields of a keyboard-carrying text message."""
        self.assertEqual(message.to, 'aleem')
        self.assertEqual(message.participants, ['aleem'])
        self.assertEqual(message.chat_id, self.CHAT_ID)
        self.assertEqual(message.body, 'Some text')
        self.assertEqual(message.id, self.MESSAGE_ID)
        self.assertEqual(message.timestamp, self.TIMESTAMP)
        self.assertIs(True, message.read_receipt_requested)

    def test_text_message_incoming(self):
        """An incoming text message deserializes correctly."""
        message = TextMessage.from_json(
            self._incoming_json(True, body='Some text'))
        self._assert_common_fields(message, True)
        self.assertEqual(message.body, 'Some text')

    def test_link_message_incoming(self):
        """An incoming link message deserializes correctly."""
        message = LinkMessage.from_json(self._incoming_json(
            True,
            url='http://foo.bar',
            title='A Title',
            text='Some text',
            noForward=True,
            kikJsData='somedata',
            attribution={
                'name': 'Webpage',
                'iconUrl': 'http://foo.bar/icon'
            }))
        self._assert_common_fields(message, True)
        self.assertEqual(message.url, 'http://foo.bar')
        self.assertEqual(message.title, 'A Title')
        self.assertEqual(message.text, 'Some text')
        self.assertEqual(message.no_forward, True)
        self.assertEqual(message.kik_js_data, 'somedata')
        self.assertEqual(message.attribution.name, 'Webpage')
        self.assertEqual(message.attribution.icon_url, 'http://foo.bar/icon')

    def test_picture_message_incoming(self):
        """An incoming picture message deserializes correctly."""
        message = PictureMessage.from_json(self._incoming_json(
            True,
            picUrl='http://foo.bar/image',
            attribution={
                'name': 'Webpage',
                'iconUrl': 'http://foo.bar/icon'
            },
            metadata={'some': 'data'}))
        self._assert_common_fields(message, True)
        self.assertEqual(message.pic_url, 'http://foo.bar/image')
        self.assertEqual(message.attribution.name, 'Webpage')
        self.assertEqual(message.attribution.icon_url, 'http://foo.bar/icon')
        self.assertEqual(message.metadata, {'some': 'data'})

    def test_video_message_incoming(self):
        """An incoming video message deserializes correctly."""
        message = VideoMessage.from_json(self._incoming_json(
            True,
            videoUrl='http://foo.bar/vid',
            muted=False,
            autoplay=True,
            loop=False,
            attribution={
                'name': 'Webpage',
                'iconUrl': 'http://foo.bar/icon'
            }))
        self._assert_common_fields(message, True)
        self.assertEqual(message.video_url, 'http://foo.bar/vid')
        self.assertIs(False, message.muted)
        self.assertIs(True, message.autoplay)
        self.assertIs(False, message.loop)
        self.assertEqual(message.attribution.name, 'Webpage')
        self.assertEqual(message.attribution.icon_url, 'http://foo.bar/icon')

    def test_start_chatting_incoming(self):
        """An incoming start-chatting message deserializes correctly."""
        message = StartChattingMessage.from_json(self._incoming_json(False))
        self._assert_common_fields(message, False)

    def test_sticker_message_incoming(self):
        """An incoming sticker message deserializes correctly."""
        sticker_url = ('http://cards-sticker-dev.herokuapp.com/stickers/memes/'
                       'okay.png')
        message = StickerMessage.from_json(self._incoming_json(
            False,
            stickerPackId='memes',
            stickerUrl=sticker_url))
        self._assert_common_fields(message, False)
        self.assertEqual(message.sticker_pack_id, 'memes')
        self.assertEqual(message.sticker_url, sticker_url)

    def test_scan_data_message_incoming(self):
        """An incoming scan-data message deserializes correctly."""
        message = ScanDataMessage.from_json(
            self._incoming_json(False, data='foobar'))
        self._assert_common_fields(message, False)
        self.assertEqual(message.data, 'foobar')

    def test_is_typing_incoming(self):
        """An incoming is-typing message deserializes correctly."""
        message = IsTypingMessage.from_json(
            self._incoming_json(False, isTyping=True))
        self._assert_common_fields(message, False)
        self.assertIs(True, message.is_typing)

    def test_read_receipt_incoming(self):
        """An incoming read receipt deserializes correctly (no chatType)."""
        message = ReadReceiptMessage.from_json(self._incoming_json(
            False, chat_type=None,
            messageIds=['ff3ea373-576c-45d4-bdcd-9956a156301d']))
        self._assert_common_fields(message, False, chat_type=None)
        self.assertEqual(message.message_ids,
                         ['ff3ea373-576c-45d4-bdcd-9956a156301d'])

    def test_delivery_receipt_incoming(self):
        """An incoming delivery receipt deserializes correctly (no chatType)."""
        message = DeliveryReceiptMessage.from_json(self._incoming_json(
            False, chat_type=None,
            messageIds=['ff3ea373-576c-45d4-bdcd-9956a156301d']))
        self._assert_common_fields(message, False, chat_type=None)
        self.assertEqual(message.message_ids,
                         ['ff3ea373-576c-45d4-bdcd-9956a156301d'])

    def test_friend_picker_message_incoming(self):
        """An incoming friend-picker message deserializes correctly."""
        message = FriendPickerMessage.from_json(
            self._incoming_json(False, picked=['foobar']))
        self._assert_common_fields(message, False)
        self.assertEqual(message.picked, ['foobar'])

    def test_unknown_message_incoming(self):
        """An unrecognized type deserializes into UnknownMessage with raw JSON."""
        message_json = self._incoming_json(
            False, chat_type=None,
            type='some-unknown-type',
            anUnknownProperty=['With', 'Some', 'Values'])
        message = UnknownMessage.from_json(message_json)
        self.assertEqual(message.type, 'some-unknown-type')
        self._assert_common_fields(message, False, chat_type=None)
        self.assertEqual(message.raw_message, message_json)

    def test_suggested_keyboard_message(self):
        """Suggested-response keyboards deserialize with typed responses."""
        message = TextMessage.from_json(self._keyboard_message_json([
            {
                'to': 'aleem',
                'type': 'suggested',
                'hidden': False,
                'responses': [
                    {
                        'type': 'picture',
                        'picUrl': 'http://foo.bar',
                        'metadata': {'some': 'data'}
                    },
                    {
                        'type': 'text',
                        'body': 'Ok!'
                    },
                    {
                        'type': 'text',
                        'body': 'No way!'
                    },
                    {
                        'type': 'friend-picker',
                        'body': 'Pick a friend!',
                        'min': 1,
                        'max': 5,
                        'preselected': ['foo', 'bar']
                    }
                ]
            }
        ]))
        self._assert_keyboard_message_fields(message)
        responses = [
            PictureResponse('http://foo.bar', {'some': 'data'}),
            TextResponse('Ok!'),
            TextResponse('No way!'),
            FriendPickerResponse('Pick a friend!', 1, 5, ['foo', 'bar'])
        ]
        self.assertEqual(message.keyboards, [
            SuggestedResponseKeyboard(to='aleem', hidden=False,
                                      responses=responses)
        ])

    def test_unknown_keyboard_message(self):
        """An unrecognized keyboard type keeps its raw JSON."""
        keyboard_json = {'to': 'aleem', 'type': 'some-unknown-type',
                         'hidden': False}
        message = TextMessage.from_json(
            self._keyboard_message_json([keyboard_json]))
        self._assert_keyboard_message_fields(message)
        self.assertIsInstance(message.keyboards, list)
        self.assertEqual(len(message.keyboards), 1)
        keyboard = message.keyboards[0]
        self.assertEqual(keyboard.to, 'aleem')
        self.assertEqual(keyboard.type, 'some-unknown-type')
        self.assertEqual(keyboard.hidden, False)
        self.assertEqual(keyboard.raw_keyboard, keyboard_json)

    def test_unknown_suggested_response(self):
        """An unrecognized suggested-response type keeps its raw JSON."""
        response_json = {'type': 'some-unknown-type', 'prop': 'Ok!'}
        message = TextMessage.from_json(self._keyboard_message_json([
            {
                'to': 'aleem',
                'type': 'suggested',
                'hidden': False,
                'responses': [response_json]
            }
        ]))
        self._assert_keyboard_message_fields(message)
        self.assertIsInstance(message.keyboards, list)
        self.assertEqual(len(message.keyboards), 1)
        keyboard = message.keyboards[0]
        self.assertEqual(keyboard.to, 'aleem')
        self.assertEqual(keyboard.type, 'suggested')
        self.assertEqual(keyboard.hidden, False)
        self.assertIsInstance(keyboard.responses, list)
        self.assertEqual(len(keyboard.responses), 1)
        self.assertEqual(keyboard.responses[0].type, 'some-unknown-type')
        self.assertEqual(keyboard.responses[0].raw_response, response_json)
| 46.847222 | 119 | 0.624321 | 1,671 | 20,238 | 7.46918 | 0.087373 | 0.147825 | 0.211522 | 0.043266 | 0.833908 | 0.814999 | 0.811874 | 0.806266 | 0.792725 | 0.785354 | 0 | 0.135492 | 0.252396 | 20,238 | 431 | 120 | 46.955916 | 0.689425 | 0 | 0 | 0.714286 | 0 | 0 | 0.283625 | 0.155351 | 0 | 0 | 0 | 0 | 0.393484 | 1 | 0.037594 | false | 0 | 0.005013 | 0 | 0.045113 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
133d33996f9c48d22ebedb9c7f4a6b4766c5b1f1 | 21,544 | py | Python | tensorflow/python/kernel_tests/bincount_op_test.py | yage99/tensorflow | c7fa71b32a3635eb25596ae80d007b41007769c4 | [
"Apache-2.0"
] | 74 | 2020-07-06T17:11:39.000Z | 2022-01-28T06:31:28.000Z | tensorflow/python/kernel_tests/bincount_op_test.py | sseung0703/tensorflow | be084bd7a4dd241eb781fc704f57bcacc5c9b6dd | [
"Apache-2.0"
] | 88 | 2020-11-24T08:18:10.000Z | 2022-03-25T20:28:30.000Z | tensorflow/python/kernel_tests/bincount_op_test.py | sseung0703/tensorflow | be084bd7a4dd241eb781fc704f57bcacc5c9b6dd | [
"Apache-2.0"
] | 12 | 2020-07-08T07:27:17.000Z | 2021-12-27T08:54:27.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for bincount_ops.bincount."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import bincount_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import googletest
class BincountTest(test_util.TensorFlowTestCase):
  """Tests for the `bincount_ops.bincount` wrapper and the raw Bincount op."""

  def test_empty(self):
    """Empty input yields `minlength` zeros, honoring the requested dtype."""
    with self.session(use_gpu=True):
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([], minlength=5)),
          [0, 0, 0, 0, 0])
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([], minlength=1)), [0])
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([], minlength=0)), [])
      # The output dtype must follow the `dtype` argument even with no input.
      self.assertEqual(
          self.evaluate(
              bincount_ops.bincount([], minlength=0, dtype=np.float32)).dtype,
          np.float32)
      self.assertEqual(
          self.evaluate(
              bincount_ops.bincount([], minlength=3, dtype=np.float64)).dtype,
          np.float64)

  def test_values(self):
    """Counts of small hand-built vectors match the expected histograms."""
    with self.session(use_gpu=True):
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([1, 1, 1, 2, 2, 3])),
          [0, 3, 2, 1])
      arr = [1, 1, 2, 1, 2, 3, 1, 2, 3, 4, 1, 2, 3, 4, 5]
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount(arr)), [0, 5, 4, 3, 2, 1])
      # Appending zeros should only change the count of bin 0.
      arr += [0, 0, 0, 0, 0, 0]
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount(arr)), [6, 5, 4, 3, 2, 1])
      self.assertAllEqual(self.evaluate(bincount_ops.bincount([])), [])
      self.assertAllEqual(self.evaluate(bincount_ops.bincount([0, 0, 0])), [3])
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([5])), [0, 0, 0, 0, 0, 1])
      # Each value 0..9999 appears exactly once.
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount(np.arange(10000))),
          np.ones(10000))

  def test_maxlength(self):
    """`maxlength` truncates the output; out-of-range values are dropped."""
    with self.session(use_gpu=True):
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([5], maxlength=3)), [0, 0, 0])
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([1], maxlength=3)), [0, 1])
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount([], maxlength=3)), [])

  def test_random_with_weights(self):
    """Weighted counts over random data match np.bincount for several dtypes."""
    num_samples = 10000
    with self.session(use_gpu=True):
      np.random.seed(42)
      for dtype in [dtypes.int32, dtypes.int64, dtypes.float32, dtypes.float64]:
        arr = np.random.randint(0, 1000, num_samples)
        # Integer weights may be negative; float weights are in [0, 1).
        if dtype == dtypes.int32 or dtype == dtypes.int64:
          weights = np.random.randint(-100, 100, num_samples)
        else:
          weights = np.random.random(num_samples)
        self.assertAllClose(
            self.evaluate(bincount_ops.bincount(arr, weights)),
            np.bincount(arr, weights))

  def test_random_without_weights(self):
    """Unweighted counts equal weighted counts with all-ones weights."""
    num_samples = 10000
    with self.session(use_gpu=True):
      np.random.seed(42)
      for dtype in [np.int32, np.float32]:
        arr = np.random.randint(0, 1000, num_samples)
        weights = np.ones(num_samples).astype(dtype)
        self.assertAllClose(
            self.evaluate(bincount_ops.bincount(arr, None)),
            np.bincount(arr, weights))

  def test_zero_weights(self):
    """All-zero weights produce an all-zero histogram."""
    with self.session(use_gpu=True):
      self.assertAllEqual(
          self.evaluate(bincount_ops.bincount(np.arange(1000), np.zeros(1000))),
          np.zeros(1000))

  def test_negative(self):
    """Negative input values raise InvalidArgumentError."""
    # unsorted_segment_sum will only report InvalidArgumentError on CPU
    with self.cached_session(), ops.device("/CPU:0"):
      with self.assertRaises(errors.InvalidArgumentError):
        self.evaluate(bincount_ops.bincount([1, 2, 3, -1, 6, 8]))

  def test_shape_function(self):
    """Static shape inference of the raw Bincount op validates `size`."""
    # size must be scalar.
    with self.assertRaisesRegex(
        (ValueError, errors.InvalidArgumentError),
        "Shape must be rank 0 but is rank 1 .*Bincount"):
      gen_math_ops.bincount([1, 2, 3, 1, 6, 8], [1], [])
    # size must be positive.
    with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                "must be non-negative"):
      gen_math_ops.bincount([1, 2, 3, 1, 6, 8], -5, [])
    # if size is a constant then the shape is known.
    v1 = gen_math_ops.bincount([1, 2, 3, 1, 6, 8], 5, [])
    self.assertAllEqual(v1.get_shape().as_list(), [5])
    # if size is a placeholder then the shape is unknown.
    with ops.Graph().as_default():
      s = array_ops.placeholder(dtype=dtypes.int32)
      v2 = gen_math_ops.bincount([1, 2, 3, 1, 6, 8], s, [])
      self.assertAllEqual(v2.get_shape().as_list(), [None])
class BincountOpTest(test_util.TensorFlowTestCase, parameterized.TestCase):
  """Tests for the raw DenseBincount op (gen_math_ops.dense_bincount).

  Covers 1-D and 2-D (per-row) inputs, with and without weights, and the
  binary_output mode, comparing against np.bincount as the reference.
  """

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_bincount_all_count(self, dtype):
    """1-D counting output matches np.bincount."""
    np.random.seed(42)
    size = 1000
    inp = np.random.randint(0, size, (4096), dtype=dtype)
    np_out = np.bincount(inp, minlength=size)
    with test_util.use_gpu():
      self.assertAllEqual(
          np_out,
          self.evaluate(
              gen_math_ops.dense_bincount(input=inp, weights=[], size=size)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_bincount_all_count_with_weights(self, dtype):
    """1-D weighted output matches np.bincount with weights."""
    np.random.seed(42)
    size = 1000
    inp = np.random.randint(0, size, (4096,), dtype=dtype)
    np_weight = np.random.random((4096,))
    np_out = np.bincount(inp, minlength=size, weights=np_weight)
    with test_util.use_gpu():
      self.assertAllEqual(
          np_out,
          self.evaluate(
              gen_math_ops.dense_bincount(
                  input=inp, weights=np_weight, size=size)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_bincount_all_binary(self, dtype):
    """binary_output=True yields 1 for every bin that was hit."""
    np.random.seed(42)
    size = 10
    inp = np.random.randint(0, size, (4096), dtype=dtype)
    # With 4096 draws over 10 bins, every bin is hit, so all-ones is expected.
    np_out = np.ones((size,))
    with test_util.use_gpu():
      self.assertAllEqual(
          np_out,
          self.evaluate(
              gen_math_ops.dense_bincount(
                  input=inp, weights=[], size=size, binary_output=True)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_bincount_all_binary_with_weights(self, dtype):
    """binary_output=True ignores weights; output is still all ones."""
    np.random.seed(42)
    size = 10
    inp = np.random.randint(0, size, (4096,), dtype=dtype)
    np_weight = np.random.random((4096,))
    np_out = np.ones((size,))
    with test_util.use_gpu():
      self.assertAllEqual(
          np_out,
          self.evaluate(
              gen_math_ops.dense_bincount(
                  input=inp, weights=np_weight, size=size, binary_output=True)))

  def _test_bincount_col_count(self, num_rows, num_cols, size, dtype):
    """Helper: 2-D input, per-row counts vs. np.bincount row by row."""
    np.random.seed(42)
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_out = np.reshape(
        np.concatenate(
            [np.bincount(inp[j, :], minlength=size) for j in range(num_rows)],
            axis=0), (num_rows, size))
    with test_util.use_gpu():
      self.assertAllEqual(
          np_out,
          self.evaluate(
              gen_math_ops.dense_bincount(input=inp, weights=[], size=size)))

  def _test_bincount_col_binary(self, num_rows, num_cols, size, dtype):
    """Helper: 2-D input, per-row binary (hit/miss) output."""
    np.random.seed(42)
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_out = np.reshape(
        np.concatenate([
            np.where(np.bincount(inp[j, :], minlength=size) > 0, 1, 0)
            for j in range(num_rows)
        ],
                       axis=0), (num_rows, size))
    with test_util.use_gpu():
      self.assertAllEqual(
          np_out,
          self.evaluate(
              gen_math_ops.dense_bincount(
                  input=inp, weights=[], size=size, binary_output=True)))

  def _test_bincount_col_count_with_weights(self, num_rows, num_cols, size,
                                            dtype):
    """Helper: 2-D input, per-row weighted counts."""
    np.random.seed(42)
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_weight = np.random.random((num_rows, num_cols))
    np_out = np.reshape(
        np.concatenate([
            np.bincount(inp[j, :], weights=np_weight[j, :], minlength=size)
            for j in range(num_rows)
        ],
                       axis=0), (num_rows, size))
    with test_util.use_gpu():
      self.assertAllEqual(
          np_out,
          self.evaluate(
              gen_math_ops.dense_bincount(
                  input=inp, weights=np_weight, size=size)))

  def test_col_reduce_basic(self):
    """Smoke test for 2-D input with a hand-computed expected output."""
    with test_util.use_gpu():
      v = self.evaluate(
          gen_math_ops.dense_bincount(
              input=[[1, 2, 3], [0, 3, 2]], weights=[], size=4))
      expected_out = [[0., 1., 1., 1.], [1., 0., 1., 1.]]
      self.assertAllEqual(expected_out, v)

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_col_reduce_shared_memory(self, dtype):
    # num_rows * num_bins less than half of max shared memory.
    num_rows = 128
    num_cols = 27
    size = 10
    self._test_bincount_col_count(num_rows, num_cols, size, dtype)

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_col_reduce_global_memory(self, dtype):
    # num_rows * num_bins more than half of max shared memory.
    num_rows = 128
    num_cols = 27
    size = 1024
    self._test_bincount_col_count(num_rows, num_cols, size, dtype)

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_col_reduce_shared_memory_with_weights(self, dtype):
    # num_rows * num_bins less than half of max shared memory.
    num_rows = 128
    num_cols = 27
    size = 100
    self._test_bincount_col_count_with_weights(num_rows, num_cols, size, dtype)

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_col_reduce_global_memory_with_weights(self, dtype):
    # num_rows * num_bins more than half of max shared memory.
    num_rows = 128
    num_cols = 27
    size = 1024
    self._test_bincount_col_count_with_weights(num_rows, num_cols, size, dtype)

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_col_reduce_binary(self, dtype):
    """Per-row binary output on a 2-D input."""
    num_rows = 128
    num_cols = 7
    size = 10
    self._test_bincount_col_binary(num_rows, num_cols, size, dtype)

  def test_invalid_rank(self):
    """Inputs of rank > 2 must be rejected."""
    with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                "at most rank 2"):
      with test_util.use_gpu():
        self.evaluate(
            gen_math_ops.dense_bincount(
                input=[[[1, 2, 3], [0, 3, 2]]], weights=[], size=10))
class SparseBincountOpTest(test_util.TensorFlowTestCase,
                           parameterized.TestCase):
  """Tests for the raw SparseBincount op (gen_math_ops.sparse_bincount).

  1-D cases feed random indices/values directly; 2-D (per-row) cases build a
  SparseTensor via sparse_ops.from_dense and compare with np.bincount.
  """

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_sparse_bincount_all_count(self, dtype):
    """1-D sparse counting matches np.bincount over the values."""
    np.random.seed(42)
    num_rows = 128
    size = 1000
    n_elems = 4096
    inp_indices = np.random.randint(0, num_rows, (n_elems,))
    inp_vals = np.random.randint(0, size, (n_elems,), dtype=dtype)
    np_out = np.bincount(inp_vals, minlength=size)
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.sparse_bincount(
                indices=inp_indices,
                values=inp_vals,
                dense_shape=[num_rows],
                size=size,
                weights=[])))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_sparse_bincount_all_count_with_weights(self, dtype):
    """1-D sparse weighted counting matches np.bincount with weights."""
    np.random.seed(42)
    num_rows = 128
    size = 1000
    n_elems = 4096
    inp_indices = np.random.randint(0, num_rows, (n_elems,))
    inp_vals = np.random.randint(0, size, (n_elems,), dtype=dtype)
    inp_weight = np.random.random((n_elems,))
    np_out = np.bincount(inp_vals, minlength=size, weights=inp_weight)
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.sparse_bincount(
                indices=inp_indices,
                values=inp_vals,
                dense_shape=[num_rows],
                size=size,
                weights=inp_weight)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_sparse_bincount_all_binary(self, dtype):
    """binary_output=True yields 1 for every bin that was hit."""
    np.random.seed(42)
    num_rows = 128
    size = 10
    n_elems = 4096
    inp_indices = np.random.randint(0, num_rows, (n_elems,))
    inp_vals = np.random.randint(0, size, (n_elems,), dtype=dtype)
    # 4096 draws over 10 bins hit every bin, so all-ones is expected.
    np_out = np.ones((size,))
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.sparse_bincount(
                indices=inp_indices,
                values=inp_vals,
                dense_shape=[num_rows],
                size=size,
                weights=[],
                binary_output=True)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_sparse_bincount_all_binary_weights(self, dtype):
    """binary_output=True ignores weights; output is still all ones."""
    np.random.seed(42)
    num_rows = 128
    size = 10
    n_elems = 4096
    inp_indices = np.random.randint(0, num_rows, (n_elems,))
    inp_vals = np.random.randint(0, size, (n_elems,), dtype=dtype)
    inp_weight = np.random.random((n_elems,))
    np_out = np.ones((size,))
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.sparse_bincount(
                indices=inp_indices,
                values=inp_vals,
                dense_shape=[num_rows],
                size=size,
                weights=inp_weight,
                binary_output=True)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_sparse_bincount_col_reduce_count(self, dtype):
    """2-D sparse input: per-row counts match np.bincount row by row."""
    num_rows = 128
    num_cols = 27
    size = 100
    np.random.seed(42)
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_out = np.reshape(
        np.concatenate(
            [np.bincount(inp[j, :], minlength=size) for j in range(num_rows)],
            axis=0), (num_rows, size))
    # from_dense will filter out 0s.
    # Shift by +1 so zeros survive the conversion; undone below via values - 1.
    inp = inp + 1
    # from_dense will cause OOM in GPU.
    with ops.device("/CPU:0"):
      inp_sparse = sparse_ops.from_dense(inp)
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.sparse_bincount(
                indices=inp_sparse.indices,
                values=inp_sparse.values - 1,
                dense_shape=inp_sparse.dense_shape,
                size=size,
                weights=[])))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_sparse_bincount_col_reduce_binary(self, dtype):
    """2-D sparse input: per-row binary (hit/miss) output."""
    num_rows = 128
    num_cols = 27
    size = 100
    np.random.seed(42)
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_out = np.reshape(
        np.concatenate([
            np.where(np.bincount(inp[j, :], minlength=size) > 0, 1, 0)
            for j in range(num_rows)
        ],
                       axis=0), (num_rows, size))
    # from_dense will filter out 0s.
    # Shift by +1 so zeros survive the conversion; undone below via values - 1.
    inp = inp + 1
    # from_dense will cause OOM in GPU.
    with ops.device("/CPU:0"):
      inp_sparse = sparse_ops.from_dense(inp)
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.sparse_bincount(
                indices=inp_sparse.indices,
                values=inp_sparse.values - 1,
                dense_shape=inp_sparse.dense_shape,
                size=size,
                weights=[],
                binary_output=True)))
class RaggedBincountOpTest(test_util.TensorFlowTestCase,
                           parameterized.TestCase):
  """Tests for the raw RaggedBincount op (gen_math_ops.ragged_bincount).

  NOTE(review): several tests below take a `dtype` parameter from
  @parameterized but never use it (the fixture is a literal constant), so
  both parameterizations run identical code — consider dropping the
  parameterization or applying the dtype.
  """

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_ragged_bincount_count(self, dtype):
    """Per-row counts for a small hand-written ragged tensor."""
    x = ragged_factory_ops.constant([[], [], [3, 0, 1], [], [5, 0, 4, 4]])
    expected_output = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0,
                                            0], [1, 1, 0, 1, 0, 0],
                       [0, 0, 0, 0, 0, 0], [1, 0, 0, 0, 2, 1]]
    self.assertAllEqual(
        expected_output,
        self.evaluate(
            gen_math_ops.ragged_bincount(
                splits=x.row_splits, values=x.values, weights=[], size=6)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_ragged_bincount_binary(self, dtype):
    """Per-row binary output: the duplicated 4 in the last row counts once."""
    x = ragged_factory_ops.constant([[], [], [3, 0, 1], [], [5, 0, 4, 4]])
    expected_output = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0,
                                            0], [1, 1, 0, 1, 0, 0],
                       [0, 0, 0, 0, 0, 0], [1, 0, 0, 0, 1, 1]]
    self.assertAllEqual(
        expected_output,
        self.evaluate(
            gen_math_ops.ragged_bincount(
                splits=x.row_splits,
                values=x.values,
                weights=[],
                size=6,
                binary_output=True)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_ragged_bincount_count_with_weights(self, dtype):
    """Per-row weighted counts; weights share the ragged structure of x."""
    x = ragged_factory_ops.constant([[], [], [3, 0, 1], [], [5, 0, 4, 4]])
    weights = ragged_factory_ops.constant([[], [], [.1, .2, .3], [],
                                           [.2, .5, .6, .3]])
    expected_output = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],
                       [.2, .3, 0, .1, 0, 0], [0, 0, 0, 0, 0, 0],
                       [.5, 0, 0, 0, .9, .2]]
    self.assertAllClose(
        expected_output,
        self.evaluate(
            gen_math_ops.ragged_bincount(
                splits=x.row_splits,
                values=x.values,
                weights=weights.values,
                size=6)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_ragged_bincount_count_np(self, dtype):
    """Per-row counts on a large random fixture vs. np.bincount."""
    np.random.seed(42)
    num_rows = 128
    num_cols = 27
    size = 1000
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_out = np.reshape(
        np.concatenate(
            [np.bincount(inp[j, :], minlength=size) for j in range(num_rows)],
            axis=0), (num_rows, size))
    x = ragged_tensor.RaggedTensor.from_tensor(inp)
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.ragged_bincount(
                splits=x.row_splits, values=x.values, weights=[], size=size)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_ragged_bincount_count_np_with_weights(self, dtype):
    """Per-row weighted counts on a large random fixture.

    NOTE(review): `weights` is passed as the 2-D np_weight array while
    `values` is the flattened x.values — presumably the op accepts/flattens
    matching-size weights; confirm against the op's kernel.
    """
    np.random.seed(42)
    num_rows = 128
    num_cols = 27
    size = 1000
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_weight = np.random.random((num_rows, num_cols))
    np_out = np.reshape(
        np.concatenate([
            np.bincount(inp[j, :], weights=np_weight[j, :], minlength=size)
            for j in range(num_rows)
        ],
                       axis=0), (num_rows, size))
    x = ragged_tensor.RaggedTensor.from_tensor(inp)
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.ragged_bincount(
                splits=x.row_splits,
                values=x.values,
                weights=np_weight,
                size=size)))

  @parameterized.parameters([{
      "dtype": np.int32,
  }, {
      "dtype": np.int64,
  }])
  def test_ragged_bincount_binary_np_with_weights(self, dtype):
    """Per-row binary output on a large random fixture.

    NOTE(review): despite the name, no weights are passed (weights=[]).
    """
    np.random.seed(42)
    num_rows = 128
    num_cols = 27
    size = 1000
    inp = np.random.randint(0, size, (num_rows, num_cols), dtype=dtype)
    np_out = np.reshape(
        np.concatenate([
            np.where(np.bincount(inp[j, :], minlength=size) > 0, 1, 0)
            for j in range(num_rows)
        ],
                       axis=0), (num_rows, size))
    x = ragged_tensor.RaggedTensor.from_tensor(inp)
    self.assertAllEqual(
        np_out,
        self.evaluate(
            gen_math_ops.ragged_bincount(
                splits=x.row_splits,
                values=x.values,
                weights=[],
                size=size,
                binary_output=True)))
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  googletest.main()
| 33.453416 | 80 | 0.593483 | 2,757 | 21,544 | 4.438157 | 0.085238 | 0.012586 | 0.015446 | 0.016018 | 0.835404 | 0.816525 | 0.775662 | 0.767898 | 0.71829 | 0.700392 | 0 | 0.042502 | 0.272651 | 21,544 | 643 | 81 | 33.505443 | 0.738354 | 0.058531 | 0 | 0.762411 | 0 | 0 | 0.015555 | 0 | 0 | 0 | 0 | 0 | 0.078014 | 1 | 0.060284 | false | 0 | 0.028369 | 0 | 0.095745 | 0.001773 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1391f576c88306f2c15e588625b71d8c8ca04c3b | 8,791 | py | Python | sep/shift_params.py | lukereichold/visual-speech-separation | d365083c5e76bc6c97c9642b0c2afc55e846ab38 | [
"Apache-2.0"
] | 2 | 2021-03-18T12:18:58.000Z | 2021-10-05T18:56:45.000Z | sep/shift_params.py | lukereichold/visual-speech-separation | d365083c5e76bc6c97c9642b0c2afc55e846ab38 | [
"Apache-2.0"
] | 10 | 2020-01-28T23:15:38.000Z | 2022-03-12T00:12:26.000Z | sep/shift_params.py | lukereichold/visual-speech-separation | d365083c5e76bc6c97c9642b0c2afc55e846ab38 | [
"Apache-2.0"
] | null | null | null | # this was trained using the old version of the loss, which had a separate cls_opt
import os, sys, utils.util as ut, tfutil as mu
import numpy as np
# Short aliases used throughout this module.
ab = os.path.abspath  # absolute-path helper
pj = ut.pjoin  # path-join helper from utils.util
Params = mu.Params  # parameter container from tfutil
def shift_lowfps(num_gpus = 1, shift_dur = 4.2):
    """Hyperparameters for the low-fps (subsampled-frame) shift pretraining run.

    Args:
      num_gpus: number of GPUs; scales the batch size (15 per GPU).
      shift_dur: duration in seconds of the sampled clip.

    Returns:
      A Params object; pr.vis_dir is created on disk as <resdir>/vis.

    NOTE(review): nearly all settings are duplicated across shift_lowfps /
    shift_v1 / cam_v1 — keep them in sync when editing. Paths are hard-coded
    to a specific cluster.
    """
    total_dur = 10.1  # full video duration (seconds)
    fps = 29.97       # video frame rate
    frame_dur = 1./fps
    samp_sr = 21000.  # audio sample rate (Hz)
    spec_sr = 100.    # spectrogram frame rate (Hz)
    pr = Params(subsample_frames = 4,
                train_iters = 100000,
                opt_method = 'momentum',
                base_lr = 1e-2,
                full_model = True,
                grad_clip = 5.,
                skip_notfound = False,
                augment_ims = True,
                cam = False,
                batch_size = int(15*num_gpus),
                test_batch = 10,
                shift_dur = shift_dur,
                multipass = False,
                both_examples = True,
                small_augment = False,
                resdir = ab('/data/scratch/owens/shift/shift-lowfps'),
                init_path = None,
                weight_decay = 1e-5,
                train_list = '/data/ssd1/owens/audioset-vid-v21/small_train.txt',
                test_list = '/data/ssd1/owens/audioset-vid-v21/small_train.txt',
                num_dbs = None,
                im_type = 'jpeg',
                input_type = 'samples',
                full_im_dim = 256,
                full_flow_dim = 256,
                crop_im_dim = 224,
                sf_pad = int(0.5 * 2**4 * 4),
                use_flow = False,
                #renorm = True,
                renorm = True,
                checkpoint_iters = 1000,
                dset_seed = None,
                samp_sr = samp_sr,
                spec_sr = spec_sr,
                fps = fps,
                #neg_frame_buf = -50,
                max_intersection = 30*2,
                specgram_sr = spec_sr,
                num_mel = 64,
                batch_norm = True,
                show_videos = False,
                check_iters = 1000,
                decompress_flow = True,
                print_iters = 10,
                total_frames = int(total_dur*fps),
                sampled_frames = int(shift_dur*fps),
                full_specgram_samples = int(total_dur * spec_sr),
                full_samples_len = int(total_dur * samp_sr),
                sfs_per_frame = spec_sr * frame_dur,
                samples_per_frame = samp_sr * frame_dur,
                # NOTE(review): in Python 3 this is float division — confirm a
                # float delta is intended (int(total_dur*fps)//2 otherwise).
                frame_sample_delta = int(total_dur*fps)/2,
                fix_frame = False,
                use_3d = True,
                augment = False,
                dilate = False,
                do_shift = True,
                variable_frame_count = False,
                momentum_rate = 0.9,
                use_sound = True,
                bn_last = True,
                summary_iters = 10,
                im_split = True,
                num_splits = 2,
                augment_audio = False,
                multi_shift = False,
                )
    # Create the visualization output directory alongside the results.
    pr.vis_dir = ut.mkdir(pj(pr.resdir, 'vis'))
    return pr
def shift_v1(num_gpus = 1, shift_dur = 4.2):
    """Hyperparameters for the full-fps shift run, warm-started from lowfps.

    Differs from shift_lowfps mainly in: subsample_frames=None, smaller
    per-GPU batch (5 vs 15), init_path pointing at the lowfps checkpoint,
    the full train/test lists, and model_iter=650000.

    Args:
      num_gpus: number of GPUs; scales the batch size (5 per GPU).
      shift_dur: duration in seconds of the sampled clip.

    Returns:
      A Params object with derived vid_dur and num_samples attached.
    """
    total_dur = 10.1  # full video duration (seconds)
    fps = 29.97       # video frame rate
    frame_dur = 1./fps
    samp_sr = 21000.  # audio sample rate (Hz)
    spec_sr = 100.    # spectrogram frame rate (Hz)
    pr = Params(subsample_frames = None,
                train_iters = 100000,
                opt_method = 'momentum',
                base_lr = 1e-2,
                full_model = True,
                grad_clip = 5.,
                skip_notfound = False,
                augment_ims = True,
                # Warm-start from the low-fps pretraining checkpoint.
                init_path = '/data/scratch/owens/shift/shift-lowfps/training/net.tf-30000',
                cam = False,
                batch_size = int(5*num_gpus),
                test_batch = 10,
                shift_dur = shift_dur,
                multipass = False,
                both_examples = True,
                small_augment = False,
                resdir = ab('/data/scratch/owens/shift/shift-v1'),
                #init_path = None,
                weight_decay = 1e-5,
                # train_list = '/data/ssd1/owens/audioset-vid-v21/small_train.txt',
                # test_list = '/data/ssd1/owens/audioset-vid-v21/small_train.txt',
                train_list = '/data/scratch/owens/audioset-vid-v21/train_tfs.txt',
                test_list = '/data/scratch/owens/audioset-vid-v21/test_tfs.txt',
                num_dbs = None,
                im_type = 'jpeg',
                input_type = 'samples',
                full_im_dim = 256,
                full_flow_dim = 256,
                crop_im_dim = 224,
                sf_pad = int(0.5 * 2**4 * 4),
                use_flow = False,
                #renorm = True,
                renorm = True,
                checkpoint_iters = 1000,
                dset_seed = None,
                samp_sr = samp_sr,
                spec_sr = spec_sr,
                fps = fps,
                #neg_frame_buf = -50,
                max_intersection = 30*2,
                specgram_sr = spec_sr,
                num_mel = 64,
                batch_norm = True,
                show_videos = False,
                check_iters = 1000,
                decompress_flow = True,
                print_iters = 10,
                total_frames = int(total_dur*fps),
                sampled_frames = int(shift_dur*fps),
                full_specgram_samples = int(total_dur * spec_sr),
                full_samples_len = int(total_dur * samp_sr),
                sfs_per_frame = spec_sr * frame_dur,
                samples_per_frame = samp_sr * frame_dur,
                # NOTE(review): float division in Python 3 — confirm intended.
                frame_sample_delta = int(total_dur*fps)/2,
                fix_frame = False,
                use_3d = True,
                augment = False,
                dilate = False,
                do_shift = True,
                variable_frame_count = False,
                momentum_rate = 0.9,
                use_sound = True,
                bn_last = True,
                summary_iters = 10,
                im_split = True,
                num_splits = 2,
                augment_audio = False,
                multi_shift = False,
                model_iter = 650000,
                )
    # Derived quantities used by the training/eval code.
    pr.vid_dur = pr.shift_dur
    pr.num_samples = int(round(pr.samples_per_frame*pr.sampled_frames))
    #pr.vis_dir = ut.mkdir(pj(pr.resdir, 'vis'))
    return pr
def cam_v1(num_gpus = 1, shift_dur = 4.2):
    """Hyperparameters for the CAM (class-activation-map) variant.

    Same configuration as shift_v1 except cam=True, a relative init_path to
    the released shift checkpoint, resdir=cam-v1, and model_iter=675000.

    Args:
      num_gpus: number of GPUs; scales the batch size (5 per GPU).
      shift_dur: duration in seconds of the sampled clip.

    Returns:
      A Params object with derived vid_dur and num_samples attached.
    """
    total_dur = 10.1  # full video duration (seconds)
    fps = 29.97       # video frame rate
    frame_dur = 1./fps
    samp_sr = 21000.  # audio sample rate (Hz)
    spec_sr = 100.    # spectrogram frame rate (Hz)
    pr = Params(subsample_frames = None,
                train_iters = 100000,
                opt_method = 'momentum',
                base_lr = 1e-2,
                full_model = True,
                grad_clip = 5.,
                skip_notfound = False,
                augment_ims = True,
                # Initialize from the released shift model checkpoint.
                init_path = '../results/nets/shift/net.tf-650000',
                cam = True,
                batch_size = int(5*num_gpus),
                test_batch = 10,
                shift_dur = shift_dur,
                multipass = False,
                both_examples = True,
                small_augment = False,
                resdir = ab('/data/scratch/owens/shift/cam-v1'),
                weight_decay = 1e-5,
                train_list = '/data/scratch/owens/audioset-vid-v21/train_tfs.txt',
                test_list = '/data/scratch/owens/audioset-vid-v21/test_tfs.txt',
                num_dbs = None,
                im_type = 'jpeg',
                input_type = 'samples',
                full_im_dim = 256,
                full_flow_dim = 256,
                crop_im_dim = 224,
                sf_pad = int(0.5 * 2**4 * 4),
                use_flow = False,
                #renorm = True,
                renorm = True,
                checkpoint_iters = 1000,
                dset_seed = None,
                samp_sr = samp_sr,
                spec_sr = spec_sr,
                fps = fps,
                #neg_frame_buf = -50,
                max_intersection = 30*2,
                specgram_sr = spec_sr,
                num_mel = 64,
                batch_norm = True,
                show_videos = False,
                check_iters = 1000,
                decompress_flow = True,
                print_iters = 10,
                total_frames = int(total_dur*fps),
                sampled_frames = int(shift_dur*fps),
                full_specgram_samples = int(total_dur * spec_sr),
                full_samples_len = int(total_dur * samp_sr),
                sfs_per_frame = spec_sr * frame_dur,
                samples_per_frame = samp_sr * frame_dur,
                # NOTE(review): float division in Python 3 — confirm intended.
                frame_sample_delta = int(total_dur*fps)/2,
                fix_frame = False,
                use_3d = True,
                augment = False,
                dilate = False,
                do_shift = True,
                variable_frame_count = False,
                momentum_rate = 0.9,
                use_sound = True,
                bn_last = True,
                summary_iters = 10,
                im_split = True,
                num_splits = 2,
                augment_audio = False,
                multi_shift = False,
                model_iter = 675000,
                )
    # Derived quantities used by the training/eval code.
    pr.vid_dur = pr.shift_dur
    pr.num_samples = int(round(pr.samples_per_frame*pr.sampled_frames))
    return pr
| 33.942085 | 89 | 0.493914 | 1,009 | 8,791 | 4.007929 | 0.169475 | 0.026706 | 0.032641 | 0.037587 | 0.936944 | 0.93002 | 0.920623 | 0.917161 | 0.917161 | 0.917161 | 0 | 0.051112 | 0.416904 | 8,791 | 258 | 90 | 34.073643 | 0.737807 | 0.04243 | 0 | 0.888889 | 0 | 0 | 0.066024 | 0.058887 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013333 | false | 0.013333 | 0.008889 | 0 | 0.035556 | 0.013333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
13995f08746464b925b45821ed1d668a7180885e | 13,590 | py | Python | tests/test_cache_invalidation.py | d-chugunov/asyncpg | bd26b0e69d6341b9959e3c3ef8cd65dc910f6dad | [
"Apache-2.0"
] | null | null | null | tests/test_cache_invalidation.py | d-chugunov/asyncpg | bd26b0e69d6341b9959e3c3ef8cd65dc910f6dad | [
"Apache-2.0"
] | null | null | null | tests/test_cache_invalidation.py | d-chugunov/asyncpg | bd26b0e69d6341b9959e3c3ef8cd65dc910f6dad | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2016-present the asyncpg authors and contributors
# <see AUTHORS file>
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
import asyncpg
from asyncpg import _testbase as tb
# Message substrings matched via assertRaisesRegex to distinguish the two
# composite-type cache-mismatch failure modes raised by asyncpg.
ERRNUM = 'unexpected number of attributes of composite type'
ERRTYP = 'unexpected data type of composite type'
class TestCacheInvalidation(tb.ConnectedTestCase):
    """Tests that asyncpg's statement/type caches recover from DDL changes.

    Each test mutates the schema (ALTER TABLE / ALTER TYPE) after a query
    has populated the cache, then verifies that the stale cache entry is
    detected, reported, and invalidated so subsequent queries succeed.
    """

    async def test_prepare_cache_invalidation_silent(self):
        """Outside a transaction, a stale plan is retried transparently."""
        await self.con.execute('CREATE TABLE tab1(a int, b int)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, 2)')
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, 2))

            await self.con.execute(
                'ALTER TABLE tab1 ALTER COLUMN b SET DATA TYPE text')

            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, '2'))
        finally:
            await self.con.execute('DROP TABLE tab1')

    async def test_prepare_cache_invalidation_in_transaction(self):
        """Inside a transaction, a stale plan raises; it works afterwards."""
        await self.con.execute('CREATE TABLE tab1(a int, b int)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, 2)')
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, 2))

            await self.con.execute(
                'ALTER TABLE tab1 ALTER COLUMN b SET DATA TYPE text')

            with self.assertRaisesRegex(asyncpg.InvalidCachedStatementError,
                                        'cached statement plan is invalid'):
                async with self.con.transaction():
                    result = await self.con.fetchrow('SELECT * FROM tab1')

            # This is now OK,
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, '2'))
        finally:
            await self.con.execute('DROP TABLE tab1')

    async def test_prepare_cache_invalidation_in_pool(self):
        """One connection hitting a stale plan invalidates the whole pool."""
        pool = await self.create_pool(database='postgres',
                                      min_size=2, max_size=2)

        await self.con.execute('CREATE TABLE tab1(a int, b int)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, 2)')

            con1 = await pool.acquire()
            con2 = await pool.acquire()

            result = await con1.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, 2))

            result = await con2.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, 2))

            await self.con.execute(
                'ALTER TABLE tab1 ALTER COLUMN b SET DATA TYPE text')

            # con1 tries the same plan, will invalidate the cache
            # for the entire pool.
            result = await con1.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, '2'))

            async with con2.transaction():
                # This should work, as con1 should have invalidated
                # the plan cache.
                result = await con2.fetchrow('SELECT * FROM tab1')
                self.assertEqual(result, (1, '2'))
        finally:
            await self.con.execute('DROP TABLE tab1')
            await pool.close()

    async def test_type_cache_invalidation_in_transaction(self):
        """ALTER TYPE inside a transaction: first fetch raises, next succeeds."""
        await self.con.execute('CREATE TYPE typ1 AS (x int, y int)')
        await self.con.execute('CREATE TABLE tab1(a int, b typ1)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, (2, 3))')
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3)))

            async with self.con.transaction():
                await self.con.execute('ALTER TYPE typ1 ADD ATTRIBUTE c text')

                with self.assertRaisesRegex(
                        asyncpg.OutdatedSchemaCacheError, ERRNUM):
                    await self.con.fetchrow('SELECT * FROM tab1')

                # The second request must be correct (cache was dropped):
                result = await self.con.fetchrow('SELECT * FROM tab1')
                self.assertEqual(result, (1, (2, 3, None)))

            # This is now OK, the cache is actual after the transaction.
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3, None)))
        finally:
            await self.con.execute('DROP TABLE tab1')
            await self.con.execute('DROP TYPE typ1')

    async def test_type_cache_invalidation_in_cancelled_transaction(self):
        """After ROLLBACK of an ALTER TYPE, the cache is dropped and refilled."""
        await self.con.execute('CREATE TYPE typ1 AS (x int, y int)')
        await self.con.execute('CREATE TABLE tab1(a int, b typ1)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, (2, 3))')
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3)))

            try:
                async with self.con.transaction():
                    await self.con.execute(
                        'ALTER TYPE typ1 ADD ATTRIBUTE c text')

                    with self.assertRaisesRegex(
                            asyncpg.OutdatedSchemaCacheError, ERRNUM):
                        await self.con.fetchrow('SELECT * FROM tab1')

                    # The second request must be correct (cache was dropped):
                    result = await self.con.fetchrow('SELECT * FROM tab1')
                    self.assertEqual(result, (1, (2, 3, None)))
                    raise UserWarning  # Just to generate ROLLBACK
            except UserWarning:
                pass

            with self.assertRaisesRegex(
                    asyncpg.OutdatedSchemaCacheError, ERRNUM):
                await self.con.fetchrow('SELECT * FROM tab1')

            # This is now OK, the cache is filled after being dropped.
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3)))
        finally:
            await self.con.execute('DROP TABLE tab1')
            await self.con.execute('DROP TYPE typ1')

    async def test_prepared_type_cache_invalidation(self):
        """A cached prepared statement is closed when its type codecs go stale."""
        await self.con.execute('CREATE TYPE typ1 AS (x int, y int)')
        await self.con.execute('CREATE TABLE tab1(a int, b typ1)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, (2, 3))')
            prep = await self.con._prepare('SELECT * FROM tab1',
                                           use_cache=True)
            result = await prep.fetchrow()
            self.assertEqual(result, (1, (2, 3)))

            try:
                async with self.con.transaction():
                    await self.con.execute(
                        'ALTER TYPE typ1 ADD ATTRIBUTE c text')

                    with self.assertRaisesRegex(
                            asyncpg.OutdatedSchemaCacheError, ERRNUM):
                        await prep.fetchrow()

                    # PS has its local cache for types codecs, even after the
                    # cache cleanup it is not possible to use it.
                    # That's why it is marked as closed.
                    with self.assertRaisesRegex(
                            asyncpg.InterfaceError,
                            'the prepared statement is closed'):
                        await prep.fetchrow()

                    prep = await self.con._prepare('SELECT * FROM tab1',
                                                   use_cache=True)
                    # The second PS must be correct (cache was dropped):
                    result = await prep.fetchrow()
                    self.assertEqual(result, (1, (2, 3, None)))
                    raise UserWarning  # Just to generate ROLLBACK
            except UserWarning:
                pass

            with self.assertRaisesRegex(
                    asyncpg.OutdatedSchemaCacheError, ERRNUM):
                await prep.fetchrow()

            # Reprepare it again after dropping cache.
            prep = await self.con._prepare('SELECT * FROM tab1',
                                           use_cache=True)
            # This is now OK, the cache is filled after being dropped.
            result = await prep.fetchrow()
            self.assertEqual(result, (1, (2, 3)))
        finally:
            await self.con.execute('DROP TABLE tab1')
            await self.con.execute('DROP TYPE typ1')

    async def test_type_cache_invalidation_on_drop_type_attr(self):
        """Dropping a composite attribute triggers ERRNUM, then recovery."""
        await self.con.execute('CREATE TYPE typ1 AS (x int, y int, c text)')
        await self.con.execute('CREATE TABLE tab1(a int, b typ1)')

        try:
            await self.con.execute(
                'INSERT INTO tab1 VALUES (1, (2, 3, $1))', 'x')
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3, 'x')))

            await self.con.execute('ALTER TYPE typ1 DROP ATTRIBUTE x')

            with self.assertRaisesRegex(
                    asyncpg.OutdatedSchemaCacheError, ERRNUM):
                await self.con.fetchrow('SELECT * FROM tab1')

            # This is now OK, the cache is filled after being dropped.
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (3, 'x')))
        finally:
            await self.con.execute('DROP TABLE tab1')
            await self.con.execute('DROP TYPE typ1')

    async def test_type_cache_invalidation_on_change_attr(self):
        """Replacing an attribute's type triggers ERRTYP, then recovery."""
        await self.con.execute('CREATE TYPE typ1 AS (x int, y int)')
        await self.con.execute('CREATE TABLE tab1(a int, b typ1)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, (2, 3))')
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3)))

            # It is slightly artificial, but can take place in transactional
            # schema changing. Nevertheless, if the code checks and raises it
            # the most probable reason is a difference with the cache type.
            await self.con.execute('ALTER TYPE typ1 DROP ATTRIBUTE y')
            await self.con.execute('ALTER TYPE typ1 ADD ATTRIBUTE y bigint')

            with self.assertRaisesRegex(
                    asyncpg.OutdatedSchemaCacheError, ERRTYP):
                await self.con.fetchrow('SELECT * FROM tab1')

            # This is now OK, the cache is filled after being dropped.
            result = await self.con.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, None)))
        finally:
            await self.con.execute('DROP TABLE tab1')
            await self.con.execute('DROP TYPE typ1')

    async def test_type_cache_invalidation_in_pool(self):
        """Type-cache invalidation propagates pool-wide but is per-database."""
        await self.con.execute('CREATE DATABASE testdb')
        pool = await self.create_pool(database='postgres',
                                      min_size=2, max_size=2)

        pool_chk = await self.create_pool(database='testdb',
                                          min_size=2, max_size=2)

        await self.con.execute('CREATE TYPE typ1 AS (x int, y int)')
        await self.con.execute('CREATE TABLE tab1(a int, b typ1)')

        try:
            await self.con.execute('INSERT INTO tab1 VALUES (1, (2, 3))')

            con1 = await pool.acquire()
            con2 = await pool.acquire()

            result = await con1.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3)))

            result = await con2.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3)))

            # Create the same schema in the "testdb", fetch data which caches
            # type info.
            con_chk = await pool_chk.acquire()
            await con_chk.execute('CREATE TYPE typ1 AS (x int, y int)')
            await con_chk.execute('CREATE TABLE tab1(a int, b typ1)')
            await con_chk.execute('INSERT INTO tab1 VALUES (1, (2, 3))')
            result = await con_chk.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3)))

            # Change schema in the databases.
            await self.con.execute('ALTER TYPE typ1 ADD ATTRIBUTE c text')
            await con_chk.execute('ALTER TYPE typ1 ADD ATTRIBUTE c text')

            # con1 tries to get cached type info, fails, but invalidates the
            # cache for the entire pool.
            with self.assertRaisesRegex(
                    asyncpg.OutdatedSchemaCacheError, ERRNUM):
                await con1.fetchrow('SELECT * FROM tab1')

            async with con2.transaction():
                # This should work, as con1 should have invalidated all caches.
                result = await con2.fetchrow('SELECT * FROM tab1')
                self.assertEqual(result, (1, (2, 3, None)))

            # After all the con1 uses actual info from renewed cache entry.
            result = await con1.fetchrow('SELECT * FROM tab1')
            self.assertEqual(result, (1, (2, 3, None)))

            # Check the invalidation is database-specific, i.e. cache entries
            # for pool_chk/con_chk was not dropped via pool/con1.
            with self.assertRaisesRegex(
                    asyncpg.OutdatedSchemaCacheError, ERRNUM):
                await con_chk.fetchrow('SELECT * FROM tab1')
        finally:
            await self.con.execute('DROP TABLE tab1')
            await self.con.execute('DROP TYPE typ1')
            await pool.close()
            await pool_chk.close()
            await self.con.execute('DROP DATABASE testdb')
| 43.980583 | 79 | 0.567623 | 1,601 | 13,590 | 4.771393 | 0.124297 | 0.071475 | 0.116246 | 0.126849 | 0.824846 | 0.80089 | 0.782694 | 0.755858 | 0.744207 | 0.719204 | 0 | 0.024983 | 0.337307 | 13,590 | 308 | 80 | 44.123377 | 0.823229 | 0.122222 | 0 | 0.816901 | 0 | 0 | 0.200723 | 0 | 0 | 0 | 0 | 0 | 0.173709 | 1 | 0 | false | 0.00939 | 0.00939 | 0 | 0.014085 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
13aa86042379d9c7d81b5bc081933c0b0f113ee5 | 3,990 | py | Python | experiments/plot_performance.py | MichaelAllen1966/qambo | be91110d97cec690133005be82bedf94e5029db5 | [
"MIT"
] | 7 | 2021-06-29T06:52:59.000Z | 2021-12-14T21:23:08.000Z | experiments/plot_performance.py | MichaelAllen1966/qambo | be91110d97cec690133005be82bedf94e5029db5 | [
"MIT"
] | null | null | null | experiments/plot_performance.py | MichaelAllen1966/qambo | be91110d97cec690133005be82bedf94e5029db5 | [
"MIT"
] | 3 | 2021-09-06T22:14:42.000Z | 2022-01-26T18:05:33.000Z | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
results_files = {
'random': 'results_random_action.csv',
'ddqn': 'results_ddqn.csv',
'3dqn': 'results_d3qn.csv',
'noisy 3dqn': 'results_noisy_d3qn.csv',
'pr 3dqn': 'results_pr_d3qn.csv',
'pr noisy 3dqn': 'results_pr_noisy_d3qn.csv',
'bagging ddqn': 'results_bagging_ddqn.csv',
'bagging 3dqn': 'results_bagging_3dqn.csv',
'bagging noisy 3dqn': 'results_bagging_noisy_d3qn.csv',
'bagging pr noisy 3dqn': 'results_bagging_pr_noisy_d3qn.csv'
}
# Call to response
index = 0
fig = plt.figure(figsize=(14,7))
ax1 = fig.add_subplot(131)
path = './1_incident_points_3_ambo/output/'
x = []
labels = []
for key, value in results_files.items():
index += 1
labels.append(str(index) + ': ' + key)
filename = path + value
df = pd.read_csv(filename)
x.append(df['call_to_arrival'].values)
ax1.boxplot(x, whis=1000, widths=0.8)
ax1.set_yticks(np.arange(10,32,2))
ax1.set_ylabel('Call to arrival (minutes)')
ax1.set_title('1 incident area, 3 ambulances')
ax2 = fig.add_subplot(132)
path = './2_incident_points_6_ambo/output/'
x = []
for key, value in results_files.items():
filename = path + value
df = pd.read_csv(filename)
x.append(df['call_to_arrival'].values)
ax2.boxplot(x, whis=1000, widths=0.8)
ax2.set_yticks(np.arange(10,32,2))
ax2.set_ylabel('Call to arrival (minutes)')
ax2.set_title('2 incident areas, 6 ambulances')
ax3 = fig.add_subplot(133)
path = './3_incident_points_9_ambo/output/'
x = []
for key, value in results_files.items():
filename = path + value
df = pd.read_csv(filename)
x.append(df['call_to_arrival'].values)
ax3.boxplot(x, whis=1000, widths=0.8)
ax3.set_yticks(np.arange(10,32,2))
ax3.set_ylabel('Call to arrival (minutes)')
ax3.set_title('3 incident areas, 9 ambulances')
leg = fig.legend(labels, handlelength=0, handletextpad=0, fancybox=True,
ncol=5, loc='upper center', bbox_to_anchor=(0.5, 0.03),
markerscale=0.0)
for item in leg.legendHandles:
item.set_visible(False)
plt.tight_layout(pad=2)
plt.savefig('call_to_arrival.jpg', dpi=300, bbox_inches='tight', pad_inches=0.5)
plt.show()
############################################################################### Assignment to response
index = 0
fig = plt.figure(figsize=(14,7))
ax1 = fig.add_subplot(131)
path = './1_incident_points_3_ambo/output/'
x = []
labels = []
for key, value in results_files.items():
index += 1
labels.append(str(index) + ': ' + key)
filename = path + value
df = pd.read_csv(filename)
x.append(df['assign_to_arrival'].values)
ax1.boxplot(x, whis=1000, widths=0.8)
ax1.set_yticks(np.arange(8,30,2))
ax1.set_ylabel('Call to response (minutes)')
ax1.set_title('1 incident area, 3 ambulances')
ax2 = fig.add_subplot(132)
path = './2_incident_points_6_ambo/output/'
x = []
for key, value in results_files.items():
filename = path + value
df = pd.read_csv(filename)
x.append(df['assign_to_arrival'].values)
ax2.boxplot(x, whis=1000, widths=0.8)
ax2.set_yticks(np.arange(8,30,2))
ax2.set_ylabel('Call to response (minutes)')
ax2.set_title('2 incident areas, 6 ambulances')
ax3 = fig.add_subplot(133)
path = './3_incident_points_9_ambo/output/'
x = []
for key, value in results_files.items():
filename = path + value
df = pd.read_csv(filename)
x.append(df['assign_to_arrival'].values)
ax3.boxplot(x, whis=1000, widths=0.8)
ax3.set_yticks(np.arange(8,30,2))
ax3.set_ylabel('Call to response (minutes)')
ax3.set_title('3 incident areas, 9 ambulances')
leg = fig.legend(labels, handlelength=0, handletextpad=0, fancybox=True,
ncol=5, loc='upper center', bbox_to_anchor=(0.5, 0.03),
markerscale=0.0)
for item in leg.legendHandles:
item.set_visible(False)
plt.tight_layout(pad=2)
plt.savefig('assign_to_arrival.jpg', dpi=300, bbox_inches='tight', )
plt.show()
| 27.517241 | 102 | 0.672431 | 620 | 3,990 | 4.15 | 0.175806 | 0.025651 | 0.035367 | 0.030315 | 0.837932 | 0.837932 | 0.778469 | 0.767975 | 0.742324 | 0.742324 | 0 | 0.05625 | 0.157895 | 3,990 | 144 | 103 | 27.708333 | 0.709524 | 0.009774 | 0 | 0.728972 | 0 | 0 | 0.271318 | 0.105426 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.028037 | 0 | 0.028037 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
13aaab8c073d48dbcda80c57d207cd9e4384b902 | 113 | py | Python | boa3_test/test_sc/bytes_test/BytesToBoolWithBuiltin.py | hal0x2328/neo3-boa | 6825a3533384cb01660773050719402a9703065b | [
"Apache-2.0"
] | 25 | 2020-07-22T19:37:43.000Z | 2022-03-08T03:23:55.000Z | boa3_test/test_sc/bytes_test/BytesToBoolWithBuiltin.py | hal0x2328/neo3-boa | 6825a3533384cb01660773050719402a9703065b | [
"Apache-2.0"
] | 419 | 2020-04-23T17:48:14.000Z | 2022-03-31T13:17:45.000Z | boa3_test/test_sc/bytes_test/BytesToBoolWithBuiltin.py | hal0x2328/neo3-boa | 6825a3533384cb01660773050719402a9703065b | [
"Apache-2.0"
] | 15 | 2020-05-21T21:54:24.000Z | 2021-11-18T06:17:24.000Z | from boa3.builtin import public
@public
def bytes_to_bool(args: bytes) -> bool:
return bytes.to_bool(args)
| 16.142857 | 39 | 0.743363 | 18 | 113 | 4.5 | 0.611111 | 0.17284 | 0.271605 | 0.37037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010526 | 0.159292 | 113 | 6 | 40 | 18.833333 | 0.842105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
13aeed87550c680a3f89165fd33444834ae594c9 | 40 | py | Python | tests/torchaddons/test_init.py | jakkes/torchaddons | 3a93b4c3ad026e208a3836c61159d9fd54694dd2 | [
"MIT"
] | null | null | null | tests/torchaddons/test_init.py | jakkes/torchaddons | 3a93b4c3ad026e208a3836c61159d9fd54694dd2 | [
"MIT"
] | null | null | null | tests/torchaddons/test_init.py | jakkes/torchaddons | 3a93b4c3ad026e208a3836c61159d9fd54694dd2 | [
"MIT"
] | null | null | null | def test_init():
import torchaddons
| 13.333333 | 22 | 0.725 | 5 | 40 | 5.6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 40 | 2 | 23 | 20 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0.5 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b941efaef564a194b72f6987fec3f31be59d84f9 | 434 | py | Python | data_resource/generator/api_manager/v1_0_0/crud_functions/__init__.py | brighthive/data-resource-generator | 66154c5954ac287f2af25359b2abb0abb417b265 | [
"MIT"
] | 2 | 2020-06-12T13:43:42.000Z | 2020-06-28T00:25:40.000Z | data_resource/generator/api_manager/v1_0_0/crud_functions/__init__.py | brighthive/data-resource-generator | 66154c5954ac287f2af25359b2abb0abb417b265 | [
"MIT"
] | 3 | 2020-07-01T00:17:48.000Z | 2020-09-21T21:16:39.000Z | data_resource/generator/api_manager/v1_0_0/crud_functions/__init__.py | brighthive/data-resource-generator | 66154c5954ac287f2af25359b2abb0abb417b265 | [
"MIT"
] | null | null | null | from data_resource.generator.api_manager.v1_0_0.crud_functions.resource_read import (
ResourceRead,
)
from data_resource.generator.api_manager.v1_0_0.crud_functions.resource_create import (
ResourceCreate,
)
from data_resource.generator.api_manager.v1_0_0.crud_functions.resource_update import (
ResourceUpdate,
)
from data_resource.generator.api_manager.v1_0_0.crud_functions.resource_query import (
ResourceQuery,
)
| 33.384615 | 87 | 0.834101 | 60 | 434 | 5.633333 | 0.316667 | 0.094675 | 0.189349 | 0.295858 | 0.710059 | 0.710059 | 0.710059 | 0.710059 | 0.710059 | 0.710059 | 0 | 0.030457 | 0.092166 | 434 | 12 | 88 | 36.166667 | 0.827411 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
b958571eb5909dde55f9968692eaa48c26840521 | 165,363 | py | Python | resources_rc.py | ksk102/RSA-Digital_Signature | 568eac43301b83bfb42b735a69976061031c8a89 | [
"MIT"
] | null | null | null | resources_rc.py | ksk102/RSA-Digital_Signature | 568eac43301b83bfb42b735a69976061031c8a89 | [
"MIT"
] | 1 | 2019-11-19T13:38:31.000Z | 2019-11-19T13:38:56.000Z | resources_rc.py | ksk102/RSA-Digital_Signature | 568eac43301b83bfb42b735a69976061031c8a89 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.13.0)
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore
qt_resource_data = b"\
\x00\x00\x9b\x37\
\x00\
\x00\x01\x00\x05\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\
\x00\x56\x00\x00\x00\x18\x18\x00\x00\x01\x00\x20\x00\x88\x09\x00\
\x00\xbe\x04\x00\x00\x20\x20\x00\x00\x01\x00\x20\x00\xa8\x10\x00\
\x00\x46\x0e\x00\x00\x30\x30\x00\x00\x01\x00\x20\x00\xa8\x25\x00\
\x00\xee\x1e\x00\x00\x00\x00\x00\x00\x01\x00\x20\x00\xa1\x56\x00\
\x00\x96\x44\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x04\x00\x00\x86\x0b\x00\
\x00\x86\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa4\
\x00\xf5\x63\xa4\x00\xf5\x63\xa3\x00\xf5\x63\xa3\x00\xf5\x63\xa3\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x33\xf5\x63\xa1\x7f\xf5\x63\xa1\
\x76\xf5\x63\xa1\x1e\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x64\xa3\
\x00\xf5\x63\xa1\x3f\xf5\x63\xa1\x55\xf5\x63\xa1\x51\xf5\x63\xa1\
\x51\xf5\x63\xa1\x4f\xf5\x63\xa1\x6a\xf5\x63\xa1\x76\xf5\x63\xa1\
\x50\xf5\x63\xa1\x5b\xf5\x63\xa1\x04\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x03\xf5\x63\xa1\x58\xf5\x63\xa1\x1f\xf5\x63\xa1\x12\xf5\x63\xa1\
\x13\xf5\x63\xa1\x13\xf5\x63\xa1\x11\xf5\x63\xa1\x28\xf5\x63\xa1\
\x5b\xf5\x63\xa1\x54\xf5\x63\xa1\x46\xf5\x63\xa1\x1e\xf5\x63\xa1\
\x4f\xf5\x63\xa1\x13\xf5\x63\xa1\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x01\xf5\x63\xa1\x54\xf5\x63\xa1\x15\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x21\xf5\x63\xa1\x5a\xf5\x63\xa1\x55\xf5\x63\xa1\x5f\xf5\x63\xa1\
\x42\xf5\x63\xa1\x57\xf5\x63\xa1\x07\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x2d\xf5\x63\xa1\x59\xf5\x63\xa1\x2f\xf5\x63\xa1\
\x0f\xf5\x63\xa1\x0c\xf5\x63\xa1\x19\xf5\x63\xa1\x0d\xf5\x63\xa1\
\x0b\xf5\x63\xa1\x56\xf5\x63\xa1\x53\xf5\x63\xa1\x2b\xf5\x63\xa1\
\x6c\xf5\x63\xa1\x6f\xf5\x63\xa1\x43\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x01\xf5\x63\xa1\x19\xf5\x63\xa1\x3f\xf5\x64\xa2\
\x61\xf4\x64\xa2\x6a\xf5\x63\xa1\x51\xf5\x64\xa2\x68\xf5\x64\xa2\
\x69\xf4\x64\xa2\x8d\xf4\x65\xa3\x30\xf4\x65\xa3\x76\xf4\x65\xa2\
\x7c\xf4\x65\xa2\x70\xf5\x63\xa1\x36\xf5\x64\xa2\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xef\x72\xae\x00\xf2\x6a\xa7\x00\xf1\x6d\xaa\
\x3c\xf1\x6c\xa9\x7c\xf1\x6c\xa9\x51\xf1\x6c\xa9\x79\xf1\x6d\xa9\
\x46\xf2\x6a\xa7\x38\xf1\x6d\xa9\x62\xf2\x6b\xa8\x75\xf1\x6c\xa9\
\x70\xf3\x69\xa6\x2b\xd8\xaa\xe0\x00\xf4\x65\xa3\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xed\x77\xb3\x00\xed\x77\xb3\x1d\xee\x74\xb0\
\x5c\xf0\x6f\xab\x1f\xf1\x6c\xa9\x09\xf0\x6f\xab\x1b\xee\x73\xaf\
\x5a\xed\x77\xb3\x23\xf0\x70\xad\x28\xee\x73\xaf\x67\xef\x72\xae\
\x36\xeb\x7a\xb6\x00\xf1\x6e\xab\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xe7\x85\xc0\x00\xf7\x5c\x9a\x00\xea\x7f\xb9\x51\xeb\x7b\xb7\
\x21\xeb\x7c\xb7\x00\xe8\x80\xbf\x00\xec\x7b\xb6\x00\xeb\x7b\xb6\
\x1a\xea\x7f\xb9\x55\xe6\x8a\xc3\x02\xed\x76\xb1\x08\xed\x76\xb2\
\x02\xed\x76\xb2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xe5\x8b\xc5\x00\xe5\x8c\xc5\x25\xe7\x87\xc1\x57\xe9\x81\xbb\
\x04\xe9\x82\xbd\x00\x00\x00\x00\x00\xe9\x81\xbc\x00\xeb\x7c\xb8\
\x02\xe7\x87\xc1\x53\xe5\x8b\xc5\x2c\xe6\x8a\xc4\x00\xe3\x92\xca\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xe2\x93\xcb\x00\xe3\x93\xcb\x45\xe1\x95\xce\x58\xe0\x9a\xd2\
\x09\xe0\x99\xd1\x00\xdf\x9c\xd4\x04\xe0\x9a\xd1\x17\xe0\x99\xd1\
\x2c\xe1\x96\xce\x54\xe2\x93\xcb\x4d\xe2\x94\xcc\x00\xe3\x92\xca\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xdf\x9c\xd3\x00\xdf\x9c\xd3\x18\xde\x9d\xd5\x94\xde\xa0\xd7\
\x38\xdc\xa4\xdb\x25\xde\x9f\xd6\x56\xde\x9e\xd5\x70\xde\x9d\xd5\
\x8b\xdf\x9d\xd4\x9b\xdf\x9c\xd4\x20\xdf\x9d\xd4\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xda\xa9\xdf\x00\xda\xa9\xe0\x10\xda\xa8\xdf\x73\xda\xa8\xde\
\x64\xda\xa8\xde\x7e\xda\xa9\xdf\x77\xda\xaa\xe0\x7f\xdb\xa7\xde\
\x29\xda\xa8\xdf\x51\xda\xa9\xdf\x18\xda\xa9\xdf\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd7\xb1\xe7\x00\xd6\xb1\xe7\x09\xd5\xb4\xea\x59\xd6\xb3\xe9\
\x0c\xd9\xab\xe1\x07\xd9\xad\xe3\x16\xd7\xb0\xe6\x3a\xd5\xb4\xea\
\x47\xd5\xb5\xea\x5b\xd6\xb2\xe8\x0d\xd6\xb1\xe7\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x00\xd3\xba\xef\x3e\xd3\xba\xef\
\x43\xd3\xbb\xef\x04\xd3\xbb\xf0\x00\xd1\xbe\xf2\x02\xd3\xba\xef\
\x41\xd3\xba\xef\x45\xd3\xbb\xf0\x00\xd3\xba\xef\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\x06\xd3\xba\xef\
\x4b\xd3\xba\xef\x64\xd3\xba\xef\x55\xd3\xba\xef\x62\xd3\xba\xef\
\x50\xd3\xba\xef\x08\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x3f\x00\
\x00\x80\x1f\x00\x00\x00\x03\x00\x00\x1f\x01\x00\x00\x80\x01\x00\
\x00\x80\x01\x00\x00\xf0\x03\x00\x00\xe0\x07\x00\x00\xe7\x07\x00\
\x00\xc7\x1f\x00\x00\xc4\x1f\x00\x00\xc0\x1f\x00\x00\xc0\x1f\x00\
\x00\xc0\x1f\x00\x00\xe2\x3f\x00\x00\xe0\x3f\x00\x00\x28\x00\x00\
\x00\x18\x00\x00\x00\x30\x00\x00\x00\x01\x00\x20\x00\x00\x00\x00\
\x00\x00\x09\x00\x00\x86\x0b\x00\x00\x86\x0b\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x06\xf5\x63\xa1\
\x84\xf5\x63\xa1\x9f\xf5\x63\xa1\x87\xf5\x63\xa1\x2b\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x08\xf5\x63\xa1\x0d\xf5\x63\xa1\x0d\xf5\x63\xa1\x0d\xf5\x63\xa1\
\x0d\xf5\x63\xa1\x0d\xf5\x63\xa1\x0c\xf5\x63\xa1\x1a\xf5\x63\xa1\
\x93\xf5\x63\xa1\x2f\xf5\x63\xa1\x68\xf5\x63\xa1\x9f\xf5\x63\xa1\
\x1b\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x0e\xf5\x63\xa1\
\x83\xf5\x63\xa1\x80\xf5\x63\xa1\x7d\xf5\x63\xa1\x7d\xf5\x63\xa1\
\x7d\xf5\x63\xa1\x7d\xf5\x63\xa1\x7d\xf5\x63\xa1\x7d\xf5\x63\xa1\
\xa1\xf5\x63\xa1\x90\xf5\x63\xa1\x1e\xf5\x63\xa1\x6f\xf5\x63\xa1\
\x5b\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x17\xf5\x63\xa1\
\x7e\xf5\x63\xa1\x12\xf5\x63\xa1\x0d\xf5\x63\xa1\x0d\xf5\x63\xa1\
\x0d\xf5\x63\xa1\x0d\xf5\x63\xa1\x0d\xf5\x63\xa1\x0d\xf5\x63\xa1\
\x09\xf5\x63\xa1\x3c\xf5\x63\xa1\x82\xf5\x63\xa1\x29\xf5\x63\xa1\
\x75\xf5\x63\xa1\x50\xf5\x63\xa1\x01\xf5\x63\xa1\x05\xf5\x63\xa1\
\x3a\xf5\x63\xa1\x1d\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x17\xf5\x63\xa1\
\x7a\xf5\x63\xa1\x05\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x3a\xf5\x63\xa1\x82\xf5\x63\xa1\
\x29\xf5\x63\xa1\x75\xf5\x63\xa1\x58\xf5\x63\xa1\x67\xf5\x63\xa1\
\x84\xf5\x63\xa1\x80\xf5\x63\xa1\x12\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x0f\xf5\x63\xa1\
\x81\xf5\x63\xa1\x11\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x3a\xf5\x63\xa1\
\x81\xf5\x63\xa1\x29\xf5\x63\xa1\x75\xf5\x63\xa1\x58\xf5\x63\xa1\
\x04\xf5\x63\xa1\x4f\xf5\x63\xa1\x6f\xf5\x63\xa1\x05\xf5\x63\xa1\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x59\xf5\x63\xa1\x6f\xf5\x63\xa1\x1d\xf5\x63\xa1\x03\xf5\x63\xa1\
\x00\xf3\x61\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x49\xf5\x63\xa1\x87\xf5\x63\xa1\x08\xf5\x63\xa1\x15\xf5\x63\xa1\
\x60\xf5\x63\xa1\x6f\xf5\x63\xa1\x72\xf5\x63\xa1\x50\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x06\xf5\x63\xa1\x4b\xf5\x63\xa1\x7d\xf5\x63\xa1\x78\xf5\x63\xa1\
\x4f\xf5\x63\xa1\x27\xf5\x63\xa1\x46\xf5\x63\xa1\x60\xf5\x63\xa1\
\x5a\xf5\x63\xa1\x34\xf5\x63\xa1\x32\xf5\x63\xa1\x68\xf5\x63\xa1\
\xa7\xf5\x63\xa1\x59\xf5\x63\xa1\x14\xf5\x63\xa1\x81\xf5\x63\xa1\
\x7a\xf5\x63\xa1\x97\xf5\x63\xa1\x4d\xf5\x63\xa1\x8f\xf5\x63\xa1\
\x09\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x06\xf5\x63\xa1\x25\xf5\x63\xa1\
\x56\xf4\x65\xa3\xa3\xf5\x64\xa2\x65\xf5\x62\xa1\x3a\xf5\x63\xa1\
\x40\xf4\x65\xa2\x8c\xf4\x65\xa2\x86\xf5\x64\xa2\x6d\xf4\x65\xa3\
\x98\xf4\x65\xa3\x07\xf4\x66\xa4\x5b\xf4\x66\xa3\x81\xf4\x65\xa3\
\x7f\xf4\x66\xa3\x66\xf4\x65\xa3\x83\xf5\x63\xa1\x2e\xf4\x67\xa4\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x6c\xa9\x00\xf2\x6b\xa8\
\x02\xf2\x6b\xa8\x82\xf1\x6c\xa9\x67\xf1\x6c\xa9\x64\xf1\x6d\xa9\
\x5d\xf2\x6c\xa8\x87\xf2\x6b\xa8\x46\xf3\x66\xa4\x0b\xf2\x6a\xa7\
\x72\xf1\x6c\xa9\x61\xf2\x6a\xa7\x62\xf2\x6a\xa7\x95\xf2\x6b\xa8\
\x64\xf2\x6a\xa7\x7e\xf3\x67\xa5\x1a\xf3\x68\xa6\x00\xf3\x68\xa5\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xef\x72\xaf\x00\xb0\xf6\xff\x00\xef\x72\xae\
\x43\xf0\x6f\xac\x94\xf1\x6e\xaa\x56\xf1\x6d\xa9\x32\xf1\x6d\xaa\
\x3a\xf0\x6e\xab\x75\xf0\x70\xad\x85\xef\x73\xaf\x19\xf2\x6a\xa7\
\x07\xf0\x6e\xab\x5b\xf0\x70\xad\x7c\xef\x71\xae\x4d\xf0\x70\xac\
\x83\xf1\x6d\xaa\x1d\xf1\x6e\xaa\x00\xf1\x6f\xac\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xed\x78\xb4\x00\xed\x77\xb3\x00\xed\x77\xb3\x32\xee\x75\xb1\
\x81\xef\x72\xae\x16\xef\x72\xae\x00\xee\x75\xb1\x00\xef\x72\xaf\
\x00\xe0\x98\xcc\x00\xee\x73\xaf\x3f\xed\x76\xb2\x7c\xec\x79\xb5\
\x0a\xf7\x5f\x9d\x00\xef\x73\xaf\x3c\xee\x74\xb0\x88\xee\x73\xaf\
\x33\xee\x74\xb0\x00\xee\x73\xaf\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xea\x7f\xba\x00\xe9\x82\xbc\x03\xeb\x7d\xb8\x78\xec\x7b\xb6\
\x2d\xec\x7b\xb6\x00\xec\x7a\xb5\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xed\x78\xb4\x00\xe6\x8a\xc3\x00\xeb\x7c\xb7\x67\xeb\x7d\xb8\
\x43\xeb\x7d\xb8\x00\xed\x77\xb3\x00\xed\x77\xb3\x05\xec\x78\xb4\
\x01\xec\x79\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\x84\xbe\
\x00\xe8\x85\xbf\x00\xe7\x85\xbf\x21\xe8\x83\xbe\x7f\xe9\x81\xbb\
\x06\xe9\x81\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xe9\x82\xbd\x00\xe9\x82\xbd\x30\xe8\x84\xbe\
\x75\xe4\x8e\xc7\x02\xe6\x88\xc2\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe5\x8d\xc6\
\x00\xe4\x8e\xc7\x0f\xe5\x8b\xc4\x7e\xe6\x88\xc2\x47\xe6\x8a\xc3\
\x00\xe7\x85\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xe7\x88\xc2\x00\xe7\x87\xc1\x09\xe6\x89\xc3\
\x78\xe5\x8c\xc5\x52\xe7\x87\xc0\x00\xe4\x8f\xc8\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe3\x91\xca\
\x00\xe3\x91\xca\x2e\xe3\x92\xca\x7d\xe1\x97\xcf\x10\xe1\x96\xce\
\x01\xe2\x95\xcd\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe3\x91\xc9\
\x00\xe7\x80\xb8\x00\xe1\x96\xce\x04\xe1\x98\xd0\x09\xe2\x93\xcb\
\x2b\xe3\x91\xca\x81\xe3\x91\xca\x05\xe3\x91\xca\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe1\x96\xce\
\x00\xe2\x95\xcd\x0b\xe0\x98\xd0\x85\xe0\x99\xd1\xa7\xe0\x9a\xd2\
\x29\xe0\x9a\xd2\x00\xde\x9e\xd5\x00\xde\x9d\xd5\x0c\xdf\x9c\xd4\
\x3a\xe0\x9a\xd2\x63\xe0\x99\xd1\x7c\xe0\x99\xd1\x94\xe0\x99\xd1\
\xb0\xe1\x97\xcf\x4c\xdf\x9b\xd3\x00\xe2\x94\xcc\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xde\x9f\xd7\x00\xde\x9f\xd7\x4b\xde\x9f\xd6\xa1\xde\xa0\xd7\
\x4b\xe8\x83\xbd\x00\xdd\xa2\xd9\x3e\xde\xa0\xd7\x7c\xde\x9d\xd5\
\x6b\xde\x9f\xd6\x73\xde\xa0\xd7\x93\xde\x9e\xd6\x8f\xde\x9f\xd6\
\xa6\xde\x9f\xd7\x15\xde\x9f\xd7\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xdb\xa7\xde\x00\xdb\xa7\xde\x4b\xdb\xa7\xdd\x8e\xda\xa7\xde\
\x8e\xda\xa8\xdf\x83\xdb\xa7\xde\xbc\xda\xa8\xde\x6f\xda\xaa\xe0\
\x73\xdb\xa6\xdd\x7e\xdd\xa1\xd8\x27\xdc\xa3\xda\x0c\xdb\xa7\xde\
\x7b\xdb\xa7\xde\x17\xdb\xa7\xde\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd8\xaf\xe5\x00\xd8\xae\xe4\x46\xd8\xaf\xe5\x5f\xda\xa9\xdf\
\x1c\xd9\xaa\xe0\x27\xd9\xab\xe1\x33\xd9\xab\xe2\x4e\xd9\xac\xe3\
\x83\xd8\xaf\xe5\x86\xd6\xb2\xe8\x46\xd6\xb2\xe8\x10\xd8\xaf\xe5\
\x7d\xd8\xae\xe4\x12\xd8\xae\xe4\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd5\xb5\xea\x00\xd5\xb5\xea\x27\xd4\xb7\xec\x78\xd2\xbc\xf1\
\x03\xd3\xba\xef\x00\x00\x00\x00\x00\xd6\xb2\xe7\x00\xd6\xb2\xe7\
\x01\xd7\xb1\xe7\x1c\xd5\xb4\xe9\x5e\xd4\xb7\xec\x49\xd5\xb6\xeb\
\x77\xd6\xb2\xe7\x03\xd6\xb3\xe9\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x04\xd3\xba\xef\x74\xd3\xba\xef\
\x41\xd3\xba\xef\x00\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x08\xd3\xba\xef\x75\xd3\xba\xef\
\x3f\xd3\xba\xef\x00\xd1\xbf\xf4\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\x1a\xd3\xba\xef\
\x81\xd3\xba\xef\x4c\xd3\xba\xef\x0e\xd3\xba\xef\x01\xd3\xba\xef\
\x03\xd3\xba\xef\x1e\xd3\xba\xef\x71\xd3\xba\xef\x60\xd3\xba\xef\
\x03\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x18\xd3\xba\xef\x72\xd3\xba\xef\x91\xd3\xba\xef\x87\xd3\xba\xef\
\x8b\xd3\xba\xef\x8f\xd3\xba\xef\x4e\xd3\xba\xef\x05\xd3\xba\xef\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xff\x83\xff\x00\xc0\x01\xff\x00\x80\x01\xff\
\x00\x80\x00\x0f\x00\x8f\xf0\x07\x00\x8f\xf8\x03\x00\xc3\xfc\x03\
\x00\xc0\x00\x01\x00\xf0\x00\x03\x00\xfc\x00\x07\x00\xfc\x00\x0f\
\x00\xf8\xf1\x1f\x00\xf1\xf9\x9f\x00\xf1\xf8\xff\x00\xe3\xf8\xff\
\x00\xe1\xf0\x7f\x00\xe1\x80\xff\x00\xf1\x00\xff\x00\xf0\x00\xff\
\x00\xf0\x00\xff\x00\xf1\xc0\xff\x00\xf1\xf1\xff\x00\xf8\x01\xff\
\x00\xfc\x03\xff\x00\x28\x00\x00\x00\x20\x00\x00\x00\x40\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x10\x00\x00\x86\x0b\x00\
\x00\x86\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x3e\xf5\x63\xa1\xbc\xf5\x63\xa1\xc2\xf5\x63\xa1\
\x73\xf5\x63\xa1\x3b\xf5\x63\xa1\x01\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x72\xf5\x63\xa1\x72\xf5\x63\xa1\x2e\xf5\x63\xa1\
\x99\xf5\x63\xa1\xba\xf5\x63\xa1\x33\xf5\x63\xa1\x02\xf5\x63\xa1\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x07\xf5\x63\xa1\x26\xf5\x63\xa1\
\x2c\xf5\x63\xa1\x2b\xf5\x63\xa1\x2b\xf5\x63\xa1\x2b\xf5\x63\xa1\
\x2b\xf5\x63\xa1\x2b\xf5\x63\xa1\x2b\xf5\x63\xa1\x2b\xf5\x63\xa1\
\x29\xf5\x63\xa1\x75\xf5\x63\xa1\xb6\xf5\x63\xa1\x22\xf5\x63\xa1\
\x14\xf5\x63\xa1\x95\xf5\x63\xa1\xae\xf5\x63\xa1\x0a\xf5\x63\xa1\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x38\xf5\x63\xa1\xba\xf5\x63\xa1\
\x98\xf5\x63\xa1\x98\xf5\x63\xa1\x98\xf5\x63\xa1\x98\xf5\x63\xa1\
\x98\xf5\x63\xa1\x98\xf5\x63\xa1\x98\xf5\x63\xa1\x98\xf5\x63\xa1\
\x98\xf5\x63\xa1\x99\xf5\x63\xa1\xb2\xf5\x63\xa1\xa7\xf5\x63\xa1\
\x23\xf5\x63\xa1\x17\xf5\x63\xa1\xa7\xf5\x63\xa1\x49\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x44\xf5\x63\xa1\x8a\xf5\x63\xa1\
\x05\xf5\x63\xa1\x06\xf5\x63\xa1\x06\xf5\x63\xa1\x06\xf5\x63\xa1\
\x06\xf5\x63\xa1\x06\xf5\x63\xa1\x06\xf5\x63\xa1\x06\xf5\x63\xa1\
\x06\xf5\x63\xa1\x06\xf5\x63\xa1\x05\xf5\x63\xa1\x56\xf5\x63\xa1\
\xa4\xf5\x63\xa1\x23\xf5\x63\xa1\x2e\xf5\x63\xa1\xa8\xf5\x63\xa1\
\x47\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x15\xf5\x63\xa1\x18\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x44\xf5\x63\xa1\x87\xf5\x63\xa1\
\x00\xf6\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x01\xf5\x63\xa1\
\x56\xf5\x63\xa1\xa4\xf5\x63\xa1\x23\xf5\x63\xa1\x2e\xf5\x63\xa1\
\xa8\xf5\x63\xa1\x47\xf5\x63\xa1\x00\xf5\x63\xa1\x39\xf5\x63\xa1\
\xa6\xf5\x63\xa1\xa8\xf5\x63\xa1\x22\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x43\xf5\x63\xa1\x89\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x01\xf5\x63\xa1\x56\xf5\x63\xa1\xa4\xf5\x63\xa1\x23\xf5\x63\xa1\
\x2e\xf5\x63\xa1\xa6\xf5\x63\xa1\x82\xf5\x63\xa1\xa6\xf5\x63\xa1\
\x40\xf5\x63\xa1\x5f\xf5\x63\xa1\x96\xf5\x63\xa1\x0e\xf5\x63\xa1\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x2c\xf5\x63\xa1\x9f\xf5\x63\xa1\
\x0d\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x01\xf5\x63\xa1\x56\xf5\x63\xa1\xa4\xf5\x63\xa1\
\x22\xf5\x63\xa1\x2f\xf5\x63\xa1\x8d\xf5\x63\xa1\x2a\xf5\x63\xa1\
\x00\xf5\x63\xa1\x04\xf5\x63\xa1\x80\xf5\x63\xa1\x77\xf5\x63\xa1\
\x03\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x05\xf5\x63\xa1\x8a\xf5\x63\xa1\
\x73\xf5\x63\xa1\x0b\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x55\xf5\x63\xa1\
\xa8\xf5\x63\xa1\x19\xff\xae\xa4\x00\xf5\x63\xa1\x04\xf5\x63\xa1\
\x38\xf5\x63\xa1\x7e\xf5\x63\xa1\x3a\xf5\x63\xa1\x9b\xf5\x63\xa1\
\x52\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x17\xf5\x63\xa1\
\x89\xf5\x63\xa1\xa0\xf5\x63\xa1\x6e\xf5\x63\xa1\x33\xf5\x63\xa1\
\x0b\xf4\x68\xa5\x00\xf4\x64\xa3\x00\xf5\x63\xa1\x06\xf5\x63\xa1\
\x13\xf5\x63\xa1\x15\xf5\x63\xa1\x08\xf5\x5f\x9d\x00\xf5\x64\xa1\
\x00\xf5\x63\xa1\x09\xf5\x63\xa1\x2e\xf5\x63\xa1\x77\xf5\x63\xa1\
\xab\xf5\x63\xa1\x17\xf5\x63\xa1\x0f\xf5\x63\xa1\x83\xf5\x63\xa1\
\x9c\xf5\x63\xa1\xa4\xf5\x63\xa1\x92\xf5\x63\xa1\x2f\xf5\x63\xa1\
\xaf\xf5\x63\xa1\x21\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x06\xf5\x63\xa1\x2f\xf5\x63\xa1\x6a\xf5\x63\xa1\x9a\xf5\x63\xa1\
\x9e\xf5\x63\xa1\x72\xf5\x64\xa1\x68\xf5\x63\xa1\x97\xf5\x63\xa1\
\xa0\xf5\x63\xa1\xa0\xf5\x63\xa1\x9a\xf5\x63\xa1\x6f\xf5\x64\xa1\
\x6c\xf5\x63\xa1\x9a\xf5\x63\xa1\xc2\xf5\x63\xa1\xd1\xf5\x63\xa1\
\x2f\xf5\x63\xa1\x00\xf5\x63\xa1\x65\xf5\x63\xa1\x88\xf5\x63\xa1\
\x4a\xf5\x63\xa1\xa4\xf5\x63\xa1\x4d\xf5\x64\xa1\x6a\xf5\x63\xa1\
\xa4\xf5\x63\xa1\x1c\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa2\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x08\xf5\x63\xa1\
\x2a\xf4\x65\xa3\x83\xf4\x65\xa3\xb4\xf5\x63\xa1\x32\xf6\x61\x9f\
\x13\xf6\x61\x9f\x11\xf5\x63\xa1\x29\xf4\x65\xa3\xa4\xf4\x65\xa3\
\x96\xf5\x63\xa1\x2d\xf4\x66\xa3\x8e\xf4\x66\xa4\x7b\xf4\x66\xa4\
\x00\xf3\x67\xa5\x1a\xf4\x66\xa4\xa7\xf4\x66\xa4\x5f\xf4\x66\xa3\
\xa5\xf4\x65\xa3\x4a\xf3\x67\xa4\x87\xf4\x65\xa3\x8b\xf5\x63\xa1\
\x19\xf5\x64\xa2\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf2\x6a\xa7\
\x00\xf2\x6a\xa7\x3e\xf2\x6a\xa7\x95\xf1\x6d\xa9\x35\xf2\x6c\xa9\
\x55\xf2\x6c\xa9\x58\xf1\x6d\xa9\x37\xf2\x6a\xa7\x85\xf2\x6a\xa7\
\x57\xf2\x6a\xa7\x00\xf3\x68\xa6\x34\xf2\x6a\xa7\xa4\xf2\x6b\xa8\
\x5f\xf2\x69\xa6\x28\xf2\x6a\xa7\xb0\xf2\x6a\xa7\xb5\xf3\x69\xa6\
\x49\xf2\x6b\xa8\x8b\xf3\x69\xa6\x78\xf3\x67\xa5\x09\xf3\x67\xa5\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x70\xac\x00\xee\x74\xaf\
\x02\xf0\x6f\xab\x67\xf1\x6e\xaa\xd0\xf1\x6d\xaa\x96\xf1\x6d\xa9\
\x79\xf1\x6c\xa9\x77\xf1\x6d\xaa\x90\xf1\x6e\xaa\xcb\xf0\x6f\xab\
\x80\xef\x72\xae\x06\xf1\x6d\xaa\x00\xf2\x6c\xa9\x1f\xf1\x6d\xaa\
\x94\xf0\x6f\xab\x84\xf1\x6d\xaa\x39\xf1\x6c\xa9\x35\xf0\x6f\xab\
\x7a\xf1\x6d\xaa\x81\xf2\x6b\xa8\x07\xf2\x6c\xa9\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xee\x74\xb0\x00\xee\x75\xb1\x03\xef\x73\xaf\
\x69\xef\x71\xae\x9e\xf0\x70\xac\x33\xf0\x6f\xab\x05\xf0\x6e\xaa\
\x00\xf0\x6f\xab\x00\xf0\x6f\xab\x03\xf0\x70\xac\x28\xef\x71\xad\
\x92\xef\x73\xaf\x7d\xee\x74\xb0\x08\xef\x71\xae\x00\xf0\x6f\xac\
\x0c\xf0\x71\xad\x73\xef\x72\xae\x9a\xee\x73\xaf\x73\xef\x71\xae\
\x99\xf0\x70\xac\x11\xf0\x70\xac\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xed\x78\xb3\x00\xed\x77\xb2\x00\xed\x77\xb3\x4a\xed\x76\xb2\
\x9f\xee\x74\xb0\x18\xee\x74\xb0\x00\xee\x76\xb0\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xee\x74\xb0\x00\xee\x73\xaf\
\x0d\xed\x76\xb1\x91\xed\x77\xb3\x61\xee\x75\xb1\x00\xee\x74\xb0\
\x00\xef\x72\xae\x03\xee\x74\xb0\x4d\xee\x75\xb1\x94\xee\x74\xb0\
\x27\xee\x74\xb0\x00\xee\x76\xb0\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xeb\x7d\xb8\x00\xea\x7e\xb8\x0b\xeb\x7c\xb7\x9d\xec\x7a\xb5\
\x3a\xec\x7b\xb6\x00\xed\x79\xb4\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xec\x7a\xb5\
\x00\xec\x7a\xb5\x26\xeb\x7c\xb7\xa4\xeb\x7d\xb8\x18\xeb\x7d\xb8\
\x00\xeb\x7c\xb7\x00\xeb\x7b\xb6\x00\xec\x7a\xb5\x02\xec\x7a\xb5\
\x00\xeb\x7c\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xe9\x81\xbc\x00\xe9\x81\xbc\x32\xe9\x81\xbb\x99\xea\x7f\xb9\
\x07\xea\x7f\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xea\x7f\xb9\
\x00\xf3\x69\xa4\x00\xe9\x81\xbb\x89\xe9\x81\xbc\x4a\xe9\x81\xbc\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x89\xc3\
\x00\xe5\x8b\xc4\x06\xe7\x87\xc1\x7c\xe7\x86\xc0\x71\xe7\x87\xc1\
\x00\xe8\x84\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdb\xa2\xd7\
\x00\xe7\x86\xc0\x00\xe7\x85\xc0\x57\xe7\x87\xc1\x91\xe6\x8a\xc3\
\x0c\xe6\x89\xc3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe4\x8e\xc7\x00\xef\x74\xae\
\x00\xe5\x8c\xc5\x6c\xe6\x8a\xc4\x9a\xe6\x89\xc3\x20\xe6\x89\xc3\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xe6\x89\xc3\x00\xe6\x89\xc3\x14\xe6\x8a\xc4\x8d\xe5\x8c\xc5\
\x81\xe4\x8f\xc8\x04\xe4\x8e\xc7\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe3\x91\xca\x00\xe3\x91\xca\
\x0c\xe3\x91\xc9\xa1\xe3\x90\xc9\x27\xe3\x90\xc9\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xe4\x90\xc9\x00\xe4\x90\xc9\x16\xe3\x91\xc9\
\xa4\xe3\x91\xca\x1a\xe3\x91\xca\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe2\x94\xcc\x00\xe3\x92\xca\
\x03\xe1\x95\xce\x84\xe1\x97\xcf\x9c\xe1\x98\xd0\x71\xe0\x98\xd0\
\x12\xe0\x98\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe2\x96\xce\
\x00\xdf\x9b\xd2\x00\xe0\x9a\xd1\x06\xe0\x99\xd1\x21\xe0\x99\xd0\
\x42\xe0\x98\xd0\x5b\xe1\x98\xd0\x6a\xe1\x97\xcf\x87\xe1\x96\xce\
\x96\xe2\x93\xcc\x0b\xe2\x94\xcc\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd7\xae\xe4\x00\xe0\x99\xd1\
\x00\xe0\x99\xd1\x17\xdf\x9b\xd2\xaf\xdf\x9b\xd2\xb8\xdf\x9b\xd3\
\x53\xdf\x9b\xd3\x00\xde\x9d\xd5\x00\xe2\x9c\xd4\x00\xde\x9e\xd6\
\x1a\xde\x9d\xd5\x61\xdf\x9c\xd3\x98\xdf\x9b\xd2\x9d\xe0\x9a\xd2\
\x95\xdf\x9b\xd2\xa0\xdf\x9b\xd3\xbc\xdf\x9b\xd2\xcd\xe0\x99\xd1\
\x26\xe0\x9a\xd2\x00\xdf\x9c\xd4\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdd\xa1\xd8\
\x00\xdd\xa1\xd8\x06\xdd\xa0\xd7\x93\xdd\xa0\xd7\x93\xdd\xa0\xd7\
\x66\xdd\xa1\xd8\x00\xdc\xa4\xda\x0a\xdd\xa2\xd9\x5e\xdd\xa0\xd7\
\xa3\xde\x9e\xd6\x79\xde\x9e\xd5\x3f\xdd\xa2\xd9\x62\xdd\xa1\xd8\
\x9c\xdd\xa0\xd7\x98\xde\x9f\xd7\x8a\xdd\xa0\xd7\xb1\xdd\xa1\xd8\
\x10\xdd\xa0\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdb\xa6\xdd\
\x00\xdb\xa6\xdd\x06\xdb\xa6\xdd\x94\xdb\xa6\xdc\x87\xdb\xa7\xdd\
\xab\xda\xa8\xdf\x6e\xdb\xa7\xde\xa6\xdb\xa6\xdd\xcd\xdb\xa7\xdd\
\x58\xda\xaa\xe0\x25\xdb\xa7\xde\x88\xdc\xa5\xdb\x90\xdc\xa3\xda\
\x38\xdd\xa1\xd8\x08\xdb\xa6\xdd\x18\xdb\xa6\xdd\x9f\xdb\xa6\xdd\
\x12\xdb\xa6\xdd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xab\xe1\
\x00\xd9\xab\xe1\x05\xd9\xac\xe2\x95\xd9\xab\xe1\x49\xda\xa9\xe0\
\x55\xda\xa9\xe0\x5f\xda\xa9\xe0\x6e\xda\xaa\xe0\x7e\xd9\xab\xe1\
\x93\xd9\xac\xe2\xb5\xd9\xac\xe2\xcf\xd8\xae\xe4\x56\xd6\xb2\xe8\
\x0b\xd8\xad\xe3\x00\xd9\xac\xe2\x1b\xd9\xac\xe2\x9f\xd9\xac\xe2\
\x10\xd9\xac\xe2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd7\xaf\xe5\
\x00\xd1\xc0\xf3\x00\xd7\xb1\xe7\x89\xd6\xb2\xe8\x47\xd6\xb2\xe8\
\x00\x00\x00\x00\x00\xd5\xb7\xed\x00\xdb\xa5\xdb\x00\xd8\xae\xe4\
\x04\xd8\xae\xe4\x18\xd8\xaf\xe5\x45\xd7\xb0\xe6\x8e\xd6\xb2\xe8\
\x9e\xd5\xb4\xea\x35\xd6\xb2\xe8\x2e\xd7\xb1\xe7\x99\xd7\xb0\xe6\
\x07\xd7\xb0\xe6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd5\xb5\xeb\
\x00\xd4\xb7\xec\x00\xd4\xb7\xec\x5a\xd4\xb8\xed\x81\xe5\x8d\xc2\
\x00\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd6\xb3\xe9\x00\xd6\xb3\xe9\x04\xd5\xb5\xea\
\x43\xd5\xb5\xeb\x4e\xd4\xb8\xed\x6a\xd4\xb7\xec\x73\xd4\xb8\xed\
\x00\xd5\xb5\xea\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x17\xd3\xba\xef\xa5\xd3\xba\xef\
\x35\xd3\xba\xef\x00\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xce\xc9\xfd\x00\xd3\xba\xef\
\x00\xd3\xba\xef\x22\xd3\xba\xef\xa7\xd3\xba\xef\x27\xd3\xba\xef\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x00\xd3\xba\xef\x48\xd3\xba\xef\
\xa6\xd3\xba\xef\x2b\xd3\xba\xef\x00\xd3\xba\xef\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x1d\xd3\xba\xef\x9c\xd3\xba\xef\x5e\xd3\xba\xef\x00\xd3\xba\xef\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\x01\xd3\xba\xef\
\x4e\xd3\xba\xef\xa7\xd3\xba\xef\x66\xd3\xba\xef\x24\xd3\xba\xef\
\x0d\xd3\xba\xef\x0c\xd3\xba\xef\x1e\xd3\xba\xef\x5a\xd3\xba\xef\
\xa5\xd3\xba\xef\x61\xd3\xba\xef\x04\xd3\xba\xef\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xf0\
\x00\xd3\xba\xef\x2a\xd3\xba\xef\x89\xd3\xba\xef\xb5\xd3\xba\xef\
\xb5\xd3\xba\xef\xb5\xd3\xba\xef\xb7\xd3\xba\xef\x94\xd3\xba\xef\
\x37\xd3\xba\xef\x01\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xf8\x1f\
\xff\xff\xf8\x0f\xff\xc0\x00\x0f\xff\xc0\x00\x0f\xff\xc0\x00\x07\
\x3f\xcf\xfe\x02\x1f\xcf\xff\x00\x0f\xc7\xff\x80\x87\xc3\xff\xe2\
\x07\xe0\x61\x80\x03\xf0\x00\x04\x03\xfe\x00\x08\x07\xff\x80\x40\
\x0f\xff\x00\x20\x1f\xfe\x0c\x10\x3f\xfe\x3f\x18\x7f\xfc\x7f\x8e\
\xff\xfc\x7f\xcf\xff\xf8\xff\xc7\xff\xf8\xff\xc3\xff\xf1\xff\xe3\
\xff\xf0\x7c\x03\xff\xf8\x70\x07\xff\xf8\x40\x07\xff\xf8\x00\x07\
\xff\xf8\x00\x47\xff\xfc\xf0\x07\xff\xfc\xfe\x0f\xff\xfc\x7f\x8f\
\xff\xfe\x3f\x1f\xff\xfe\x00\x1f\xff\xff\x80\x3f\xff\x28\x00\x00\
\x00\x30\x00\x00\x00\x60\x00\x00\x00\x01\x00\x20\x00\x00\x00\x00\
\x00\x00\x24\x00\x00\x86\x0b\x00\x00\x86\x0b\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x3a\xf5\x63\xa1\xba\xf5\x63\xa1\
\xe7\xf5\x63\xa1\xd5\xf5\x63\xa1\x38\xf5\x63\xa1\x14\xf5\x63\xa1\
\x0c\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x88\xf5\x63\xa1\xc9\xf5\x63\xa1\
\x5e\xf5\x63\xa1\xa6\xf5\x63\xa1\xd9\xf5\x63\xa1\xcf\xf5\x63\xa1\
\x77\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x06\xf5\x63\xa1\xb3\xf5\x63\xa1\x86\xf5\x63\xa1\
\x00\xf5\x63\xa1\x14\xf5\x63\xa1\x5a\xf5\x63\xa1\xc2\xf5\x63\xa1\
\xbc\xf5\x63\xa1\x50\xf5\x63\xa1\x12\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x03\xf5\x63\xa1\x86\xf5\x63\xa1\xcf\xf5\x63\xa1\
\x39\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x74\xf5\x63\xa1\
\xd5\xf5\x63\xa1\xec\xf5\x63\xa1\x40\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x03\xf5\x63\xa1\x60\xf5\x63\xa1\x97\xf5\x63\xa1\
\x96\xf5\x63\xa1\x96\xf5\x63\xa1\x96\xf5\x63\xa1\x96\xf5\x63\xa1\
\x96\xf5\x63\xa1\x96\xf5\x63\xa1\x96\xf5\x63\xa1\x96\xf5\x63\xa1\
\x96\xf5\x63\xa1\x96\xf5\x63\xa1\x96\xf5\x63\xa1\x96\xf5\x63\xa1\
\x96\xf5\x63\xa1\x95\xf5\x63\xa1\xa4\xf5\x63\xa1\xf1\xf5\x63\xa1\
\xd6\xf5\x63\xa1\x39\xf5\x63\xa1\x00\xf5\x63\xa1\x04\xf5\x63\xa1\
\x3b\xf5\x63\xa1\xdf\xf5\x63\xa1\x47\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x0a\xf5\x63\xa1\xbe\xf5\x63\xa1\xc3\xf5\x63\xa1\
\x95\xf5\x63\xa1\x97\xf5\x63\xa1\x97\xf5\x63\xa1\x97\xf5\x63\xa1\
\x97\xf5\x63\xa1\x97\xf5\x63\xa1\x97\xf5\x63\xa1\x97\xf5\x63\xa1\
\x97\xf5\x63\xa1\x97\xf5\x63\xa1\x97\xf5\x63\xa1\x97\xf5\x63\xa1\
\x97\xf5\x63\xa1\x97\xf5\x63\xa1\x96\xf5\x63\xa1\x92\xf5\x63\xa1\
\xb4\xf5\x63\xa1\xcf\xf5\x63\xa1\x3a\xf5\x63\xa1\x00\xf5\x63\xa1\
\x0e\xf5\x63\xa1\xa0\xf5\x63\xa1\xc7\xf5\x63\xa1\x2d\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x0a\xf5\x63\xa1\xbf\xf5\x63\xa1\x65\xf5\x63\xa1\
\x00\xfd\x63\x9c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x0e\xf5\x63\xa1\x94\xf5\x63\xa1\xd1\xf5\x63\xa1\x3a\xf5\x63\xa1\
\x00\xf5\x63\xa1\x14\xf5\x63\xa1\xa4\xf5\x63\xa1\xc8\xf5\x63\xa1\
\x2d\xf5\x63\xa1\x00\xf5\x63\xa2\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa2\x00\xf5\x63\xa2\x00\xf5\x63\xa2\
\x00\xf5\x63\xa2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x0a\xf5\x63\xa1\xbf\xf5\x63\xa1\x65\xf5\x63\xa1\
\x00\xfa\x63\x9e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x0e\xf5\x63\xa1\x94\xf5\x63\xa1\xd1\xf5\x63\xa1\
\x3a\xf5\x63\xa1\x00\xf5\x63\xa1\x14\xf5\x63\xa1\xa4\xf5\x63\xa1\
\xc7\xf5\x63\xa1\x2d\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x38\xf5\x63\xa1\x90\xf5\x63\xa1\
\x46\xf3\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x0a\xf5\x63\xa1\xbf\xf5\x63\xa1\x65\xf5\x63\xa1\
\x00\xfa\x63\x9e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x0e\xf5\x63\xa1\x94\xf5\x63\xa1\
\xd1\xf5\x63\xa1\x3a\xf5\x63\xa1\x00\xf5\x63\xa1\x14\xf5\x63\xa1\
\xa4\xf5\x63\xa1\xc7\xf5\x63\xa1\x2d\xf5\x63\xa1\x00\xf5\x63\xa1\
\x05\xf5\x63\xa1\x5f\xf5\x63\xa1\xd6\xf5\x63\xa1\xca\xf5\x63\xa1\
\xd2\xf5\x63\xa1\x33\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x0a\xf5\x63\xa1\xbf\xf5\x63\xa1\x65\xf5\x63\xa1\
\x00\xfa\x63\x9d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x0e\xf5\x63\xa1\
\x94\xf5\x63\xa1\xd1\xf5\x63\xa1\x3a\xf5\x63\xa1\x00\xf5\x63\xa1\
\x14\xf5\x63\xa1\xa4\xf5\x63\xa1\xc7\xf5\x63\xa1\x38\xf5\x63\xa1\
\x7f\xf5\x63\xa1\xdc\xf5\x63\xa1\x6c\xf5\x63\xa1\x14\xf5\x63\xa1\
\x9f\xf5\x63\xa1\xbc\xf5\x63\xa1\x19\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x07\xf5\x63\xa1\xb7\xf5\x63\xa1\x76\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x0e\xf5\x63\xa1\x94\xf5\x63\xa1\xd1\xf5\x63\xa1\x3a\xf5\x63\xa1\
\x00\xf5\x63\xa1\x14\xf5\x63\xa1\xa2\xf5\x63\xa1\xe8\xf5\x63\xa1\
\xd3\xf5\x63\xa1\x4b\xf5\x63\xa1\x01\xf5\x63\xa1\x00\xf5\x63\xa1\
\x1d\xf5\x63\xa1\xc3\xf5\x63\xa1\x97\xf5\x63\xa1\x08\xf5\x63\xa1\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x89\xf5\x63\xa1\xb1\xf5\x63\xa1\
\x09\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x0e\xf5\x63\xa1\x94\xf5\x63\xa1\xd1\xf5\x63\xa1\
\x3a\xf5\x63\xa1\x00\xf5\x63\xa1\x16\xf5\x63\xa1\x77\xf5\x63\xa1\
\x38\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x3a\xf5\x63\xa1\xd8\xf5\x63\xa1\x6c\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x32\xf5\x63\xa1\xd8\xf5\x63\xa1\
\x66\xf5\x63\xa1\x02\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x0e\xf5\x63\xa1\x93\xf5\x63\xa1\
\xce\xf5\x63\xa1\x39\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x02\xf5\x63\xa1\x2e\xf5\x63\xa1\
\x63\xf5\x63\xa1\x13\xf5\x63\xa1\x5f\xf5\x63\xa1\xdb\xf5\x63\xa1\
\x44\xf5\x63\xa1\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x01\xf5\x63\xa1\x65\xf5\x63\xa1\
\xdd\xf5\x63\xa1\x85\xf5\x63\xa1\x29\xf5\x63\xa1\x06\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x1c\xf5\x63\xa1\
\xd1\xf5\x63\xa1\xa0\xf5\x63\xa1\x03\xf5\x63\xa1\x00\xf5\x63\xa1\
\x01\xf5\x63\xa1\x26\xf5\x63\xa1\x88\xf5\x63\xa1\xd6\xf5\x63\xa1\
\xe7\xf5\x63\xa1\x8a\xf5\x63\xa1\x0c\xf5\x63\xa1\x8a\xf5\x63\xa1\
\xca\xf5\x63\xa1\x24\xf5\x63\xa1\x00\xf6\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x03\xf5\x63\xa1\
\x50\xf5\x63\xa1\xbf\xf5\x63\xa1\xd7\xf5\x63\xa1\xaf\xf5\x63\xa1\
\x6b\xf5\x63\xa1\x2c\xf5\x63\xa1\x08\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x02\xf5\x63\xa1\
\x0a\xf5\x63\xa1\x10\xf5\x63\xa1\x0c\xf5\x63\xa1\x03\xf5\x63\xa1\
\x00\xf5\x63\xa1\x00\xf5\x65\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x06\xf5\x63\xa1\x27\xf5\x63\xa1\x63\xf5\x63\xa1\xb2\xf5\x63\xa1\
\xd3\xf5\x63\xa1\x3b\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x57\xf5\x63\xa1\xd5\xf5\x63\xa1\xbb\xf5\x63\xa1\x51\xf5\x63\xa1\
\x87\xf5\x63\xa1\xe5\xf5\x63\xa1\x2d\xf5\x63\xa1\x11\xf5\x63\xa1\
\xbe\xf5\x63\xa1\x98\xf5\x63\xa1\x02\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x00\xf5\x63\xa1\x10\xf5\x63\xa1\x42\xf5\x63\xa1\x88\xf5\x63\xa1\
\xc4\xf5\x63\xa1\xd7\xf5\x63\xa1\xb4\xf5\x63\xa1\x71\xf5\x63\xa1\
\x2f\xf5\x63\xa1\x21\xf5\x63\xa1\x61\xf5\x63\xa1\x9f\xf5\x63\xa1\
\xbe\xf5\x63\xa1\xc7\xf5\x63\xa1\xc1\xf5\x63\xa1\xa8\xf5\x63\xa1\
\x70\xf5\x63\xa1\x2a\xf5\x63\xa1\x27\xf5\x63\xa1\x66\xf5\x63\xa1\
\xad\xf5\x63\xa1\xd6\xf5\x63\xa1\xf5\xf5\x63\xa1\xec\xf5\x63\xa1\
\x4f\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x1c\xf5\x63\xa1\
\xcb\xf5\x63\xa1\x81\xf5\x63\xa1\x0e\xf5\x63\xa1\x60\xf5\x63\xa1\
\xda\xf5\x63\xa1\x78\xf5\x63\xa1\x0c\xf5\x63\xa1\x55\xf5\x63\xa1\
\xd8\xf5\x63\xa1\x88\xf5\x63\xa1\x01\xf5\x63\xa1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xf5\x63\xa1\x00\xf5\x63\xa1\x00\xf5\x63\xa1\
\x10\xf5\x63\xa1\x3d\xf5\x63\xa1\x82\xf5\x64\xa2\xc0\xf5\x64\xa2\
\xd8\xf5\x64\xa2\xda\xf5\x64\xa2\xcc\xf5\x63\xa1\x95\xf5\x63\xa1\
\x6a\xf5\x63\xa1\x5a\xf5\x63\xa1\x64\xf5\x63\xa1\x8a\xf5\x64\xa2\
\xc2\xf5\x64\xa2\xdb\xf5\x64\xa2\xd8\xf5\x64\xa2\xc8\xf5\x63\xa1\
\x89\xf5\x64\xa2\x74\xf5\x64\xa2\xe1\xf5\x63\xa1\x70\xf6\x61\x9f\
\x02\xf5\x64\xa1\x00\xf5\x62\xa1\x00\xf5\x64\xa2\x77\xf5\x64\xa2\
\xc6\xf5\x63\xa1\x19\xf4\x65\xa2\x68\xf5\x64\xa2\xdd\xf5\x63\xa1\
\x64\xf4\x65\xa2\x10\xf4\x65\xa2\x83\xf5\x64\xa2\xdc\xf5\x63\xa1\
\x78\xf5\x63\xa1\x0e\xf5\x63\xa1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf5\x65\xa3\x00\xf5\x67\xa5\x00\xf4\x64\xa2\x0b\xf4\x66\xa4\
\x6d\xf4\x66\xa4\xd7\xf4\x66\xa3\x28\xef\x6f\xaa\x00\xf4\x64\xa2\
\x00\x00\x00\x00\x00\xf5\x65\xa3\x00\xf2\x67\xa4\x00\xf4\x65\xa3\
\x13\xf4\x66\xa4\xbc\xf4\x66\xa4\x94\xf4\x64\xa2\x11\xf3\x67\xa4\
\x00\xf4\x67\xa4\x67\xf3\x67\xa4\xdf\xf3\x67\xa5\x33\xf4\x66\xa4\
\x00\xf3\x67\xa5\x00\xf3\x67\xa5\x22\xf3\x67\xa4\xd2\xf4\x66\xa4\
\x64\xf3\x67\xa5\x5d\xf3\x66\xa4\xdd\xf4\x66\xa3\x5d\xf3\x68\xa5\
\x16\xf3\x67\xa5\xa1\xf4\x66\xa4\xd0\xf4\x65\xa3\x49\xf4\x64\xa2\
\x02\xf4\x65\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf3\x69\xa6\x00\xf3\x69\xa7\
\x4d\xf2\x69\xa6\xcc\xf2\x69\xa6\x15\xf2\x6a\xa8\x07\xf2\x6b\xa8\
\x18\xf2\x6b\xa8\x20\xf2\x6b\xa8\x1a\xf2\x6a\xa8\x0b\xf2\x69\xa6\
\x04\xf2\x69\xa6\xaf\xf3\x69\xa6\x7a\xf2\x69\xa7\x00\xf3\x68\xa6\
\x00\xf3\x68\xa5\x1e\xf3\x69\xa6\xab\xf2\x69\xa7\xcd\xf2\x6a\xa7\
\x48\xf3\x66\xa3\x00\xf3\x69\xa6\x3b\xf2\x69\xa6\xe2\xf2\x6a\xa7\
\xaa\xf2\x69\xa6\xd9\xf3\x68\xa6\x67\xf2\x6a\xa7\x15\xf2\x6a\xa7\
\xa8\xf3\x69\xa6\xc5\xf3\x68\xa5\x2f\xf3\x69\xa6\x00\xf2\x68\xa5\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x6c\xa9\x00\xf1\x6c\xa9\
\x4d\xf1\x6c\xa9\xdd\xf1\x6d\xa9\x8d\xf1\x6c\xa9\xb7\xf1\x6c\xa9\
\xcf\xf1\x6c\xa9\xd2\xf1\x6c\xa9\xd0\xf1\x6c\xa9\xbf\xf1\x6d\xa9\
\x8f\xf1\x6c\xa9\xcf\xf2\x6c\xa9\x79\xf1\x6c\xa9\x00\xf1\x6c\xa9\
\x00\xf2\x6a\xa7\x00\xf2\x6a\xa7\x11\xf2\x6b\xa8\x86\xf1\x6c\xa9\
\xdc\xf1\x6d\xa9\x70\xf1\x6c\xa9\x0c\xf2\x6b\xa8\x4f\xf2\x6c\xa8\
\xce\xf2\x6b\xa8\x7b\xf1\x6c\xa9\x0f\xf1\x6c\xa9\x97\xf2\x6c\xa9\
\xc9\xf2\x6b\xa8\x2a\xf2\x6b\xa8\x00\xf1\x6b\xa8\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xf0\x6f\xac\x00\xf0\x70\xac\x00\xf0\x70\xac\x14\xf0\x6f\xac\
\x96\xf0\x6f\xab\xf2\xf1\x6e\xab\xba\xf1\x6e\xaa\x77\xf1\x6d\xaa\
\x4d\xf1\x6d\xaa\x3e\xf1\x6d\xaa\x47\xf1\x6d\xaa\x6c\xf1\x6e\xaa\
\xac\xf0\x6e\xab\xec\xf0\x6f\xab\xb7\xf0\x70\xac\x24\xf0\x6f\xac\
\x00\xef\x70\xac\x00\xf1\x6d\xaa\x00\xf1\x6d\xa9\x05\xf1\x6e\xaa\
\x5c\xf0\x6e\xab\xd7\xf0\x6f\xac\x9b\xf0\x70\xad\x19\xf1\x6c\xa9\
\x12\xf1\x6e\xaa\x09\xf0\x6f\xac\x74\xf0\x6e\xab\xd7\xf1\x6e\xaa\
\x38\xf1\x6e\xab\x00\xf0\x6d\xaa\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xee\x73\xaf\
\x00\xef\x72\xae\x00\xef\x72\xae\x20\xef\x72\xae\xb2\xef\x71\xad\
\xc9\xf0\x70\xad\x50\xf0\x70\xac\x0b\xf1\x6f\xac\x00\xee\x71\xad\
\x00\x00\x00\x00\x00\xed\x72\xaf\x00\xf0\x6f\xab\x00\xf0\x70\xac\
\x06\xf0\x70\xac\x3b\xf0\x71\xad\xb4\xef\x71\xae\xca\xef\x72\xae\
\x37\xef\x72\xae\x00\xee\x73\xae\x00\xf0\x70\xac\x00\xed\x33\x76\
\x00\xf0\x70\xac\x36\xef\x71\xad\xc0\xef\x72\xae\xc0\xef\x72\xaf\
\x33\xef\x72\xae\x48\xef\x71\xad\xdc\xf0\x70\xad\x58\xef\x72\xaf\
\x00\xef\x70\xac\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xee\x75\xb1\
\x00\xee\x75\xb1\x13\xee\x74\xb0\xb0\xee\x73\xb0\xb8\xef\x73\xaf\
\x22\xef\x73\xaf\x00\xee\x72\xaf\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xee\x72\xaf\
\x00\xef\x73\xae\x00\xef\x72\xae\x12\xee\x73\xaf\x98\xee\x74\xb0\
\xcc\xee\x75\xb1\x29\xee\x75\xb0\x00\x00\x00\x00\x00\xee\x72\xaf\
\x00\xef\x73\xaf\x00\xef\x72\xae\x1b\xee\x73\xaf\x9b\xee\x74\xb0\
\xd9\xee\x74\xb0\xdd\xee\x73\xaf\x86\xef\x71\xad\x04\xef\x72\xae\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xec\x79\xb4\x00\xe7\x87\xbf\
\x00\xed\x78\xb3\x7e\xed\x77\xb2\xcb\xed\x75\xb1\x25\xed\x76\xb2\
\x00\xed\x75\xb2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xee\x75\xb1\x00\xee\x75\xb1\x11\xed\x77\xb2\
\xae\xed\x77\xb3\xa6\xec\x79\xb4\x09\xec\x79\xb4\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xee\x75\xb1\x00\xee\x75\xb1\x0a\xed\x76\xb2\
\x63\xed\x76\xb2\x78\xee\x75\xb1\x12\xed\x75\xb1\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xeb\x7b\xb6\x00\xeb\x7b\xb6\
\x28\xec\x7b\xb6\xd4\xec\x7a\xb5\x58\xeb\x7b\xb6\x00\xec\x7a\xb5\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xec\x7a\xb5\x00\xec\x7a\xb5\
\x33\xec\x7b\xb6\xd7\xeb\x7b\xb6\x4a\xeb\x7b\xb6\x00\xec\x7b\xb6\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xea\x80\xbb\x00\xea\x7e\xb8\x00\xea\x7f\xb9\
\x72\xea\x7e\xb9\xc0\xeb\x7d\xb8\x0f\xeb\x7d\xb8\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xeb\x7d\xb8\x00\xeb\x7a\xb6\
\x02\xea\x7e\xb9\x9c\xea\x7e\xb9\x9e\xe9\x82\xbc\x02\xea\x80\xba\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xe9\x82\xbd\x00\xe8\x83\xbd\x05\xe9\x82\xbc\
\xaf\xe9\x81\xbc\x83\xe8\x82\xbd\x00\xea\x80\xbb\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe9\x81\xbb\x00\xe9\x82\xbc\
\x00\xe9\x81\xbc\x57\xe9\x82\xbc\xcc\xe9\x82\xbd\x17\xe9\x82\xbd\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xe8\x7f\xbc\x00\xe7\x86\xc0\x00\xe7\x86\xc0\x23\xe8\x85\xbf\
\xd3\xe8\x85\xbf\x4a\xe8\x85\xbf\x00\xd6\xaa\xe2\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\x84\xbf\
\x00\xe8\x84\xbf\x26\xe8\x85\xbf\xd5\xe7\x86\xc0\x45\xe8\x85\xbf\
\x00\xe7\x85\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe5\x8b\xc4\
\x00\xe6\x89\xc3\x00\xe6\x8a\xc4\x31\xe6\x89\xc3\xb8\xe6\x88\xc2\
\xcb\xe7\x88\xc2\x1a\xe7\x88\xc2\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe7\x88\xc2\
\x00\xe7\x87\xc2\x07\xe6\x88\xc2\xab\xe6\x89\xc3\xcf\xe6\x8a\xc3\
\x4a\xe3\x90\xc9\x00\xe5\x8a\xc4\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe5\x8d\xc7\
\x00\xe4\x8d\xc7\x14\xe5\x8c\xc6\xbe\xe5\x8b\xc5\xa8\xe6\x8a\xc4\
\x2f\xe7\x89\xc3\x01\xe6\x8a\xc4\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x8a\xc4\
\x00\xe5\x8b\xc4\x00\xe6\x8a\xc4\x20\xe5\x8b\xc5\x8b\xe5\x8c\xc6\
\xd5\xe5\x8d\xc6\x2d\xe5\x8d\xc6\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe4\x90\xc9\
\x00\xe3\x90\xc9\x43\xe4\x90\xc9\xd4\xe4\x8f\xc8\x24\xe4\x8f\xc8\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xe4\x8e\xc8\x00\xe4\x8e\xc8\x0d\xe4\x90\xc8\
\xbf\xe4\x90\xc9\x6e\xe4\x90\xc8\x00\xe3\x92\xca\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe2\x93\xcc\
\x00\xe2\x93\xcb\x41\xe2\x93\xcc\xd6\xe2\x94\xcd\x2f\xe1\x96\xce\
\x0c\xe1\x95\xcd\x06\xe1\x96\xce\x00\xe3\x92\xca\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x7d\xba\x00\xe2\x92\xcb\
\x00\xe5\x8c\xc7\x01\xe5\x8c\xc6\x01\xe2\x94\xcd\x15\xe2\x94\xcc\
\xc4\xe2\x93\xcc\x6c\xe2\x93\xcc\x00\xe3\x92\xcb\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe1\x96\xce\
\x00\xe1\x96\xcd\x10\xe1\x96\xce\xad\xe1\x97\xcf\xd1\xe1\x97\xcf\
\xc5\xe1\x98\xcf\x9a\xe0\x98\xd0\x19\xe0\x98\xd0\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xe2\x96\xcf\x00\xe6\x92\xcb\x00\xe0\x99\xd0\
\x0a\xe0\x99\xd0\x26\xe0\x99\xd0\x4b\xe0\x98\xd0\x6f\xe1\x98\xd0\
\x8b\xe1\x98\xd0\x9e\xe1\x98\xd0\xa4\xe1\x97\xcf\xc0\xe1\x97\xcf\
\xc8\xe1\x96\xce\x26\xe1\x96\xce\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\x99\xd1\
\x00\xe0\x99\xd0\x00\xe0\x98\xd0\x1a\xe0\x9a\xd2\xb7\xe0\x9a\xd2\
\xc2\xe0\x9a\xd2\xce\xdf\x9b\xd2\x70\xe0\x9a\xd2\x00\xdf\x9c\xd3\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\x98\xd2\x00\xdf\x9c\xd4\
\x00\xdf\x9c\xd4\x08\xdf\x9c\xd3\x36\xdf\x9b\xd3\x7f\xdf\x9b\xd3\
\xbb\xe0\x9a\xd2\xd5\xe0\x9a\xd2\xcf\xe0\x9a\xd1\xb8\xe0\x99\xd1\
\xa6\xe0\x9a\xd1\xa0\xe0\x9a\xd2\xcb\xe0\x9a\xd2\xd5\xe0\x99\xd1\
\x2f\xdf\x9a\xd2\x00\xe0\x99\xd1\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xde\xa0\xd6\x00\xde\x9d\xd5\x00\xde\x9e\xd5\x94\xde\x9e\xd5\
\x97\xde\x9e\xd5\x9d\xde\x9e\xd5\x90\xde\x9d\xd5\x00\xde\x9e\xd5\
\x00\x00\x00\x00\x00\xdd\x9f\xd7\x00\xdd\x9f\xd7\x09\xde\x9f\xd6\
\x4c\xde\x9e\xd6\xae\xde\x9e\xd5\xd9\xde\x9d\xd4\xb8\xdf\x9c\xd4\
\x74\xdf\x9c\xd4\x3f\xde\x9e\xd5\x40\xde\x9f\xd6\x72\xde\x9e\xd6\
\xa8\xde\x9e\xd6\xc6\xde\x9e\xd5\xe6\xde\x9e\xd5\xbc\xde\x9e\xd5\
\x09\xde\x9e\xd5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xdd\xa1\xd8\x00\xdd\xa1\xd9\x00\xdd\xa1\xd8\x95\xdd\xa1\xd8\
\x96\xdd\xa1\xd8\x92\xdd\xa1\xd8\x9a\xdd\xa1\xd9\x00\xdd\xa2\xd9\
\x00\xd6\xb9\xe4\x00\xdc\xa3\xda\x2d\xdd\xa2\xd9\xa5\xdd\xa1\xd8\
\xd9\xdd\xa0\xd7\x93\xde\x9f\xd7\x38\xde\x9e\xd5\x07\xdc\xa4\xda\
\x20\xdc\xa3\xd9\x88\xdd\xa1\xd9\xd4\xdd\xa1\xd8\xc7\xdd\xa0\xd7\
\x8e\xdd\x9f\xd7\x57\xdd\xa1\xd8\x90\xdd\xa1\xd8\xbd\xdd\xa1\xd8\
\x09\xdd\xa1\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xdb\xa5\xdb\x00\xdc\xa6\xdc\x00\xdb\xa5\xdc\x95\xdc\xa5\xdc\
\x95\xdc\xa5\xdc\x87\xdb\xa5\xdc\xba\xdb\xa7\xdd\x3f\xdb\xa7\xdd\
\x37\xdb\xa6\xdd\x60\xdb\xa5\xdc\xd9\xdc\xa5\xdb\xba\xdc\xa4\xda\
\x39\xde\xa0\xd7\x02\xd9\xab\xe1\x02\xdb\xa6\xdd\x51\xdb\xa5\xdc\
\xcf\xdc\xa4\xdb\xbe\xdc\xa4\xda\x55\xdc\xa3\xd9\x13\xdd\xa5\xdc\
\x00\xdc\xa5\xdc\x00\xdb\xa5\xdc\x69\xdc\xa5\xdc\xbd\xdc\xa5\xdc\
\x09\xdc\xa5\xdc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xda\xa8\xdf\x00\xda\xaa\xdf\x00\xda\xa9\xdf\x95\xda\xa9\xdf\
\x95\xda\xa8\xdf\x62\xda\xa9\xdf\xda\xda\xa9\xdf\xd2\xda\xa9\xdf\
\xd4\xda\xa9\xdf\xe4\xda\xa9\xdf\xf3\xda\xa9\xe0\xd2\xda\xaa\xe0\
\xad\xda\xaa\xe0\x85\xda\xaa\xe0\x85\xda\xa9\xdf\xe2\xda\xa8\xde\
\x82\xdb\xa5\xdc\x0f\xdb\xa7\xdd\x00\xdc\xa0\xd7\x00\x00\x00\x00\
\x00\xda\xa9\xdf\x00\xda\xa9\xdf\x6a\xda\xa9\xdf\xbd\xda\xa9\xdf\
\x09\xda\xa9\xdf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd9\xac\xe2\x00\xd8\xad\xe4\x00\xd8\xad\xe3\x8f\xd8\xad\xe3\
\x9e\xd9\xab\xe1\x09\xd9\xaa\xe1\x1d\xd9\xaa\xe1\x1f\xd9\xaa\xe1\
\x24\xd9\xaa\xe1\x2e\xd9\xab\xe1\x3d\xd9\xab\xe1\x5a\xd9\xab\xe2\
\x7f\xd9\xac\xe2\xab\xd9\xac\xe2\xd6\xd8\xad\xe3\xed\xd8\xae\xe4\
\xa8\xd8\xae\xe4\x4f\xd7\xaf\xe5\x0e\xd7\xaf\xe5\x00\xd9\xad\xe3\
\x00\xd8\xad\xe3\x00\xd8\xad\xe3\x71\xd8\xad\xe3\xb8\xd9\xac\xe2\
\x07\xd9\xac\xe2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd7\xaf\xe5\x00\xd7\xb1\xe7\x00\xd7\xb0\xe6\x75\xd7\xb1\xe6\
\xb7\xd6\xb1\xe7\x08\xd7\xb1\xe7\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd5\xb3\xe8\x00\xd8\xad\xe3\
\x00\xd7\xaf\xe4\x04\xd8\xae\xe4\x18\xd8\xaf\xe5\x49\xd7\xaf\xe5\
\x98\xd7\xb0\xe6\xd7\xd7\xb1\xe7\xb8\xd6\xb2\xe8\x46\xd6\xb3\xe9\
\x03\xd7\xb0\xe6\x00\xd7\xb1\xe7\x8e\xd7\xb0\xe6\xa2\xd8\xad\xe2\
\x01\xd7\xaf\xe5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd5\xb4\xea\x00\xd5\xb4\xea\x46\xd5\xb4\xea\
\xd4\xd5\xb5\xea\x23\xd5\xb5\xea\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd6\xb2\xe8\x00\xd6\xb3\xe8\
\x02\xd6\xb2\xe8\x29\xd6\xb3\xe9\x91\xd5\xb4\xea\xdb\xd5\xb5\xeb\
\x56\xd5\xb6\xeb\x09\xd5\xb4\xea\xbd\xd5\xb4\xea\x72\xd5\xb5\xea\
\x00\xd4\xb6\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd4\xb7\xec\x00\xd4\xb7\xec\x15\xd4\xb8\xed\
\xc8\xd4\xb9\xee\x6b\xd4\xb8\xec\x00\xd3\xba\xef\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd5\xb6\xeb\x00\xd5\xb6\xeb\x05\xd4\xb6\xec\x42\xd4\xb6\xec\
\x2c\xd3\xb9\xee\x40\xd4\xb8\xed\xd7\xd4\xb7\xed\x31\xd4\xb8\xed\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x74\xd3\xba\xef\xca\xd3\xba\xef\x1e\xd3\xba\xef\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x0b\xd3\xba\xef\xab\xd3\xba\xef\x9e\xd3\xba\xef\x05\xd3\xba\xef\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x16\xd3\xba\xef\xbe\xd3\xba\xef\x9a\xd3\xba\xef\x0b\xd3\xba\xef\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\x02\xd3\xba\xef\
\x73\xd3\xba\xef\xd5\xd3\xba\xef\x2f\xd3\xba\xef\x00\xd3\xbb\xf0\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x00\xd3\xba\xef\x37\xd3\xba\xef\xd4\xd3\xba\xef\x90\xd3\xba\xef\
\x11\xd3\xba\xef\x00\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x07\xd3\xba\xef\x6d\xd3\xba\xef\
\xdd\xd3\xba\xef\x59\xd3\xba\xef\x00\xd3\xba\xef\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xba\xef\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x3f\xd3\xba\xef\xcc\xd3\xba\xef\
\xb6\xd3\xba\xef\x40\xd3\xba\xef\x08\xd3\xba\xef\x00\xd2\xb9\xef\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x04\xd3\xba\xef\x2e\xd3\xba\xef\x9d\xd3\xba\xef\xd9\xd3\xba\xef\
\x5d\xd3\xba\xef\x02\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xd3\xba\xef\x00\xd3\xba\xef\x00\xd3\xba\xef\x25\xd3\xba\xef\
\x99\xd3\xba\xef\xd8\xd3\xba\xef\xb1\xd3\xba\xef\x72\xd3\xba\xef\
\x4b\xd3\xba\xef\x3e\xd3\xba\xef\x47\xd3\xba\xef\x68\xd3\xba\xef\
\xa3\xd3\xba\xef\xd6\xd3\xba\xef\xaf\xd3\xba\xef\x39\xd3\xbb\xef\
\x01\xd3\xba\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xba\xef\x00\xd3\xba\xef\
\x06\xd3\xba\xef\x3f\xd3\xba\xef\x97\xd3\xba\xef\xd4\xd3\xba\xef\
\xec\xd3\xba\xef\xf0\xd3\xba\xef\xee\xd3\xba\xef\xdb\xd3\xba\xef\
\xa7\xd3\xba\xef\x52\xd3\xba\xef\x0d\xd3\xba\xef\x00\xd2\xba\xef\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xff\xff\xf0\x1f\xff\xff\x00\x00\xff\xff\xf0\
\x1f\xff\xff\x00\x00\xff\xff\xe2\x07\xff\xff\x00\x00\xff\xff\xe1\
\x87\xff\xff\x00\x00\xe0\x00\x00\x87\xff\xff\x00\x00\xe0\x00\x00\
\x43\xff\xff\x00\x00\xe3\xff\xfc\x21\xff\xff\x00\x00\xe3\xff\xfe\
\x10\xf1\xff\x00\x00\xe3\xff\xff\x08\x40\xff\x00\x00\xe3\xff\xff\
\x84\x00\x7f\x00\x00\xe3\xff\xff\xc2\x04\x3f\x00\x00\xf1\xff\xff\
\xe1\x1e\x3f\x00\x00\xf0\xff\xff\xf0\xf0\x1f\x00\x00\xf0\x3f\xff\
\xf8\x40\x0f\x00\x00\xf8\x07\x83\xc0\xc0\x07\x00\x00\xfe\x00\x00\
\x01\x80\x07\x00\x00\xff\xc0\x00\x01\x80\x0f\x00\x00\xff\xf8\x7c\
\x23\x00\x1f\x00\x00\xff\xfc\x00\x61\x00\x7f\x00\x00\xff\xfc\x00\
\x70\x00\xff\x00\x00\xff\xf8\x00\x38\x01\xff\x00\x00\xff\xf0\x7c\
\x1e\x03\xff\x00\x00\xff\xe1\xff\x0f\x03\xff\x00\x00\xff\xe3\xff\
\x87\x87\xff\x00\x00\xff\xc7\xff\xc7\xff\xff\x00\x00\xff\xc7\xff\
\xc3\xff\xff\x00\x00\xff\x8f\xff\xe3\xff\xff\x00\x00\xff\x8f\xff\
\xe3\xff\xff\x00\x00\xff\x0f\xff\xe1\xff\xff\x00\x00\xfe\x0f\xff\
\xf0\xff\xff\x00\x00\xfe\x3f\xff\xf8\xff\xff\x00\x00\xfe\x0f\xff\
\xe0\xff\xff\x00\x00\xfe\x07\xfc\x00\xff\xff\x00\x00\xff\x07\xe0\
\x01\xff\xff\x00\x00\xff\x87\x80\x01\xff\xff\x00\x00\xff\x87\x00\
\x01\xff\xff\x00\x00\xff\x80\x00\x31\xff\xff\x00\x00\xff\x80\x00\
\xf1\xff\xff\x00\x00\xff\x80\x00\x71\xff\xff\x00\x00\xff\x8f\xe0\
\x11\xff\xff\x00\x00\xff\x8f\xfc\x03\xff\xff\x00\x00\xff\x8f\xff\
\x03\xff\xff\x00\x00\xff\xc7\xff\xc3\xff\xff\x00\x00\xff\xc3\xff\
\x87\xff\xff\x00\x00\xff\xe1\xff\x0f\xff\xff\x00\x00\xff\xf0\x7c\
\x0f\xff\xff\x00\x00\xff\xf8\x00\x1f\xff\xff\x00\x00\xff\xfc\x00\
\x7f\xff\xff\x00\x00\x89\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\
\x0d\x49\x48\x44\x52\x00\x00\x01\x00\x00\x00\x01\x00\x08\x06\x00\
\x00\x00\x5c\x72\xa8\x66\x00\x00\x56\x68\x49\x44\x41\x54\x78\xda\
\xed\x9d\x77\x9c\x1d\x65\xf5\x87\x9f\x33\x73\xef\x96\xf4\x46\x49\
\xe8\x35\xb4\x08\x08\x16\x50\x51\x10\x50\x7f\xa0\xa2\xf4\x92\x42\
\x0a\xbd\x23\x22\x0a\x22\x20\x20\xbd\x77\x42\xb2\x09\xbd\x37\x01\
\xb1\x60\x01\x04\x15\x05\x02\x84\x04\x08\x10\x12\x92\x90\xb2\xbd\
\xde\x3b\xef\xf7\xf7\xc7\x7b\x37\x65\xf7\xde\xcd\xee\x66\x77\xef\
\x96\x79\x3e\x9f\x61\x37\xec\xdc\xb9\xef\xbc\x33\xe7\xbc\xe7\x3d\
\xe7\xbc\xe7\x35\x62\xba\x84\xc5\xcf\xbc\xcc\xa0\x81\xa3\x68\x68\
\xa8\x2c\x70\x04\x83\x0d\x36\x04\x46\x03\xdb\x00\x9b\x00\x1b\x03\
\x1b\x00\x43\x81\x21\x40\x3f\x20\x99\x39\xd2\x40\x03\x50\x07\x94\
\x01\xa5\xc0\x17\xc0\x02\xe0\x33\x19\x1f\x9a\x98\x03\x7c\x2e\xa3\
\x6c\xe1\xc8\x8a\xfa\xf5\x97\xf5\x67\x83\xef\xec\x92\xef\xdb\x8e\
\xe9\xe6\x58\xbe\x1b\xd0\x9b\x29\xfd\xe3\x2c\x80\x10\x31\x14\x2f\
\xec\xbb\x01\x63\x32\xc7\xc6\xc0\x40\xbc\xa0\x27\xd6\xe1\x6b\x22\
\xa0\x06\xa8\x02\x3e\x07\xde\x01\xde\x06\xfe\x07\xbc\x87\xb1\x1c\
\x48\x0f\xdd\x6f\x4c\xbe\xbb\x23\xa6\x1b\x12\x2b\x80\x0e\xa6\xf4\
\x0f\x6f\x03\x4a\xc8\x6c\x84\x61\xbb\x01\xfb\x02\xdf\x00\xb6\x02\
\x06\x03\x61\x17\x34\xc3\x01\x95\xc0\x3c\xe0\x75\xe0\xaf\xc0\x3f\
\x0d\x16\x01\xa9\x21\xfb\xc7\xca\x20\xc6\x13\x2b\x80\x0e\xa0\xf4\
\x4f\xb3\xb0\x42\x43\x35\x1a\x88\x69\x0c\xd8\x8f\x81\xef\xe1\x85\
\x7e\x40\xbe\xdb\x87\x9f\x3a\x7c\x0c\xfc\x05\x78\x0e\xf1\xcf\x74\
\x94\x28\x0d\x02\xc7\xf0\xef\xef\x98\xef\xb6\xc5\xe4\x91\x58\x01\
\xac\x03\x65\x2f\xce\x22\x94\x2c\x15\x04\xeb\x99\xf4\x5d\xe0\x28\
\x60\x0f\x60\x18\xdd\xb7\x6f\x2b\x80\x7f\x03\x8f\x02\xbf\x6f\x70\
\x6e\x41\xc2\x4c\xc3\xbf\xf7\xa5\x7c\xb7\x2b\x26\x0f\x74\xd7\x97\
\xb4\x5b\x53\xfa\x87\x59\xe0\x30\x02\x36\xc4\xf8\x31\x30\x0e\xd8\
\x05\x28\xee\x80\xcb\x0b\xef\xf4\x8b\xf0\xa6\xbc\xf0\xcf\xc9\xf0\
\xbe\x82\x04\x1d\xf3\xdc\x52\x78\x7f\xc1\x03\x60\x0f\x83\x9b\x0f\
\xa6\xa1\xf1\xf4\xa0\x4f\x11\x2b\x80\x36\x50\xf9\xa7\x77\xa9\x4c\
\xd5\xd2\x2f\x51\x30\x54\xb2\x9f\x00\x53\x80\x5d\x81\xc2\x76\x5c\
\xae\x0e\x3f\x4f\xff\x02\xf8\x04\xef\xc0\x5b\x8c\xf7\xf0\x97\x01\
\x35\x60\x0d\xa0\x48\x10\x9a\x8f\x06\xf4\xc3\xfb\x11\x86\xe2\x23\
\x06\xa3\x80\xcd\xf0\x11\x85\x41\xb4\x4f\x01\xa5\xf1\x8a\xe0\x1e\
\xd0\xc3\x43\x36\xdb\x72\x49\xc5\x67\x9f\x32\x78\xdf\x78\x6a\xd0\
\x17\x88\x15\x40\x2b\x29\x7b\x71\x16\x40\xa1\xe0\x3b\xc0\x69\xc0\
\xde\xb4\x4d\xe0\x1a\xf0\xc2\x3e\x0b\x78\x13\xef\xa9\x9f\x83\xd9\
\xe7\x26\x55\x9b\x54\x97\x0c\xc2\x74\x24\xc7\xc0\x16\x46\xe1\x25\
\x7f\x7d\x83\xf5\x1f\xfb\x32\x2b\x0e\x7c\x37\x0c\x70\x45\x98\xf5\
\x07\x36\x00\x8d\x06\x1b\x83\xb7\x44\xbe\x84\x57\x0a\x45\x6d\x6c\
\xdf\xab\xc0\x4d\x82\x17\x80\x9a\x61\xb1\x35\xd0\xeb\x89\x15\xc0\
\x5a\x58\xf1\xe2\xdb\xd4\x59\x3d\xc5\x2a\xda\x02\x38\x05\x18\x0f\
\x0c\x6f\xe5\xc7\x1b\x80\xcf\x80\x97\x81\x97\x40\xff\xc2\xf8\x2c\
\x1d\xaa\x3a\x70\xa6\xe1\xfb\x76\xdc\xbc\x7b\xf9\x8b\xb3\x10\x58\
\xe8\xad\x84\x51\xf8\x90\xe3\xb7\x80\x6f\x03\x5b\xd3\x7a\x2b\xa5\
\x02\x78\xc8\xc4\xf5\x91\xd3\xec\x20\x30\x0d\xfd\x5e\xac\x08\x7a\
\x2b\xb1\x02\x68\x81\x15\x2f\xce\x42\x58\x41\x80\xfe\x0f\xf8\x15\
\xf0\x65\x20\x68\xcd\x47\xf1\x61\xb7\x27\x05\x2f\xe1\xf4\x99\x8a\
\x0a\x1a\x86\x5e\xf4\x30\xf6\xd2\xaf\xbb\xa4\xed\xa5\x7f\x7c\x07\
\x8c\x84\x1c\x23\x0d\xed\x05\xfc\x10\x6f\xbd\xac\xcf\xda\x9f\xbb\
\x80\xd9\xc0\x15\x78\x67\x61\x4d\xec\x1b\xe8\x9d\xc4\x0a\x20\x0b\
\xa5\x7f\xf9\x00\xd2\xb5\x60\xc1\xfa\x48\xa7\x03\x27\xe0\x3d\xfb\
\x2d\x21\xfc\x3c\xfe\xf7\xc0\x03\xc0\x1b\x49\x52\x95\x11\x21\x83\
\xf7\xdf\x39\x6f\xf7\x52\xf9\xdc\xff\xa8\x2d\x2a\x26\x99\x4e\x17\
\x83\x76\x06\x0e\x07\x7e\x04\x6c\xce\xda\x95\x59\x25\x50\x22\xb3\
\xab\xa2\x88\xf9\x05\x09\x31\x78\xdf\x58\x11\xf4\x26\x62\x05\xd0\
\x84\xb2\x17\x67\x31\x38\x6a\xa0\x2c\x2c\xd8\x09\xf8\x2d\x70\x00\
\x6b\xcf\xd4\x5b\x04\x3c\x01\x2a\x71\xf0\xa6\x41\xc3\xb0\xfd\xbb\
\x5f\x58\xad\xf4\xc5\xb7\xf1\xf7\x62\xdb\xe0\x23\x17\x47\x02\x9b\
\xd2\xf2\x7b\xe0\x80\xbf\x63\x9c\x5f\x34\x20\xf5\x6a\x7d\x4d\x42\
\x43\xbe\xdb\xfd\xee\x2d\xa6\x7d\xc4\x0a\x60\x35\x4a\xff\xf8\x36\
\x0a\x9c\x59\x14\xee\x0f\xfc\x0e\xef\x50\x6b\x89\x2a\xe0\x19\xc4\
\xcd\xa0\xff\x80\x35\xf4\x84\xf9\x72\xe9\x8b\x6f\x61\x28\x21\xc2\
\x31\x78\xeb\xe6\x10\xd6\x6e\xe1\x7c\x08\xfc\xca\x39\x3d\x6e\x46\
\x7a\x58\x9c\x37\xd0\x2b\x88\x15\x40\x86\xb2\x3f\xbe\x83\x19\x49\
\xe7\x74\x04\x70\x29\x7e\x81\x4e\x2e\x1c\xf0\x5f\xc1\xd5\x06\xcf\
\x02\xd5\x3d\x71\x8e\x5c\xfa\x87\x59\x80\x15\x61\x6e\x1f\xb0\xb3\
\x81\xbd\x68\xd9\xda\x59\x0a\x5c\x22\xb8\xdb\xa0\xb6\x27\xde\x73\
\xcc\x9a\xc4\x0a\x80\x46\x41\xa0\x00\x63\x0a\x70\x11\x2d\x7b\xf9\
\xcb\x81\xe9\x18\x37\xd4\x45\xc5\x1f\x17\x06\x75\x0c\xdb\x7f\xa7\
\x7c\xdf\x42\xbb\x29\x7f\xf1\x2d\xea\x82\x22\x92\x6a\xd8\xd0\xc4\
\x09\xc0\x89\x78\x47\x61\x2e\x2a\x81\x6b\x84\x5d\x03\x54\xf5\xe4\
\x7b\x8f\x89\x15\x00\xa5\x2f\xce\x02\x51\x84\x71\x32\x70\x3e\x7e\
\x29\x6e\x2e\xde\x05\x2e\x95\x78\x12\xa8\x1d\xd6\x03\xcc\xfd\xb6\
\xf4\x83\x41\x52\xb0\x1f\x70\x21\xf0\x15\x72\xbf\x1f\xb5\xc0\x0d\
\x66\x5c\x06\x54\x0e\x89\x57\x1a\xf6\x58\x5a\x13\xd2\xea\xb5\x94\
\xbe\x38\x0b\x19\x05\x18\x27\x02\xbf\x26\xb7\xf0\x47\xf8\xe4\x98\
\x71\xf5\xdb\x25\x1f\xb0\xa0\x77\x09\x3f\xc0\xd0\xfd\xc7\x20\x59\
\x4a\xfd\xcb\x9e\x03\x8e\xc1\x47\x32\x1a\x72\x9c\x5e\x0c\x9c\x21\
\xf1\x0b\x39\xf5\xf7\x2b\x20\x63\x7a\x22\x7d\xd6\x02\x28\xfd\xc3\
\x2c\x1c\x16\x06\xa6\xe3\x80\xcb\xc8\x2d\xfc\x75\xc0\x74\x89\x8b\
\x15\xba\x45\x41\x7d\x82\xa1\x07\xf4\xee\x34\xd9\x15\x5e\x31\x0e\
\x0d\xc4\x39\xf8\xe4\xa7\x81\x39\x4e\xad\x01\x2e\xc7\xb8\x0a\x51\
\x1f\xfb\x04\x7a\x1e\x7d\x52\x01\xac\xf8\xd3\x3b\x44\x45\xa1\x25\
\x6a\xd2\x87\x03\x37\x90\x7b\xce\x5b\x09\x5c\x8b\xec\x5a\x50\x45\
\x4f\xf0\xf0\x77\x14\x19\x07\x61\x31\xa6\x29\x78\xeb\x28\x97\x5f\
\xa4\x02\xf8\x85\x8c\x3b\x11\x51\x9c\x3e\xdc\xb3\xe8\x73\x53\x80\
\xb2\xe7\xde\x85\x30\x22\x51\x93\xde\x1b\x3f\xf2\xe7\x12\xfe\x32\
\xe0\x22\xc4\xef\xfa\x9a\xf0\x03\x0c\xfd\xde\x18\x2c\x70\xb5\x66\
\xee\x56\xe0\x1c\x60\x49\x8e\x53\x07\x01\x17\x06\xe2\xa0\xa1\x2f\
\xee\x44\xe9\x1f\xe3\xe9\x40\x4f\xa2\xcf\x59\x00\xa5\x7e\x51\xcf\
\xb6\xc0\x4c\xe0\xab\x39\x4e\x2b\x07\x2e\x34\xd9\x6d\x32\x1a\x86\
\xf6\x61\x4f\xf7\x8a\x17\x67\xe1\x2c\x08\x43\xb9\x23\x81\xab\xf1\
\xab\x10\xb3\xf1\x3e\x30\xce\xe0\xdf\x11\x8e\xe1\x79\xcc\x7e\x8c\
\x69\x3d\x7d\xca\x02\xf0\xb9\xfd\x0c\xc3\x87\xfa\x72\x09\x7f\x15\
\x70\x29\xc6\xed\x32\xf5\x69\xe1\x07\x18\xb6\xff\x18\x42\xe7\x22\
\x4b\xe9\x7e\xe0\x97\xc0\xf2\x1c\xa7\x6e\x07\xfc\x56\xd2\x46\x41\
\xdf\x1b\x57\x7a\x2c\x7d\x46\x01\x94\xbf\xf8\x0e\x49\x2c\x34\x38\
\x1e\x38\x38\xc7\x69\x0d\xc0\x0d\x98\xdd\x0c\xb1\x53\xab\x91\xa1\
\xdf\x1b\x83\x2b\x08\x5c\x64\xc1\x0c\x7c\x92\x54\x75\x8e\x53\xf7\
\xc5\xec\x4c\x4c\x85\x99\xb4\xe3\x98\x6e\x4e\x9f\x50\x00\x65\x7f\
\x7e\x07\xe7\x1c\x69\xb4\x0f\x70\x2a\xbe\xb8\x46\x53\x04\xdc\x6b\
\xe2\x6a\xa4\xda\xb8\x8a\xee\x9a\x0c\xdb\x6f\x27\x42\x29\x0d\x76\
\x07\x70\x2b\xbe\x90\x48\x53\x02\x60\x12\x0a\x0e\x0a\x0a\x60\xc5\
\x0b\xb3\xf2\xdd\xec\x98\xb5\xd0\x27\x14\x80\x9c\x20\x0c\x46\x02\
\xe7\x01\x23\x73\x9c\xf6\x12\x66\x17\x29\xb0\xb2\x78\xe4\xcf\x8e\
\x9f\x0e\xa9\x06\xe3\x4a\xe0\x89\x1c\xa7\x0d\x01\xce\x8d\x1a\x6c\
\x1b\xeb\x13\x6f\x57\xcf\xa6\xd7\x3f\xa2\x15\x7f\x78\x07\x45\x16\
\x22\x4d\xc1\xe7\xba\x67\xe3\x43\x13\xbf\x84\x68\x7e\x10\x75\x45\
\xd5\xee\x9e\xcb\x90\x11\xf3\x91\x6c\x19\x3e\x5b\xf0\x7f\x39\x4e\
\xdb\xc5\xe0\x14\x64\x05\xa5\x2f\xbe\x93\xef\x26\xc7\xb4\x40\xaf\
\x57\x00\x66\xc2\x02\x7d\x05\x98\x4c\xf6\x9a\xfc\xd5\xc0\x95\xf4\
\x1b\xf0\x3a\x84\x0c\xfe\xfe\xf6\xf9\x6e\x72\xb7\xc6\xbe\x7c\x00\
\xce\x8c\x02\x97\x9e\x0d\x5c\x42\x76\xa7\xa0\x01\x47\x9b\x69\x9f\
\x40\x8e\xa5\x7f\x99\x9d\xef\x66\xc7\xe4\xa0\x57\x2b\x80\x4c\xc8\
\xaf\x1f\x70\x32\xb9\x57\xf7\x3d\x84\x71\xbf\x6a\xab\x18\xba\x5f\
\xdf\xf6\xf8\xb7\x96\x11\xfb\xed\x48\x43\x98\x40\xa1\x3d\x0b\xdc\
\x8d\x5f\x1d\xd9\x94\xe1\x82\xd3\x14\xd8\xb0\x44\x94\x6e\xe3\x37\
\xc4\x74\x15\xbd\x56\x01\x64\x8a\x78\x82\xdf\x99\xe7\x87\x39\x4e\
\x9b\x6d\x70\x1d\xea\x99\xcb\x79\xf3\xc9\xd0\xfd\xc6\x10\x44\x2e\
\x65\xa6\xdb\x80\x7f\xe5\x38\x6d\x1f\x89\x83\xc2\xc0\xb1\xfc\xf9\
\x77\xf3\xdd\xe4\x98\x2c\xf4\x5a\x05\x00\x60\x3e\x4b\x6d\x0a\xbe\
\x94\x76\x53\xea\x81\x9b\x06\x7f\x3e\xf2\x1d\x99\xf2\xdd\xd4\x9e\
\x49\x02\x5c\x2a\xf8\x14\xb8\x06\x9f\x12\xdc\x94\x42\x60\x52\x94\
\x0e\x36\x0c\x02\xd7\xb6\x6b\xc7\x74\x09\xbd\x52\x01\x94\xbd\xf8\
\x26\xc2\xc8\x2c\x6d\xdd\x3b\xc7\x69\x7f\x35\xd9\xc3\x65\x23\x17\
\x33\x6c\xbf\xb8\xba\x4d\x7b\x18\xb2\xcf\x97\xb0\x84\x03\xf4\x1c\
\xf0\x4c\x8e\xd3\x76\xc7\xf8\xb1\x05\x46\xbc\x6a\xb0\xfb\xd1\x2b\
\x15\x00\x0a\x30\x45\x03\x80\xb1\x40\xff\x2c\x67\x94\x23\x6e\x11\
\x6e\x39\x16\x7b\xfd\xd7\x89\xd7\x5e\x02\x82\x1a\xc4\xad\xf8\xda\
\x88\x4d\x29\x00\x8e\x96\xdc\x88\x38\x41\xb0\xfb\xd1\x2b\x15\x80\
\x30\x64\xc1\xd7\x11\xdf\xce\x71\xca\x8b\x66\xf6\x17\xcc\x18\xba\
\x7f\xec\xf5\x5f\x17\x86\xfe\xfa\x74\x24\xe1\xe0\xdf\x28\x47\x6e\
\x80\xd8\x1d\xd9\xbe\x26\x58\xf6\xe7\x38\x2c\xd8\x9d\xe8\x75\x0a\
\x60\xc5\x0b\xb3\x90\x94\x94\x38\x54\x30\x44\x82\x26\x47\xb9\xc4\
\x3d\x0e\x55\x53\x1f\x8f\xfe\x1d\xc1\xb0\xef\x8d\xc1\x20\x25\x51\
\x22\xf1\x79\xb3\x3e\x87\x62\xc1\x61\x11\xd6\x2f\x48\xc7\xfe\x96\
\xee\xc4\xda\xca\x5d\xf7\x38\x84\x81\x05\x5b\x21\xed\x6b\xf8\xfc\
\xde\xd5\x31\xf8\x9b\x61\x2f\x4b\x30\xf4\x87\x3b\xe4\xbb\xb9\xdd\
\x9a\xb2\x3f\xbe\x47\x98\x08\x68\x68\x88\xfa\x81\x2b\x42\x56\x95\
\x28\x70\x0d\x27\xef\x37\x86\x7b\xa3\x35\xed\x79\xc9\xc0\x78\x13\
\xf1\xbc\xc1\xa4\x2c\xfd\xfe\x0d\xf3\x5b\x96\xbd\x96\xef\xfb\x8a\
\x59\x45\xaf\xb2\x00\x96\x3d\xf7\x3e\x66\x0e\xc4\x7e\xc0\x66\x59\
\xc6\x9a\x3a\xc1\x83\x4e\xae\x4a\x61\xaf\xba\xf5\x0e\x67\xe9\x0b\
\xef\x92\x8a\x54\xd0\x50\x97\xfe\x01\x4e\xd3\x70\xf6\x2c\xe2\xd6\
\x54\x7d\xb8\xfb\x89\xe9\xa1\x2c\x7f\x66\xcd\xe4\x9e\xe1\xdf\xdf\
\x09\x44\x03\xf0\x90\xa0\xac\xa9\xe6\x95\x58\x5f\x70\x80\x1b\x59\
\xc8\xf2\x3f\xc4\xd3\x80\xee\x42\x2f\xb3\x00\xd2\xb8\x28\x18\x80\
\xe9\x7b\x64\xcf\xfa\x7b\xd7\xb0\xbf\x02\x0c\xdf\x2f\x9e\xfb\x67\
\xa3\xf2\xb9\xb7\xa9\xb6\x90\xc0\x69\x3d\x19\x27\xcb\x27\x51\x8d\
\xc8\xfc\x79\x0f\xd0\x97\xb6\x7b\xfe\xe5\xa3\x15\x46\x1f\x34\xfb\
\xb0\x04\xf0\x2f\x61\xff\x02\xf6\x6f\x66\x7e\x89\x7d\x59\x58\x7f\
\xa3\x7c\x79\xf1\x98\x6e\x40\x6f\x1c\x06\x47\x23\x76\x97\x8c\xd5\
\x0f\xfc\xf1\xdc\xb0\xef\xa7\x17\xc9\x62\x77\x74\x36\xbe\x78\x7e\
\x36\xb5\x41\x61\x18\xc2\x1e\x0e\x9b\x26\xf1\x4b\xc4\x88\x55\xfd\
\x08\xc8\x76\x13\xb6\xbf\x0c\x74\xfb\xc2\x35\x3e\xaf\xd0\x70\x51\
\x50\x8e\xec\x19\x64\x51\xd3\xfe\x97\x6c\x07\x61\xbb\x2a\x0e\x07\
\x74\x1b\x7a\x8d\x02\x58\xf0\xdc\xfb\x98\x40\xd8\x37\x85\xad\xd7\
\xf4\xef\x82\xe5\x82\x17\x97\x3d\x9f\x60\xc4\xf7\x7b\x77\x51\xcf\
\xb6\xb2\xf8\xb9\x59\x54\xbc\xf0\x21\xa0\xf5\x2d\x4a\x9f\x83\xe3\
\x41\xe0\x00\xb0\xe4\x9a\xc2\x6a\x08\x02\xc1\x50\x39\x83\x1d\xd7\
\x5c\x06\x30\x62\xff\x9d\x20\x00\x61\x7f\x11\xac\xa1\x1d\x32\xc6\
\xc0\x20\xc4\x5e\xa2\x8e\xa5\xbf\x7f\x2f\xdf\xb7\x1d\x43\x2f\x9a\
\x02\x24\x9d\x23\x4d\x50\x6c\xd2\x9e\x64\x53\x6c\xc6\x5b\x40\x3c\
\xf9\x6c\xc2\x17\xcf\xbe\x0b\x8e\xe2\x3a\x57\xbf\x37\xc6\x99\x82\
\xbd\x10\x05\xe4\x72\xd6\x1b\xa5\xc0\x1b\x00\x7c\xb3\x79\xfa\xb4\
\x64\x00\x1f\x03\xaf\x21\x36\xcd\xf2\xf9\x3d\x70\x45\x83\x65\x2a\
\xcf\xf7\xbd\xc7\xf4\x22\x05\x90\x19\x63\x36\x10\xec\x96\xcd\xfb\
\x8f\xf8\x87\x99\xca\xe4\x62\xf3\x13\x60\xf1\x33\xef\x01\x24\x24\
\x76\xc5\x74\x22\x70\x90\x89\xa1\x40\x0b\xb2\x4f\x05\xe2\x06\xb0\
\x97\x00\x2c\xcb\x54\x2a\xac\x2b\x20\x5d\x54\x57\x6b\xce\xfe\x86\
\x71\xb0\x9a\xf8\x62\x4c\xec\x80\xb4\x39\xd8\x5b\xf9\xee\x83\x98\
\xde\xa4\x00\xfc\xc8\xb3\x23\x5e\x09\x34\xa5\x02\xf8\xa7\x64\x6c\
\xd0\xc7\x43\x7f\x4b\x9e\x9e\x0d\x58\x02\xdc\xf6\xc0\x78\xe0\x30\
\x64\x9b\x40\x6e\xc1\xcf\x30\x47\x70\x19\xc6\x23\x40\xdd\x06\x07\
\x66\xef\xc7\xe1\x07\x6f\xcd\x92\x67\xde\x43\xf0\x3a\x62\x39\x4d\
\xaa\x2e\xfb\x9a\x8c\xb6\x0b\x22\x56\x00\xdd\x80\x5e\xa1\x00\x96\
\x3e\xf3\x3e\x69\x45\x18\xf6\x25\x60\xc0\x1a\xd3\x56\x01\xc6\x42\
\xfa\xb8\xf9\xbf\xf8\xe9\x77\x91\x54\x28\xb4\x13\xe8\x48\xc4\xc1\
\x18\x9b\x91\xab\x32\xf4\x2a\x6d\x50\x87\xf1\x2c\xd8\x65\x05\x05\
\xc1\x9b\xa9\x94\xd3\x06\x07\xb6\x2a\x82\x32\x4f\x30\x97\xe6\x65\
\xd7\x0b\x80\x9d\x1b\x28\xb0\xc5\xcf\xcc\xd6\x86\x3f\x8c\xa3\x31\
\xf9\xa4\x57\x28\x80\xc8\x39\x0c\x2b\xc4\xf0\xc3\x52\xf3\x18\xf4\
\x3b\x98\xad\x58\xdb\x10\xd7\xdb\x58\xf4\xcc\x1c\x2c\x91\x44\x0d\
\xf5\x83\x84\xbe\x82\xd9\x11\x82\xff\x33\x5f\x16\xcd\x72\xf5\x47\
\xe3\xff\x36\xbf\x25\xf8\x0d\x48\xf7\x22\x95\xd5\x99\x18\x75\x60\
\x2b\x2c\x28\x41\x18\x58\x79\xda\xe9\x2d\xe0\x9b\x59\xfe\xbc\x63\
\x92\x86\x81\xca\xbe\x82\x30\xa6\x0b\xe9\x15\x0a\x20\xe3\xa9\x1e\
\x80\xd8\x36\xc7\x29\xb3\xc0\xd5\xe2\x7a\xc5\xed\xae\x95\xcf\x9f\
\x7a\x1f\x8c\x84\x9c\x46\xd1\x50\xbf\x1f\xf0\x53\x64\x7b\x40\xcb\
\x73\xfc\xd5\xa8\x00\x9e\x10\x5c\x5f\x6f\x89\xb7\x0a\x14\x69\xd4\
\x8f\xb7\x6b\xf5\xf7\x27\x8a\xd3\xd4\xd7\x24\x9d\xc1\x2c\xb0\x88\
\xe6\x39\x19\x5b\xe2\x97\x6a\xc7\x0a\x20\xcf\xf4\x0a\x89\xc8\xe4\
\x9b\x0f\x37\xd8\xb8\x99\x41\x2b\xea\x80\xb9\x28\x60\xd4\x41\xa3\
\xf3\xdd\xd4\x4e\x63\xd1\x33\xef\x61\xa1\x02\xd7\x10\x0c\x41\xda\
\x15\x71\x00\xb0\xbf\x8c\x6d\xf0\x66\x77\xcb\xf8\xa9\x52\x03\x3e\
\x55\xf7\x06\x83\x17\x04\x35\x9b\xff\x68\x9b\x36\xb7\x65\xf8\x7e\
\x5f\xe2\xf3\xa7\x66\x23\x6c\x2e\x7e\x7b\xb5\x21\x4d\xbe\x6b\x18\
\xbe\x42\xd3\x82\x7c\xf7\x5b\x5f\xa7\x57\x28\x00\x04\x06\x9b\x02\
\xfd\xb3\x0c\x6f\x55\x78\x53\xb6\xd7\xb1\xe8\xf1\xf7\x30\x11\x44\
\x01\x43\x5c\xda\x76\x22\x6d\xfb\x02\xfb\x00\x3b\x99\x31\x08\x30\
\x5a\xa8\xc3\xb1\x9a\xa9\x9f\xc6\x98\x85\x98\x2a\xf4\x68\x50\xd0\
\x7f\x89\x52\x75\x8c\xfa\xf1\x3a\x28\x4c\x07\xc2\x3e\x31\x54\x4e\
\x13\x05\x20\x5f\xa6\x6d\x33\xe0\x9f\xf9\xee\xc3\xbe\x4e\xaf\x50\
\x00\x99\x17\x79\x24\x50\xdc\xdc\xa7\xa5\x72\x60\x71\xbe\xdb\xd8\
\x11\x2c\x7b\xfa\x1d\x66\x95\x0e\x65\x9b\x41\x15\x05\x92\xad\x1f\
\x19\xbb\x98\xf1\x2d\xfc\x3c\x7b\x7b\x60\x48\x63\x38\x24\x13\x8f\
\x5f\x5b\xcf\xa5\x81\xd9\x82\x7b\x91\x3d\x98\x96\xfb\x2c\x30\xd3\
\x46\xff\xb7\xd9\x3a\xb7\x35\x33\x2d\xab\x10\xb6\x08\xb4\x59\x93\
\xe7\x52\x04\x8c\x02\xf1\xde\xbd\x9f\xb1\xc3\x31\x9b\xb4\xeb\x3b\
\x62\xd6\x9d\x1e\xaf\x00\xaa\xee\x9b\x45\x99\x7f\xd9\x47\x92\xd5\
\xd4\xb5\x45\xf8\x6d\xac\x7b\x1c\x4b\x7f\xff\x01\x83\x06\x16\xb1\
\x6c\x45\x55\xa1\x9c\x0d\xa9\x4b\xb3\xe5\x36\x03\x2b\x77\x43\xf6\
\x15\x83\xdd\x11\x9b\x0a\x06\x34\xb9\xdf\xd6\x5c\xba\x1e\x78\x1b\
\xec\x01\x33\x9e\x88\x2c\xf8\x34\x40\xda\xec\x27\x1d\x17\x22\xf5\
\xe9\xbf\xd4\x62\xfa\x3c\x47\x9b\x46\x16\x39\x31\xa0\x7f\x8f\x7c\
\x34\xbd\x86\x1e\xaf\x00\x56\x14\x87\x24\x14\x10\x99\x1b\xd6\x7c\
\xfa\x0f\x06\x4b\xc0\xea\xf3\xdd\xce\xd6\xf0\xd9\x63\xb3\xc1\x14\
\x98\x82\x7e\x92\x0d\xad\xab\x8b\xb6\xa9\xab\xab\xde\x05\x6c\x57\
\xfc\x52\xda\x8d\x0d\x06\x61\xad\x7f\x6e\x5a\x6d\x4a\x64\x46\x19\
\xf0\x3a\xf0\x20\xf0\x07\x88\x16\x4b\x81\x36\xfd\xc9\xb6\xad\xbd\
\x5c\xeb\x31\x07\xa1\xab\x27\x0a\x97\x34\x3e\x88\x26\x0c\xab\x0b\
\xc2\x64\x20\xa5\xba\xb4\x93\x63\xd6\xa0\xc7\x2b\x00\x08\x48\x25\
\x5c\x18\xa4\x19\x92\xcd\xbb\x2d\x28\x13\x6a\xe8\x4e\xf9\x7f\x9f\
\x3c\x3c\x07\xe7\x20\x11\xaa\x00\x0b\xfa\x61\x1a\x6a\x68\x0b\x60\
\x6b\x64\xdb\x09\x46\x63\xda\x16\x58\x0f\x3f\xc2\xaf\xf4\xa2\x6b\
\xe5\x7f\x5a\x4d\x0a\xf8\x04\xf8\xb3\xc4\x63\x06\xff\xb6\x30\x59\
\x2e\x17\xb1\xf1\x4f\x5b\xef\xd9\x6f\x2b\x69\x83\x64\x7d\x22\x72\
\xa1\xca\x56\x35\x7c\x0d\x86\x20\x92\x60\xb1\x02\xc8\x23\x3d\x5f\
\x01\xb8\x00\x73\x24\x84\x06\xe6\x38\xa3\x7c\x40\xe5\x88\x74\xe5\
\xa0\x2f\xba\xb4\x59\x9f\x3e\xfc\x3e\x88\x80\x40\x05\x41\x10\x14\
\x0a\x2b\x42\x1a\x8e\xf7\x7e\x6f\x1a\x04\x6c\x0a\xb6\x29\x68\x73\
\xc4\xe6\xc2\x06\xe3\xeb\x17\x76\xc4\x33\x89\xf0\x4b\x6e\xff\x05\
\xfc\xde\x8c\x3f\x9b\x34\x1f\x48\x6d\x7c\x48\xe7\x09\xfd\xea\x3c\
\xfb\x41\x05\x3f\xda\x62\x08\xc8\x72\xe5\xfc\x0f\xc4\xd4\xf3\xdf\
\xbf\x1e\x4e\x8f\x7f\x00\x99\x10\x60\x68\x50\x98\x6d\x95\xaf\xa0\
\xb6\x6a\xc0\x12\xa0\x78\x8d\xff\xff\xf1\x53\xb3\xf1\x12\x1a\x86\
\x46\x60\x02\x82\xc8\x44\xba\x4e\x22\x5a\x79\x9e\x4b\x14\x9a\xc9\
\x02\x93\x05\xa0\x40\x22\xc0\x08\x11\x21\x7e\x93\xd1\xa4\x41\x7f\
\xf9\xd2\xe3\x83\xf0\x3f\xd7\x07\xd6\xc3\x58\x0f\x6c\x3d\x5f\x0c\
\x43\x1b\x20\x86\x1b\xf4\x97\x51\x6c\x9d\xd3\xf7\x11\xf0\x0a\xf0\
\xb4\xc4\x4b\x66\xcc\x4d\x26\xa9\x8a\x22\xd8\xf8\xe0\xae\x11\xfc\
\x46\x4e\x39\xe7\x6b\xfc\xf0\xd1\x39\x00\xb5\x78\xcf\xe4\x1a\x4f\
\x47\x50\x88\x8b\x2b\xb2\xe6\x9b\x1e\xaf\x00\x90\x2f\x02\x86\x5f\
\xd8\xd2\x1c\x23\xe5\x64\xac\x17\x6c\x00\xc0\x27\x8f\xbe\x8f\xa5\
\x03\xd4\xa0\x4d\x4c\x1c\x02\x6c\x27\xf3\x2b\x84\x9c\x10\x24\x53\
\x90\x5c\x19\x21\xb3\x34\x09\xa0\x40\xa8\x10\xef\x64\x2c\x40\x14\
\xe2\x47\xeb\x41\xc0\x20\x79\xaf\x76\x02\x48\x62\x24\xf1\xf5\xf0\
\xc3\xd5\x9a\xb8\xd2\x04\x6e\x34\xe1\x3b\x29\x29\xd1\x80\xcf\x80\
\xfb\x80\xc5\x9b\x1e\x9a\xe7\xbc\x07\x1f\x82\x4c\xf9\xae\x6d\x96\
\x0c\x54\xd0\x18\xb1\x88\xc9\x1f\xbd\x40\x01\x00\x60\x32\x12\x59\
\xbd\xcd\x52\x04\x01\xfd\x7f\xd2\xcf\xff\xdb\x05\x28\x60\x14\xe2\
\x36\xc1\x0f\x80\xa0\x43\xa5\x71\xe5\xb5\x3a\xe5\xdd\xae\x03\x95\
\x01\xfd\xc1\xb2\x4d\x79\x02\xe0\x08\x60\x20\x70\xde\xf2\xa7\xde\
\x79\xaf\xa2\xae\x80\x2d\x0e\xef\x04\x27\x5f\xeb\xbb\x22\x0d\x4d\
\x77\x5e\x11\x60\x21\xc4\xfb\x07\xe7\x9b\x1e\xff\x00\x1a\x2b\xcf\
\xb2\xb2\x62\xcd\x9a\xc7\xca\x8a\x34\xc0\xa2\x87\x17\x23\x39\x10\
\x3f\x44\xf6\x3d\xc9\x02\x56\x55\x0b\xea\xe0\x83\x56\x1f\xca\x7e\
\x44\x12\x15\x88\x8f\x11\xcf\x21\x7e\x07\x4c\x00\xdb\x17\xd9\x39\
\x88\x65\xca\x7e\xbd\x10\xf1\x23\x89\x19\x15\x75\x05\x07\x60\x96\
\xf8\xf8\xa1\x0f\x5a\xdd\x9f\x1d\xfb\x6c\x0c\xc9\xd4\xbc\x9d\x46\
\xe3\xce\x2d\x7d\x6d\x7d\x46\x77\xa3\xc7\x5b\x00\x5a\xfd\x17\x6b\
\xf9\x7d\xaa\xa5\x92\xcd\xfe\xf7\x29\xf3\x77\xdd\x62\x74\xe3\xbd\
\x77\xd9\xfb\xd7\xd2\x17\xf9\x14\xdc\x6a\xa0\x0c\xef\xb1\x9f\x0d\
\xbc\x2f\x78\x0f\xec\x23\xe1\x96\xa5\xe5\xaa\x03\x4c\x09\x05\x98\
\x98\xe3\x02\x1c\x70\x99\x44\xae\x0d\x37\x76\x43\xdc\x83\x74\x9d\
\x43\xb7\xcf\x7b\x70\x4e\xd9\x96\x47\xe4\x65\x4a\x90\x49\x4c\x82\
\x66\xab\x34\x63\xf2\x4e\x8f\x57\x00\x8d\xb4\x2a\x3c\x26\xf1\xc9\
\x6e\x9b\x87\x16\x65\xdd\x2d\xa8\xb3\x49\xe1\x1d\x62\x75\x78\x41\
\x5f\x80\x9f\xaf\x7f\x8a\x98\x07\x7c\x64\x30\x1f\x28\x37\x59\x15\
\x10\x6d\x71\x64\xf6\x3c\xfc\x8f\x1f\x9c\x9b\x46\x9a\x06\x96\x12\
\x5c\x86\x18\x99\xe3\x3b\xd7\x07\x2e\x34\xec\xcb\x60\x97\x7f\xfa\
\xe8\xc7\x6f\xba\x28\xd2\x16\x87\x6f\xdd\x25\x37\xbc\x52\xe8\x1b\
\x9f\x4b\xf3\x22\xa1\x31\x79\xa6\xc7\x2b\x00\xb9\xcc\x2c\xc6\x56\
\x1a\x01\x6b\xfe\x7d\x75\x23\xc1\x19\xf2\x9e\xe7\xe2\x6c\x83\x66\
\x1b\xdf\x47\x19\xa4\xf1\x47\x00\x14\x66\xcf\x77\x59\xc9\x32\xe0\
\x5a\xe0\x0f\x06\xcb\x14\x50\x23\xe7\x6a\xeb\xea\xea\x1b\x12\x89\
\x90\xd1\x63\x5b\x5f\xa7\x70\x8b\x23\xb6\x65\xde\x83\x1f\xa4\xcd\
\x98\x81\xa3\x14\xb8\x42\x30\x9a\xec\xdf\x5f\x04\x1c\x2a\xd8\x25\
\x9d\x4a\xdf\x04\xdc\xf7\xe1\x83\xb3\x57\x04\xae\x98\x2d\x8f\xda\
\xbc\xc3\x9f\x47\xb3\x1e\x5a\xcb\x92\xe3\x98\xfc\xd2\xf3\x15\x40\
\x93\x5f\x5a\x7a\xb1\x32\x51\x82\x10\xbf\x53\x4d\x36\xde\x06\xfe\
\x8b\x17\x68\x87\x1f\xb5\xc9\xfc\x5e\x9f\x39\xea\x32\x47\x95\xa0\
\x0a\xa3\x12\x31\x00\x38\x09\xd8\xbd\x85\xef\xdf\x00\x38\x12\xf8\
\xc4\xa1\x0f\x2c\xb2\xba\xad\x8e\x6a\xbf\x49\xbe\xe5\x11\xdb\x30\
\xf7\xa1\x39\x6e\xc9\xc8\xe5\x4f\x8d\x5a\xb4\xde\x52\xc4\x15\xc0\
\x37\x94\x5b\x07\x6d\x03\x5c\x09\xec\x8f\x12\xd7\xa7\x2d\xf5\xf2\
\x07\x0f\x7c\x50\xbf\xcd\x91\x6d\x5f\xed\xd7\x5a\xb4\x16\x29\x8f\
\x95\x40\xfe\xe9\xf1\x0a\xc0\x65\xde\x22\x5b\xf9\x9f\xd5\xc8\xf2\
\x86\x65\xc2\x51\xc5\xd9\xfe\x00\x3c\xd1\x3f\xb2\x8b\xea\x02\x25\
\x01\xb9\x4c\x60\x50\x41\xa0\xc8\x25\x9c\x99\xb4\xed\x31\x5b\xae\
\xf1\xb1\x8f\xee\xff\x80\x06\x12\x24\x49\xff\x0b\x6f\x8e\x1f\x00\
\x84\x59\xc4\x30\x00\xbe\x8c\x98\x0a\x76\xbf\x83\x6b\xdf\xbf\x6f\
\xee\x07\x21\xc6\x36\x47\xb7\x4f\x08\xb7\x3d\x7c\x34\x1f\xdd\xff\
\x01\xce\x82\x57\x03\x34\x5e\xe8\x42\xe0\x30\x44\x51\x0e\x35\x50\
\x04\x1c\x88\xf8\x7a\x20\x1e\x01\xee\xf8\xf8\x81\x0f\xde\x95\x48\
\x6f\x79\x54\xc7\x2b\x82\x4c\x8e\x06\x71\x15\xf6\xee\x4b\x8f\x57\
\x00\x2b\x47\x7e\xf3\xb1\xc0\x2c\x7f\x5e\xf9\x3f\x33\xd1\x80\x50\
\x59\xa7\x00\x02\xa8\xaa\x0e\xa5\xad\x8f\xde\xa6\xa1\xb5\x5f\xbf\
\xd5\x51\xdb\xf0\xf1\x23\xf3\x89\xea\x1a\xde\xb5\xc0\x8e\x17\x76\
\x1e\x30\x51\xb2\x01\x90\x6d\x38\xd6\x20\xb0\xe3\x81\xaf\x87\xd8\
\xf5\x38\x1e\x9f\x5b\x32\xaf\x32\x31\xbc\x86\x2d\x0f\xdc\xa9\xcd\
\xb7\xbf\x55\x46\x70\x3f\xbc\x7f\xee\x3c\xe0\x34\x64\xef\x00\x67\
\x4a\x36\x32\xfb\xf7\x03\x68\x04\xd8\x09\x82\x1f\xa4\x1d\x0f\x18\
\xdc\xfb\xe1\xcc\x0f\xe7\xca\x48\x6f\x73\x4c\xc7\xf9\x07\x56\x4e\
\xfd\x05\x16\xef\x05\xd0\x2d\xe9\xf1\x61\xc0\x95\x64\xc2\x80\x4d\
\x0f\x84\xd6\x70\x42\xf9\x30\x59\x71\xf3\x73\xcd\x49\x56\xdb\xba\
\x65\xb4\x6b\xb2\xc5\xa1\x9b\xb2\xf5\xd8\xd1\x38\xc2\xc5\x22\x38\
\x4f\xb2\x33\x33\xe1\xbb\x2c\x6d\x32\x9f\xbb\x24\x76\x41\xdc\x8a\
\x31\xd5\x42\xf7\x8d\x74\x69\x71\xc1\xdc\x7b\xdb\x5f\xb6\x60\xeb\
\xa3\xb6\x45\x04\xe5\x0a\xec\x5a\x61\xe3\x10\x7f\x47\xa4\xd5\x72\
\x1b\x36\x47\x9c\x2b\xf1\x8c\x8c\xcb\x05\x5f\x9e\x7b\xef\x07\x85\
\x73\xef\xeb\xe0\xb0\x61\x8e\x67\x23\xe7\x8f\x98\xfc\xd1\xe3\x15\
\x80\x93\x3f\x1a\xcd\xcd\x66\x87\x56\x9b\x8b\x66\x14\x80\xe4\x7d\
\x00\x6b\x9c\x03\x91\xa0\x76\x5d\xe6\xa5\xdb\x1c\xbd\x15\x98\x6a\
\x82\x42\x4d\x15\x1c\x23\xbf\x00\x27\xca\xd9\x36\xe8\x27\x38\x54\
\xe2\x11\xd0\x65\x88\xd1\x1f\xdd\xf7\x7e\xf0\xc1\xbd\x73\xdb\xf7\
\xfd\x47\x6d\x4d\xe0\x88\xe6\x97\x15\xfc\x49\xd8\x91\x82\xdf\x09\
\xbe\x50\x4b\xfd\xe3\x37\xfa\xd8\x52\x70\x36\xe2\x69\x64\xb7\x23\
\xfb\xbf\xb9\x33\x3f\x18\xfa\xc1\xcc\x0f\x6d\xee\xcc\xf6\x2b\xa5\
\x35\xbe\x57\x59\x9e\x0b\xb1\x1f\x20\xdf\xf4\x78\x05\xb0\x32\x99\
\x66\xb5\xdf\xd7\x48\xb0\x61\xd5\x4b\xe6\x00\x87\x85\x82\x7e\x6b\
\x9c\xcb\xca\xc4\x9b\xda\xb5\x39\xae\xd6\xc6\x36\x47\x6d\x4d\x54\
\x1d\x4a\x61\xfa\x55\x39\xc6\x02\xbf\x03\x96\x36\xb6\x27\xab\x14\
\xfa\x5a\x06\x67\x09\x9e\x4a\xbb\xc4\xaf\x9c\xc2\xad\xdf\x9f\xfe\
\xb1\xcd\x99\xf1\x51\x9b\xbf\x7f\xab\xa3\xb7\xe6\xbb\x27\x6f\x0a\
\xd2\xe7\x26\x2e\x46\x1c\x05\xfc\x5e\x50\xa7\x96\xda\xe0\x2b\x78\
\x6c\x04\x4c\x90\x78\x40\xd8\xe3\x0e\x7e\x2e\xd9\xae\x73\x4a\x3e\
\xec\xff\xce\xb4\x59\xcc\x9e\x3e\xa7\x6d\x8f\x66\xf5\xe4\xac\x26\
\xcf\xa7\x49\xf7\xc7\xe4\x89\x9e\xaf\x00\x08\xfc\xd1\x64\x2f\xc0\
\xd5\xf6\xa3\xb3\x95\x66\xbd\x0c\x1c\x09\x64\x45\x59\xce\x8d\x90\
\xd5\x76\x44\x7a\xfa\xb6\x13\xb6\x64\xf4\x91\xdb\x21\x6c\x91\x13\
\x17\x49\x36\x41\xb2\x57\x9a\xee\x97\xd7\xe4\x30\x64\xa3\x91\x5d\
\x88\x78\xd2\x02\xf7\x73\xd0\xe6\x1f\xcc\xf8\xcc\xe6\xcc\x98\xd7\
\xf6\x36\x8c\xdd\x9a\x3a\x5c\x2a\x61\xe1\x9f\x9d\x31\x16\xec\x4c\
\x64\x6f\xaf\xa5\x0d\x8d\x7d\x31\x08\xd9\x77\x90\x5d\x0a\x3c\x8b\
\xd9\xbd\xc9\xb0\xdf\xa9\x66\xc9\x5d\xde\x2f\xf9\x68\xd0\x47\x25\
\x73\x6c\x6e\x49\x2b\xa6\x09\x32\x50\x90\xeb\x3b\xd6\xb4\xce\x62\
\xf2\x42\x8f\x77\x02\xae\x11\x05\xc8\xce\xaa\x48\xa1\xff\xad\xd0\
\xaf\x43\x6f\x46\x84\x51\xdb\x91\x6d\xdb\x6e\xdc\x56\xcc\x9e\xf9\
\x51\x6a\xf4\xd8\xb7\x9f\x9b\x33\xe3\x4b\x6f\x23\x8e\x03\x26\x01\
\xa3\x5a\xf8\x58\x88\xdf\xe0\xe4\xb7\x60\x47\x45\x34\x3c\x68\xf0\
\xc4\xec\x99\x1f\x7e\x68\x22\xbd\xdd\xb8\xd6\x3b\xe9\xc6\x8c\xf5\
\x6b\x00\xe6\xcc\x98\x57\x6a\x16\xdc\x21\xa2\x3f\x48\x1c\x03\x8c\
\x03\xb6\x62\xed\x0b\x16\xc2\x4c\x5b\x0f\x02\x0e\x04\x2d\x03\xde\
\x4d\x93\x78\x5d\xf0\xfa\xfb\x25\x1f\xbd\x0b\x7c\x01\x51\x35\x98\
\xdb\x6e\x7c\x93\x48\x42\xe3\x68\x1f\xfb\xff\xba\x2d\x3d\x5e\x01\
\xac\x41\x8e\x4c\xa0\x26\x14\x93\x6d\xeb\x70\x11\xa1\x8e\x2f\x1d\
\xb6\xfd\xd8\xad\x00\x98\x13\x7d\xb4\x40\xb2\x8b\x2d\xd4\x1f\x05\
\x67\x21\xbe\x47\x63\x38\x32\xbb\x80\x24\xf0\x55\x80\x76\x94\x98\
\x64\x91\x3d\x03\x7a\x78\x76\xc9\x47\x6f\x26\x94\xac\x5d\x1a\x2e\
\x62\xcf\xb1\x5f\x6b\x55\x1b\x46\x8f\xdb\x12\x40\xb3\x67\xce\xfb\
\x58\x41\x70\x69\x10\x45\x8f\xe3\x73\x12\x0e\x45\x6c\xb5\xb2\x3f\
\x5a\x16\xd4\x04\xb0\x21\xb0\xa1\xc4\x3e\xf8\x32\x6b\xcb\x80\x39\
\x10\xce\x02\xde\x9f\x3d\xfd\xa3\x0f\xc1\x3e\xc1\x28\x47\xd4\x99\
\x45\xb5\x52\xa0\x56\x3e\x97\x98\x3c\xd0\x6b\x14\xc0\x1a\x8e\xbe\
\x96\xcf\xc9\xae\x00\xfc\x5a\xfa\x0e\xb5\x00\x56\x67\xf4\xb1\x5b\
\xf1\xc1\x9d\x1f\xa5\xd3\x09\xfb\x47\x10\xf1\xb6\x33\x1d\x04\x9c\
\x88\xcf\x0d\x48\xb6\xf0\xd1\x10\x3f\x5a\x9f\x0e\x76\x14\xe2\xcf\
\x69\xd2\x4f\x0e\x4d\xaf\xf7\xf2\xec\x7b\x3e\x5a\x02\x44\xdb\x4f\
\xdc\xaa\x55\x6d\xd8\x7e\xec\x96\x00\x6e\xce\xf4\x0f\xdf\x35\xe3\
\xc2\x48\x36\x03\x38\x14\x38\x04\xd8\x3e\xb3\xcc\xb9\x35\x18\x7e\
\x39\x74\x7f\x7c\x75\xdf\xfd\xf1\x19\x91\x55\xa0\x72\xc4\x02\x60\
\x61\xe4\x82\xcf\x32\x6d\xcf\xbe\xe2\x32\x56\x02\x79\xa7\xc7\x2b\
\x80\x95\xa9\xc0\xac\x7d\xd7\x79\xf9\xb2\x14\xc5\x88\x30\xcb\xb9\
\x9d\xaa\x00\x00\xb6\x39\xce\x0b\xea\xbb\xd3\xe7\x95\xa7\x83\xa0\
\xa4\xc0\xe9\x4f\x88\xa3\x81\x89\xf2\x99\x7a\x01\xe4\x1c\x88\x0d\
\x9f\xdb\x7f\xa4\xbc\x49\x3e\x17\xb3\xdf\x03\xcf\xbc\x37\x6d\xde\
\x3b\x16\x24\xaa\x24\xc7\x0e\x13\x36\x5f\x6b\x3b\x46\x4f\xd8\x1a\
\x20\x7a\xbf\xe4\x83\xb9\x98\x2e\x97\x0b\xa7\x21\xdb\x17\xaf\x0c\
\xf6\x00\x86\xad\x9e\x3f\xd1\x4a\x0b\x3e\x81\x2f\xff\x3d\x04\xaf\
\x14\x5a\x41\x3c\x37\xc8\x37\x3d\x5f\x01\x64\x7e\x1a\x59\x56\x9c\
\x65\xff\x40\x3f\x7c\x32\x50\x53\x22\x41\x6d\x57\xbc\x92\x3b\x4e\
\xf0\xd9\x84\xef\xdd\xf3\xc9\x42\x29\xb8\xda\x82\xf4\x53\xc0\xd1\
\xc0\x51\x88\x2d\x57\x0a\x5f\xee\xc6\x14\x03\x3b\x03\x5f\x42\x4c\
\x01\x5e\x57\x94\xfe\x0b\xf0\xb7\x77\xef\x99\x37\x97\xa4\x55\x11\
\x69\xe5\xf7\xe4\x22\x33\x67\x77\xb3\xa6\x7e\xf2\x79\x7d\x6d\xbf\
\x19\xc5\xfd\xab\x9e\x30\x7f\xdd\xfd\x32\xc7\x4e\x88\x81\x6b\xf4\
\x55\x2c\xb3\xbd\x8a\x1e\xaf\x00\x1a\xe9\x80\x29\x40\x0a\xa8\xef\
\x4a\xab\x74\x87\x89\x9b\x03\xb8\xd9\xd3\x3e\x9e\x03\x5c\x24\xf4\
\xb0\xe0\x28\xbc\x49\xbe\x65\xa6\xec\x58\x4b\x18\xbe\x70\xe8\x81\
\xf8\xe2\x26\x4b\x81\x7f\x93\xd2\x9f\x0d\x5e\x7e\xef\x9e\x79\xf3\
\x20\x2a\x07\x73\x3b\x4c\xcc\xed\x3c\x1c\x33\x69\xf3\xc6\x5f\x2b\
\xdf\xbd\xe7\x83\x97\x09\xd2\xaf\xe0\x0a\x6e\x06\xfb\x2a\xf0\x6d\
\xe0\x1b\xf8\xc5\x46\x43\x5a\xd1\xa6\x98\x1e\x44\xcf\x57\x00\x6d\
\xc9\x24\x5b\xa5\x00\xb2\x85\x3f\xeb\x60\xb5\x62\x80\x5d\xc8\xf6\
\xc7\x6e\x01\x10\xbd\x3b\x75\xde\x3b\xc0\x05\x18\xd3\xf1\xe5\xca\
\x0e\x07\xb6\xa3\x35\x5b\x7b\x79\xa5\xb6\x21\xf0\x43\xe0\x07\x82\
\x15\x78\x07\xdd\x2b\x60\xaf\xbc\x33\xf5\x93\xb7\x30\x96\xa5\xd2\
\x41\x6d\x18\x88\x2f\x4d\xce\x6e\xa5\xef\x38\x71\x9b\xc6\x9e\x5a\
\xfa\xde\x3d\xf3\x7e\x0f\xee\x05\x11\x0e\x46\x8c\x06\xed\x09\x7c\
\x05\xef\x9c\x1c\x85\xaf\x3c\xd4\x0b\x42\xc9\x7d\x97\x1e\xaf\x00\
\x56\x8d\xd8\xb6\x56\xa7\x92\x64\x64\xd6\x01\x34\x1f\xc5\x8c\x5a\
\x50\x94\x4f\x1b\x77\xc7\x49\x5b\x02\x44\xef\x4d\xfd\x68\x6e\x20\
\xf7\xbb\x88\xf0\x5e\x99\x7d\x1f\x38\x04\xf1\x75\xbc\xc0\xb5\x86\
\x04\xde\x5f\xb0\x3e\xc6\x37\x11\xd5\xa0\x85\x88\xb7\x92\x41\xf4\
\x16\xf0\xe6\xbb\x77\x7f\xfc\x8e\x89\xe5\xc9\x54\xb2\x26\x0a\x23\
\x6d\x77\x7c\x73\x85\xb0\xc3\x44\xdf\x1e\x60\xc5\xbb\x77\x7e\xf2\
\x4f\xa2\xf0\x9f\x14\x44\x45\x42\xc3\x80\xd1\xf8\xed\xd8\xb7\xf1\
\xbf\xb3\x25\x30\x0c\x9f\x64\xd5\x1a\x85\x15\xcf\x26\xba\x01\x3d\
\x5e\x01\xac\x44\x6b\x77\x2a\x67\xfe\xde\x4f\x59\xee\xdb\x44\x6d\
\x66\x27\xdb\xbc\xb3\xc3\xa4\xad\x00\xdc\x3b\x53\xe7\xcd\x77\x61\
\xea\xce\x30\x9d\x7c\x58\xde\x0c\x3f\x08\xd8\x57\xb0\x31\xad\x7d\
\x76\xde\x37\x3a\xc0\xbc\x90\x8e\xc6\x3b\xfa\xaa\x05\x9f\x3b\x63\
\x76\x54\x90\x7a\x0f\x78\x6f\xd6\xd4\x8f\xdf\x03\x16\x82\xaa\xc2\
\x28\x5d\x23\x0b\xb4\xe3\x94\x55\xd3\x86\x1d\x8f\xdb\xbc\xf1\xd7\
\x3a\xe0\x73\xe0\xf3\x59\x77\xcd\x7b\x09\x11\x12\x04\xfd\xcd\x3b\
\xff\xb6\x00\x6d\x86\xb7\x0e\x46\x02\x23\x05\x63\x32\xdf\xbb\x86\
\xbc\xaf\xf4\xd9\xc4\xe4\x95\x9e\xaf\x00\x5a\x51\x07\x00\xe0\x9d\
\xbb\x3f\x22\xad\x04\xa1\xa5\x47\x92\x65\xf0\x11\x54\x80\xa5\xbb\
\x53\x6c\x6a\xa7\x49\x2b\x9d\x78\x65\xb3\xee\xfe\xe4\xf7\x82\x3f\
\x1a\x6c\x05\xfa\x1e\x7e\xce\xbf\x5b\x66\xa7\xdd\xb5\x39\x0d\x57\
\xbf\x2b\xc3\x6f\x36\xb2\x6d\xe6\xf8\x31\xa2\x1e\x51\x81\xb1\x18\
\xd9\x47\x91\x25\x3f\x02\x3e\x9c\x75\xd7\xbc\x4f\xc0\x3e\x03\x96\
\x19\xaa\x91\xa9\xae\x1e\x97\x0a\x31\x76\x9d\xbc\x35\x63\xa6\xac\
\xb4\x10\x2a\x32\xc7\x7c\x80\xb7\xee\xfe\x10\x27\x2c\x4c\x04\x09\
\x8b\xec\x44\xe0\x1a\x9a\xbc\x6b\xb1\xf0\x77\x0f\x7a\xbc\x02\x68\
\xed\x8b\xe4\x14\x10\x58\xb4\xa9\xc4\x77\x73\x54\x03\x5a\x28\xd4\
\x25\x51\x80\xf6\x30\x66\xf2\xe6\x00\x0d\x6f\xdd\xfd\xf1\x6c\xa1\
\xf7\x43\x82\x69\x48\x63\xf0\x31\xf8\xef\x02\x3b\x48\x0c\x26\x47\
\x69\x84\xb5\x50\x08\xac\x87\x58\x0f\x3f\x62\x03\x44\xc2\x6a\xc8\
\x24\xfc\x08\x5b\x80\xec\xf3\x02\x82\xc5\xc0\xe2\xb7\xef\xfa\xf8\
\x0b\x60\x19\x62\x05\x58\xa9\x19\xb5\x12\x69\x50\x1a\x67\x51\x68\
\x16\xb9\x34\xf5\x21\x2e\xd7\xb6\x6c\xb1\x0a\xe8\x06\xf4\x68\x05\
\x30\xeb\xae\x8f\x29\x2c\x68\xb0\xba\xfa\xc2\x11\xb4\x30\x3f\x0e\
\x08\x71\x44\x9b\x9b\xb8\x00\x6c\xb7\x2c\x6f\x9e\x03\xde\x0e\xcc\
\x52\xe9\xa8\x2e\xdf\xb7\xd5\x22\x3b\x4f\xde\x02\xbc\xf0\x94\xcf\
\xba\x63\xde\xcb\x26\xf7\xaa\x0b\xc3\x1b\xc0\xbe\x84\xd8\x1b\xf8\
\x16\xb0\xa3\xfc\x7c\x7c\x5d\x3c\xf6\x21\xbe\x4f\x07\xe2\xab\x19\
\xad\x5e\xb3\xcc\x01\x0d\x99\xa3\x1e\x68\x90\xa8\xc2\x5b\x51\x55\
\x40\x2d\x52\x5d\x00\x35\xc2\xb6\x24\xbb\xa3\xb0\x1f\xb0\x5e\x20\
\xb7\x7c\xd6\x9d\x1f\x6b\xcc\x71\x5b\xe4\xbb\x6b\xfb\x24\xdd\x75\
\xc0\x6b\x91\xb7\xee\xf8\x04\x33\x21\xd9\xa6\xf8\x3a\xf8\x87\x00\
\x3b\x63\x59\x9c\x4f\xe2\x6f\xf8\x52\x5f\x7b\x01\x3b\x61\x4d\x84\
\xc2\xe7\x0e\x2c\x30\x38\x50\xf0\xd6\xce\xab\xe6\xba\x3d\x8a\x77\
\x6e\xff\x84\x10\x59\x2a\xb0\xc1\xc0\xb6\x82\x3d\x81\xaf\x01\xbb\
\x02\xa3\x32\x65\xcb\xd6\x7c\xde\xf9\x7d\xfa\x69\xc4\xbb\xc0\xe3\
\xc0\x7d\x96\xb0\x79\x8a\xa4\x9e\xda\xff\x3d\x95\x1e\xa7\x00\xde\
\xbc\xfd\x63\x84\x92\x46\xf0\x7f\x18\xbf\x04\xbe\x6c\x90\xc8\x65\
\x4f\xb6\xa6\x12\xb5\xc1\xcd\x01\xc1\x59\x42\xa9\x2f\x1d\xdf\xca\
\x24\xb6\x6e\xcc\x9b\x77\x7f\x02\x05\xc2\xea\xac\x08\x6f\xda\xef\
\x88\x0f\xdf\x7d\x05\xd8\x09\x18\x21\x9f\xc6\x9b\xd7\x10\x5e\xa6\
\x60\xb0\x03\xde\x01\xae\x00\x3d\x0e\x56\xb7\xcb\xf1\x9b\xe7\xb5\
\xff\xfa\x12\x3d\x4a\x01\xbc\x79\xfb\xa7\x00\x45\xa0\x13\x80\x5f\
\x01\x23\x3a\xe0\xb2\x6f\xe1\x17\xc6\xcc\xde\xe5\x84\xcd\xf3\x7d\
\x8b\x9d\xc2\x5b\xb7\xcd\xc7\x64\x16\x85\x51\x3f\x60\x7d\x13\xa3\
\xf1\x26\xfd\x4e\x99\x9f\x9b\xe2\x4d\xfd\x62\xf2\xf7\x4e\x94\x03\
\xd7\x98\x71\x1d\x50\xb5\x73\xac\x04\xba\x84\x1e\xa3\x00\xde\xbc\
\xf5\x13\xcc\x11\xba\xd0\x4e\x06\x2e\x31\x63\x50\xbb\x2f\x96\x59\
\xa2\x6a\x7e\x13\x8e\x13\x2b\xeb\xc3\x17\x86\x14\xa5\x19\xd3\x47\
\x5e\xba\xd7\x6f\x5b\xc4\xd7\x1a\x36\xe4\x7f\x05\xf3\x0b\xcc\x34\
\x08\x6c\x14\x68\x6b\xfc\x7c\x7d\xab\xcc\xb1\x05\x3e\xb4\x57\x2c\
\xe5\x4c\x9e\x5a\x59\xf6\xbf\x59\x8c\xaf\x7d\xd4\x4a\x5c\x8e\xd3\
\x15\x18\x0d\xbb\x9e\xb4\x79\xbe\xbb\xaa\xd7\xd3\x23\x14\xc0\x5b\
\x37\xcf\x27\x4a\x38\x4c\x76\x00\x30\x15\xef\x94\x5a\x17\x04\xfc\
\x0f\xf8\xa5\xea\xd2\x2f\x5a\x41\xa8\x5d\x4f\xde\x3c\xdf\xb7\x99\
\x77\xfe\x7b\xeb\x27\x08\xc2\xc0\xac\xbf\x60\xa0\x99\x8d\x44\xda\
\x08\x9f\x77\xb0\x51\xe6\xd8\x00\x6f\x79\x0d\xc7\x3b\xf2\x0a\xc8\
\xec\x92\x8c\x77\x2a\xaf\xeb\xb4\xa2\x0c\x71\x32\x85\xba\x9f\x3a\
\x63\xd7\x93\x7b\xfe\x94\xac\x3b\xd3\x23\xa2\x00\x51\x20\x10\x23\
\x25\xce\x25\xb7\xf0\x3b\x7c\x1c\x7a\x01\x3e\xbe\x3d\x82\xe6\x2f\
\x63\x0a\xbf\x1b\xcf\xe3\x06\x77\x06\x85\x7c\xe0\x48\xb0\xeb\x89\
\xf1\x4b\x06\xf0\x65\x3f\xe2\xae\x1e\xd7\x5f\x08\xfc\xe7\x8d\x5b\
\x3e\x25\x72\x50\x10\x52\x80\x59\x91\x43\x05\x86\x8a\xc0\x86\xca\
\x6f\x79\x3e\x18\x63\x10\x62\x70\xc6\xb7\x50\x8c\x2f\x41\x5e\xc8\
\x2a\xa5\xd0\x58\x01\x6c\x28\xde\x39\xb9\x05\xd9\xa3\x14\x43\x80\
\x73\xa8\xb7\x7f\x61\xb4\xbf\x20\x61\x4c\xab\xe8\xf6\x16\xc0\x7f\
\x6f\xf9\xb4\xd1\x5c\x3f\x09\xb8\x1e\x9a\xac\x9d\xf7\x9e\xbd\x0a\
\x8c\x12\x83\x3b\xc1\x16\x4a\x1a\x8d\xb1\x27\x5e\x11\x0c\xc1\x2b\
\x87\xc5\xc0\x2c\x83\x57\xc0\x3e\x06\xa5\x76\x3d\x29\x16\xfc\xce\
\xe2\xb5\x5b\x16\x21\x05\x16\x58\xbd\x25\x82\x34\x4e\x01\x91\xf7\
\xde\x26\x8d\x60\x73\xd0\xc4\x4c\x85\xa4\xa1\x59\xde\x42\x87\xf8\
\x75\xc5\x82\xe8\xd2\x81\x1b\x85\xec\x76\x4a\xfc\x9c\x3a\x8b\x6e\
\xaf\x00\xde\xb8\x65\x3e\x78\x21\x7e\x1c\xd8\x3b\xcb\x29\x55\xc0\
\x6f\x4c\xba\x15\xa8\xfd\xf2\x29\x9b\xb1\x60\xea\x62\x6e\xfb\xf0\
\x43\x0e\xde\x64\xe3\x10\x59\x52\xce\x50\x60\x0d\x26\xdc\x6e\xa7\
\x6c\x92\xef\x5b\xea\xf3\xbc\x71\xf3\xa7\x98\x28\x50\x60\x13\xf0\
\x45\x53\x87\x66\x39\xed\xbf\x26\x3b\x40\x68\xf1\x6e\xa7\x6c\x9a\
\xef\x26\xf7\x5a\xba\xbd\x02\xf8\xcf\xcd\xf3\xc1\xd8\x03\xf1\x0c\
\x7e\xde\xd9\xf4\x06\xee\xc4\x82\x33\x40\xb5\xbb\x9d\x1c\x0b\x77\
\x4f\xe1\x8d\x5b\xe6\x93\x59\x34\xf4\x5b\xe0\x67\x59\xb6\x34\xab\
\xc2\xaf\x86\x7c\x6e\xf7\x58\x01\x74\x1a\xdd\x7a\x29\xe7\x9b\xd7\
\x2f\xf5\xbf\x38\x76\x25\xfb\x28\xb1\x58\x70\xb7\xe4\x62\xe1\xef\
\x61\xec\x76\xf2\xa6\x08\x1a\x84\xa6\xcb\x47\x63\xd6\x44\x0c\x40\
\xec\x66\x66\xbc\x7e\xfd\xa7\xf9\x6e\x6e\xaf\xa5\x5b\x2b\x80\x74\
\xa2\x8e\x01\x83\x07\x01\x6c\x83\x32\x75\xe5\xd6\x3c\xde\x42\xcc\
\x89\xb3\xca\x7b\x26\x99\xb2\xe0\xf3\x24\x5e\xcb\xb2\x57\x02\xc0\
\x56\x38\x57\x10\x86\xdd\xde\x50\xed\xb1\x74\xeb\x28\x80\x1c\x54\
\xae\xa8\x2c\xc4\x6c\xfd\x1c\xa7\xcc\x19\x42\x6d\x65\x05\x45\xf9\
\x6e\x6a\x4c\x3b\x08\x9c\x88\x82\xb0\xce\xa4\xb9\x39\x74\xf8\x06\
\x32\x2b\xc4\xaf\x39\x88\xe9\x04\xba\xb5\x05\x20\x84\x4c\x09\x41\
\xff\x1c\x1b\xda\x94\xad\x50\xb1\x0a\xc3\x78\x83\xb9\x9e\xc8\x6e\
\xa7\x6f\x06\x12\x82\xb2\x9c\x5b\xa7\xc9\xc2\xf6\xec\xd7\x18\xd3\
\x3a\xba\xb5\x05\x90\x31\x05\x85\x2f\xd8\x99\xb5\xfd\x01\x46\x45\
\xba\x5b\xeb\xb1\x98\x96\xf0\x0f\x36\xb1\xea\xd7\x55\x58\x9e\x4a\
\xb4\xf5\x25\xba\xb5\xe4\x08\xc3\x11\xa4\x24\xca\x73\xec\x67\x37\
\xd2\x9c\x92\x89\x78\x84\xe8\x91\xfc\xeb\xc6\x85\x3c\x36\xf3\x0e\
\xe4\x93\xbc\xb2\xed\xed\x58\x0e\x4a\xc5\xa5\x03\x3a\x8f\xee\xad\
\x00\x2c\x81\xa5\xa3\x14\xd8\x02\x1f\x25\x6a\x72\xc8\xc6\xa4\x2d\
\x18\xba\xf6\x1d\x01\x62\xba\x23\x8a\xc4\x4f\x8f\x3e\x61\x30\xb2\
\x9d\xb3\x3c\x5b\x84\x2d\x48\x27\x54\x1b\x4f\xf0\x3a\x8f\x6e\xad\
\x00\xbe\x7e\xda\x48\x14\x84\x48\xcc\x96\x56\xed\x6e\xbb\xda\xb6\
\xd3\xa3\x91\xbe\x2a\xc1\xdf\x2f\x5d\x9e\xef\xe6\xc6\xb4\x91\xcc\
\x40\xff\x25\xc1\x2e\x59\x9e\xad\x43\xcc\x0e\x1a\x02\x57\x9b\x8c\
\x67\x02\x9d\x45\xb7\x56\x00\x40\xe3\x5b\xf2\x5f\x69\xb5\x7d\xee\
\x57\x1d\x03\x25\xc6\x49\x0c\x48\x16\x77\xea\xa6\x3e\x31\x1d\xcc\
\x3f\xaf\x5d\x88\x14\x14\x4a\x1c\x23\x31\x3c\xcb\xb3\x5d\x2e\xf1\
\x6f\x09\xf6\x8e\x57\x05\x76\x1a\xdd\x5e\x01\x48\x42\xd2\xa7\x88\
\x57\x72\xec\x2f\x77\x00\xe2\xc8\x74\x50\x6c\xaf\x5e\xfb\x79\xbe\
\x9b\x1b\xd3\x0a\x5e\xb9\x7e\x11\xc3\x96\x2f\x01\xe9\xc7\x88\x43\
\x9b\x9d\xe0\x95\xfe\xbf\x81\xf7\xf3\xdd\xd6\xde\x4e\xb7\x57\x00\
\x41\x83\x90\x05\x75\x60\x8f\x0a\xab\x6e\xb6\xd7\x3c\xd6\x4f\xd8\
\xf9\x61\x54\x77\x60\xbd\x22\x7b\xe5\x9a\x85\xf9\x6e\x72\x4c\x0b\
\xfc\xf3\x9a\x4f\xd9\x73\xe8\x48\x96\x0f\xdb\x60\x6f\xc1\x25\xc2\
\x86\x66\x79\xa6\x0d\xc2\x1e\x35\x05\x95\x0d\x71\xf9\xe0\x4e\xa5\
\x47\x78\xcf\x5e\xbd\x66\x01\x66\x0c\x96\xb3\x99\x82\x1f\xe6\xa8\
\xea\x3b\x1f\xb8\xd8\xd0\x83\xc2\xaa\xbf\xf1\xb3\x8d\xf2\xdd\xec\
\x98\x26\xbc\x72\xcd\x42\xcc\x28\x96\xb3\x1f\x83\x2e\x32\xbf\x5a\
\xb3\x19\x82\xbf\x63\x76\x28\xf0\xc5\x37\xce\x1e\x95\xef\x66\xf7\
\x6a\xba\x77\x1e\x40\x23\x06\x2e\xb2\x72\x8c\xeb\x81\xdd\x25\x46\
\x66\x51\x5d\x9b\x22\xae\x17\xb6\x2f\x30\xfd\x95\xab\x16\xfe\xc7\
\xcc\x95\x21\xa2\x3d\xcf\x89\xd7\x09\xe4\x8b\x97\xaf\x9c\x0f\x52\
\x48\x10\x0e\xc2\xb1\x8b\x60\x2c\xa6\x9f\x02\x83\x9b\x8d\xed\xbe\
\xb4\x50\x29\x70\x5d\x90\x4e\x7f\x91\x2e\x68\xed\x6e\xe5\x31\xed\
\xa5\x47\x58\x00\x00\x2f\x5f\xb5\x10\x41\x68\x70\x0e\xf0\x1b\x68\
\x71\x2f\xfb\x72\x60\x36\xf0\x06\xd8\xb3\x10\xfc\x05\xd4\xf0\xcd\
\x73\x46\xe6\xfb\x36\xfa\x0c\xff\xb8\x7a\x21\xc2\xc2\x40\x7c\x0b\
\xf4\x63\x60\x37\x7c\x0d\xc2\x21\xe4\x7e\xef\xd2\xc0\xd5\x86\x5d\
\x28\xd4\xf0\xcd\x73\x62\x2b\xae\xb3\xe9\x31\x0a\x00\xe0\x95\x2b\
\x17\x02\x36\x48\x70\x39\x70\x1c\xad\xb3\x60\x96\x03\xe7\xd5\x05\
\x0d\x77\x17\x28\xa9\xbd\xe2\x97\xaa\xd3\xf9\xc7\xd5\x0b\x48\xd6\
\xf5\x27\x5d\x50\x7b\x28\xbe\x88\x4b\x6b\xec\x78\x01\xf7\xc9\x74\
\xa6\x61\xcb\xbe\x79\x4e\x6c\xfa\x77\x05\xdd\xde\x09\xb8\x3a\xdf\
\xf8\xf9\x46\x38\xa8\x10\x5c\x28\xb8\x4b\x64\x72\x03\x68\xf1\x18\
\x2e\x71\x6a\xa1\x2b\x18\x65\x71\xc6\x60\xd7\x10\x05\xa4\x92\xb5\
\xc3\x24\x4e\x17\x8c\x6a\xf1\xf9\xf8\xe7\x97\x12\xdc\x27\xd9\xb9\
\xb8\x20\x16\xfe\x2e\xa4\x47\x29\x00\x80\x6f\xfd\x7c\x14\x88\x65\
\x88\xf3\x10\x17\x03\x4b\xb2\xc4\x90\xd7\x38\x80\x2d\x70\xec\x1a\
\x67\x94\x76\x29\xdb\x03\xdb\xb7\xe2\xd9\x94\x21\xae\x95\x82\x33\
\x81\xcf\xbf\x75\x6e\x3c\x4d\xeb\x4a\x7a\x9c\x02\x00\xf8\xd6\xb9\
\xa3\x70\x4e\xe5\xce\xe9\x2a\x27\x8e\x41\x3c\x87\xa8\xce\x35\xcc\
\x48\x0c\x10\xec\x59\x6f\xa5\xfc\xe3\x77\x8b\xf3\xdd\xfc\x5e\xcd\
\xcb\xbf\x5b\x8c\x93\x90\xd8\x43\x62\x68\x0b\xa6\x59\x1d\xe2\x25\
\xc1\x78\x87\xfb\x0d\x44\xcb\x62\xe1\xef\x7a\x7a\xa4\x02\x00\xf8\
\xf6\x79\x1b\xa1\x74\x98\x1e\xbe\x7e\xea\x4f\x26\x3b\x1a\x6c\x3c\
\xd8\x23\x60\xcb\x51\xd6\x75\x03\x7b\x14\x44\xc3\x86\x28\x8e\x2b\
\x77\x2a\x0e\x30\xc2\x7e\x60\x7b\x82\x59\xd3\xfc\x7e\xb0\x32\xb0\
\xa7\xc1\x8e\xc7\x38\xbc\xa6\xb0\xfc\x69\x41\xdd\x5e\xe7\xc6\xbe\
\x99\x7c\xd0\x63\x15\x00\xc0\x77\x2e\xd8\x90\x9d\x26\x6e\x86\xd2\
\x56\x36\xf4\xfc\xf5\x1f\x23\x62\xac\xc4\x15\x82\x28\x4b\x6e\xf9\
\x0e\x82\xad\x62\xf1\xef\x5c\x32\x99\x9b\x1b\x49\xec\x9a\xe5\x19\
\x20\x71\x9b\x93\x1d\x11\x95\x17\xcd\x70\xa9\x70\xe9\xf7\xcf\xd8\
\x9e\xef\x9c\xbb\x71\xbe\x9b\xdd\x67\xe9\xd1\x0a\xa0\x91\xbd\x2e\
\xd8\x90\x31\xe9\x10\x67\xd4\x4b\xfc\x59\x62\x59\x96\xf9\xe6\x08\
\x89\x3d\x9c\x73\xbc\x7c\x75\xbc\x70\xa8\x33\x68\x74\xea\x21\xbe\
\x22\xb1\x61\x96\x67\x50\x2e\xe9\x4f\x48\xb5\x7b\x5f\x3e\x94\xef\
\x9c\xbf\xae\xfb\xbb\xc4\xac\x2b\xbd\x42\x01\xac\xc4\xbf\x7c\x1f\
\x65\x76\x9d\x6d\x7e\xaf\x62\xcf\x00\x2b\x4a\xd7\xc7\x15\xa6\x3a\
\x83\xbf\x5f\xba\x88\x30\xb2\x50\x62\x4f\x68\x52\xa7\xcd\x9b\x5e\
\xf3\x20\x78\xb7\x1d\x97\x8e\xe9\x24\x7a\x95\x02\x50\x10\x10\xd4\
\x46\xe5\xc2\x5e\x6d\x96\x5f\xee\x73\xcc\xbf\xe2\x08\x36\x8c\xeb\
\x07\x74\x0e\xce\x8c\x74\xc8\x70\x61\x5f\xcf\xd2\xf7\x48\xf6\x9a\
\x93\x2d\x8b\x4b\x7c\x75\x1f\x7a\x95\x02\xd8\xfb\xbc\x0d\x88\x8a\
\x12\x20\x5e\x45\x54\xaa\x79\x34\x60\x94\xc4\x97\x63\x3f\x60\x27\
\xe1\x00\xc7\x8e\x88\xad\xb2\x54\xf8\xa9\x47\xbc\x62\x28\x9a\x13\
\x57\xf9\xee\x36\xf4\x2a\x05\x00\xfe\x5d\x73\xf0\x96\xe0\x53\x9a\
\x24\x09\x01\xfd\x10\x7b\xa6\xea\xb0\x3f\x5f\x1c\x87\x03\x3b\x92\
\xbf\x5c\xbc\x18\x19\x8d\xe1\xbf\x21\xd0\x2c\xf2\xb7\x58\xf8\xf5\
\xfd\x27\xdc\xb9\x61\xbe\x9b\x1b\x93\xa1\xf7\x29\x00\x3f\xd2\x2f\
\x95\x78\x5d\x34\x4f\x3c\x11\xec\x1a\x16\x32\x28\x9e\x05\x74\x2c\
\x32\x21\xa7\x62\xc1\xae\x59\xfb\x5d\xfc\x0f\x58\x18\x5b\x5f\xdd\
\x8b\x5e\xa7\x00\x20\xc0\x64\x29\x64\xaf\x23\x4b\xfb\x61\x69\x8d\
\x63\x6b\x93\x8d\x20\x9e\x87\x76\x30\x01\x22\x18\x82\x6c\x87\x2c\
\x7d\x0e\xb2\xd7\x95\xa6\x9a\xb0\x67\x2c\x40\xed\x2b\xf4\x3a\x05\
\xf0\xdd\x0b\xd6\x6f\x34\x39\x67\x49\x54\x64\x19\x89\x86\x4b\xec\
\x18\x8f\x44\x1d\x4b\x66\xce\xbf\xb5\xf0\x15\x7e\x9b\x58\x5d\x95\
\x82\xb7\x15\x18\xdf\xfd\xe5\x88\x7c\x37\x35\x66\x35\x7a\x9d\x02\
\x00\x1a\x1d\x7e\x9f\x00\x9f\x67\xc9\x42\xed\x2f\xd8\x2d\x88\x1c\
\x2f\x5e\xfc\x45\xbe\x5b\xda\x2b\xf8\xfd\x45\xf3\x31\x05\x48\xec\
\x2e\x31\x24\xcb\x82\x9f\x15\x12\xef\xc7\x4a\xb7\xfb\xd1\x3b\x15\
\x00\x60\x46\xa9\xe0\xad\xac\x7f\x14\x5f\x8f\x82\x70\x90\xc5\xf5\
\xa6\x3b\x84\x02\x15\xe0\x14\x15\x23\xf6\x20\xfb\x12\xf3\x77\x81\
\xa5\xf9\x6e\x67\x4c\x73\x7a\xa5\x02\x70\x85\x49\x94\xa6\x1e\xd9\
\x6b\xc8\x5c\xd3\xf9\xa8\xb0\x9d\x85\x6d\x1d\x0f\x48\x1d\x83\xdf\
\xb7\x35\xd8\x58\xd8\xee\xb9\xe6\xff\x96\x4a\x54\xca\xe2\x1e\xef\
\x6e\xf4\x4a\x05\xb0\xff\x2f\x86\x21\x33\x24\xfb\x57\xa6\xbc\xf4\
\x9a\x4b\x50\xc5\x7a\x48\xdf\x31\x97\xe6\x0f\x17\xad\xc8\x77\x73\
\x7b\x34\x7f\xff\xf9\x32\x9c\x01\xe2\x5b\x88\x8d\xb2\xf8\x5c\x2a\
\x24\x5e\x73\x61\xc4\xfe\xbf\x8e\xc3\x7f\xdd\x8d\x5e\xa9\x00\x20\
\x33\xf7\x34\xe6\x08\xde\xca\x52\x34\x24\x10\x7c\x2f\xb2\xc4\x60\
\x5c\x3a\xdf\x4d\xed\xd1\xd4\x15\x47\x04\x3e\xfc\xf7\x7d\x41\x41\
\x96\x62\x1f\x73\x65\xf6\x56\x1c\x74\xe9\x9e\xf4\x5a\x05\xe0\x08\
\xb0\x28\x5d\x2e\xf1\x97\x1c\x71\xe9\xaf\x98\xd8\x2d\x2e\x12\xb2\
\x6e\x64\x52\x7d\x77\x44\x7c\x33\xdb\xea\x3f\xc4\x5f\xfb\x19\x5f\
\xc4\xfe\x96\xee\x49\xaf\x55\x00\x3f\xb8\x68\x04\x8e\x04\xc8\xfe\
\x8c\x6c\x49\x96\x79\xe9\x50\xc9\x7e\xea\x44\xe2\xf9\x0b\x62\xff\
\x54\x7b\x78\xe1\x82\xa5\xa4\xd3\x66\x92\x1d\x24\xd9\xc8\x2c\x7d\
\x5c\x2e\xd9\x8b\xd5\x69\xb4\xff\xc5\xeb\xe7\xbb\xb9\x31\x59\xe8\
\xb5\x0a\x60\x35\xde\x41\xbc\x92\x2d\x04\x25\x71\x00\xb2\xed\x63\
\x2b\xa0\x7d\x48\x10\x04\x6c\x2e\x71\x50\x8e\x53\xfe\x83\xec\xdf\
\x71\xff\x76\x5f\x7a\xb5\x02\x28\xa8\x0b\x88\xa0\x46\xf0\x24\x50\
\x9f\xa5\x1e\xdd\x66\xc0\xe1\x2e\xe5\x82\xe7\x7f\x19\xe7\x04\xb4\
\x85\xe7\x7f\xf5\x05\x89\x60\x20\xc0\x4f\x80\xd1\x59\xa6\x58\x91\
\xc4\xd3\x41\xe0\xca\xd2\x2e\x76\x00\x74\x57\x7a\xb5\x02\xd8\xe7\
\xaa\xe1\x04\x5e\xd0\xff\x28\xf1\x66\x96\x97\xd4\x24\x0e\x0f\x12\
\xc1\x0e\x58\xfc\x92\xb6\x05\x61\xa4\xd2\x95\x5b\x48\x8c\x95\x48\
\x64\xe9\xdb\x39\x12\xbf\x4f\x47\xc6\x81\x97\xc5\xd9\x7f\xdd\x95\
\x5e\xad\x00\x00\x30\x23\x5d\x6b\x8b\xc1\x1e\x02\x8b\x9a\xcd\x53\
\xb1\xad\x25\x3b\xd6\x64\x89\xe7\x7f\xb9\x2c\xdf\xad\xed\x11\x3c\
\xff\xab\xa5\x84\x81\x02\xb0\xa3\x91\x8d\x59\x59\xef\x6f\x55\x9f\
\x02\xf6\x78\x41\x43\x30\xcf\xe2\xd1\xbf\x5b\xd3\xeb\x15\xc0\x0f\
\x2e\x19\x41\x50\x08\x92\x3d\x29\xf1\x6e\x8e\xba\xf4\x47\x46\xb0\
\x87\x03\x9e\xfd\xf9\x92\x7c\x37\xb9\xdb\xe3\x64\xa4\xa3\xe0\x4b\
\x82\x09\x82\xb0\x59\x98\x55\x7c\x24\xd9\xc3\xf5\x49\xf4\x83\xcb\
\xe3\xd1\xbf\x3b\xd3\xeb\x15\x00\xf8\x97\x72\xd0\xfc\xfe\x1f\x4b\
\x94\x20\xd2\x59\xcc\xd5\x91\x12\x67\xc9\xd9\x10\xc2\x30\xdf\xcd\
\xed\xd6\x3c\x7b\xde\x72\xe4\x82\x7e\x12\xa7\x29\x53\xf8\xa3\xc9\
\x21\xc4\x83\x81\xf4\xae\xc5\xc9\xff\xdd\x9e\x3e\xa1\x00\x0e\xbc\
\x6c\x04\xe5\x9b\x54\x21\x78\x58\xf0\xdf\x1c\x5e\xe9\xef\x83\x8e\
\x71\xe9\x02\x7b\xf6\x17\x71\xd1\xd0\x6c\x3c\x73\xde\x72\x12\xe9\
\x34\xe0\x0e\x46\x1c\xda\xec\x04\xdf\xaf\xb3\x05\x33\x23\x70\xff\
\x17\x8f\xfe\xdd\x9e\x3e\xb3\x38\x3b\x32\x51\xd0\x90\x58\x10\x25\
\xdc\x2d\x82\x9d\x4c\xf4\x6b\x72\x4a\x91\xe0\x4c\x0b\xd2\xff\xc6\
\xf4\xfa\x53\x67\x55\xf0\xe3\x6b\x07\xe5\xbb\xd9\xdd\x06\xdd\x2f\
\x9e\x7d\x7b\x05\xa9\x30\xb1\x23\xf0\x73\x83\x01\x4d\x15\xa9\x20\
\x65\xe2\xce\x21\xc9\xaa\x39\x65\xa9\xfe\xf9\x6e\x32\x00\xcf\x9c\
\xbb\x02\x20\x00\xf5\x03\x12\x98\x35\x84\xb2\x5a\x81\x0e\xb8\x62\
\x68\xbe\x9b\x97\x77\xfa\x94\x87\xe6\x99\x73\x57\x80\x18\x84\x69\
\x2a\x70\x48\x8e\xd3\x9e\x93\x98\x84\xb1\xf8\x47\x57\x0c\xcf\x77\
\x93\xbb\x0d\x5e\x90\x6c\x28\xb8\xdb\x80\xc3\x73\x9c\xf6\x47\xe0\
\x68\x60\xe9\x0f\xf3\xdc\x77\xcf\x9c\xb7\x0c\x8c\x90\x88\x2f\x83\
\xfd\x18\xd8\x19\x18\x88\xdf\x2c\xf6\x75\xc1\x93\x4a\x44\x1f\x98\
\x0b\xf4\xa3\x3e\x6c\xa9\xf4\x89\x29\x40\x23\xa6\x24\x32\x55\x08\
\xbb\x46\xf0\x49\x8e\xfd\xea\xbe\x07\xfc\x8c\x48\xc5\x4f\x9f\x13\
\x4f\x05\x00\x9e\x3e\x67\x19\x44\x51\x52\x72\xa7\x0a\x7e\x92\xb5\
\xdf\x60\x89\xe0\x2a\xc1\xd2\x7c\x67\xfd\x3e\x7d\xce\x72\x94\xb6\
\xc1\x4a\xdb\x59\x92\x3d\x26\xf8\x95\xe0\x40\xc1\xb7\x05\x3f\x95\
\xb8\x1c\xf1\xb8\xa5\xc2\x43\x82\x54\x10\x3c\xd3\x87\x9f\x73\x9f\
\xb2\x00\x00\x9e\x3a\x77\x05\x0a\x02\x0b\xa2\xe8\x4c\xe0\x72\x41\
\x41\x96\xd3\xaa\x10\xe7\x39\x74\x6b\x80\xb9\x1f\x5f\xd5\x77\x2d\
\x81\x27\x7f\xb6\x82\x86\x86\xc0\x0a\x0b\xa3\x63\x30\x6e\x00\xb2\
\xd9\xcd\x91\xc1\x65\xb2\xe0\x62\x50\xfa\xc7\x57\x0c\xcb\x4b\x5b\
\x9f\x3a\x77\x05\x41\x83\x43\x89\x60\x2b\x99\x2e\x00\x0e\x37\x3f\
\xb5\x6b\x86\x5f\xc0\xc8\x22\xc4\x09\xc9\x64\xe2\xe9\x86\x86\x14\
\x07\x5d\xdd\xf7\x2c\x81\x3e\x65\x01\x00\xfc\xf8\x8a\x61\x58\x24\
\x89\x60\x9a\x64\x4f\xe6\x58\xbf\x3e\x00\xec\xfc\x80\xe0\x90\x01\
\xc9\x94\x3d\xf3\xf3\xbe\x39\x42\x3c\x73\xee\x52\x3e\x5f\x3e\x94\
\xc2\x42\xf7\x03\xb0\xdf\x22\x1b\x9a\xa3\xbf\xfe\x24\xe9\x36\x9c\
\xcb\x9b\xf0\x3f\x73\xee\x32\x8a\x0a\x52\xa6\x84\xed\x25\x28\x41\
\x36\x0e\x59\x91\xb2\xb7\x97\xcc\xff\x1f\x09\x76\x51\x2a\x15\x6d\
\x6f\xd6\xe7\x44\x01\xe8\x83\x0a\x00\x80\x4d\x86\x80\x53\x29\x70\
\x31\xf0\x76\x96\x1a\xf6\x20\x36\x00\xae\xac\x6a\x28\xf8\xbf\x90\
\x14\x4f\xfd\xac\x6f\x29\x81\x27\xcf\x5c\x41\x38\x34\x60\xa3\xe1\
\xa5\xdf\x96\xb8\x06\xb1\x69\xb3\x5d\x97\xfd\xa9\x1f\x02\x17\x3a\
\x82\x45\xf9\xf2\x28\x3f\xf9\xb3\x15\x44\x51\x50\x5c\x5b\x9b\x9c\
\x28\xd9\x0c\xc4\x37\xfc\x4e\x24\xd0\x8a\x63\x17\x89\x5f\xc9\x31\
\xf8\xc9\xb3\xfb\x5e\x6d\x88\x3e\x37\x05\x68\xe4\x89\x9f\x2d\xa7\
\xc0\x92\xa4\x94\xfe\x11\x70\x07\x90\xab\x5a\xc5\x47\xc0\xa9\x75\
\xe9\xf4\xf3\x85\x61\xc8\x4f\xae\xe9\xfd\xd3\x81\x27\xcf\x2a\x43\
\x1b\xcd\xc2\x16\xed\xb8\x17\x70\x33\x30\x26\xc7\xa9\xa5\xc0\xe9\
\xb5\x61\xfa\xde\x22\x97\xd0\x4f\xae\xea\xda\xd1\xff\xd1\x9f\x2f\
\x25\x19\x25\x70\xa6\x0d\x0c\xce\x01\x8e\xc3\x3b\xfa\xda\x4a\x3d\
\x70\x5e\x10\x44\x37\x48\x41\x9f\x9a\xf2\xf5\x4d\x0b\x00\xf8\xc9\
\xd5\xc3\x49\x11\x11\x24\xec\x59\xe0\x32\xa0\x2a\x6b\xde\x8a\xd8\
\x4a\x70\x73\x61\x98\x38\x88\xba\xfa\xe0\x89\x33\x7b\x77\xba\xf0\
\x53\xe7\x2c\xa7\xa8\x5f\xa5\xb1\x68\xc7\xfd\x24\x6e\x23\x8b\xf0\
\x67\x9c\x7e\xf5\xc0\xb5\xe6\x78\xb0\x5f\xba\xeb\x85\xff\xc9\xb3\
\x57\xb0\x5e\xbd\xe1\xd0\x2e\x88\xbb\x25\xce\x20\x8b\xf0\xaf\xb6\
\xf0\xeb\x23\xe0\x45\x20\x95\x25\x7c\x59\x08\x9c\xe9\xa2\xf0\xdb\
\xa6\x80\xc7\xce\xec\x3b\xcb\xc3\xfb\xac\x02\x00\x38\xe8\xaa\xa1\
\x44\x0d\x72\x72\xdc\x2d\x71\x13\x50\xef\x0b\x5c\x34\x2b\x6a\xb1\
\x25\xd8\x2d\x14\x15\x4f\xb0\x30\x51\xf8\xe4\xd9\xa5\xf9\x6e\x7a\
\xa7\xf0\xe4\xd9\x65\xa0\x44\xa2\xb6\x76\xc0\x21\xc8\x6e\x07\x76\
\xc8\xe6\xf1\xf7\x6b\x2a\xb8\x47\x8e\x1b\x9d\x91\xfa\xf1\x35\x5d\
\x2b\xfc\x8f\xff\xac\x0c\x67\x41\x72\x45\x22\xfc\x09\xd8\x4c\xe0\
\x40\xb0\xb0\x79\x5b\x0d\x30\x01\xaf\x48\x3a\x56\x30\x49\xe2\x6f\
\x7e\x9f\xc2\x35\x4a\xc4\x21\xb1\x89\xb0\x8b\x9c\xd8\x3c\x0c\xfa\
\x4c\x7a\x4c\xdf\x9d\x02\xac\xce\x13\x67\x97\x62\x30\x58\xde\x12\
\x38\x4e\x22\x91\x6d\x71\xa0\x44\x19\x70\xbd\xcc\x6e\x40\x2a\x3b\
\xf8\xda\xde\x93\x48\xf2\xc4\x99\xa5\x20\x06\x28\xe0\x38\xe0\x17\
\x18\xeb\x35\xed\x82\x8c\x32\x94\x19\xf7\x01\x67\x01\x4b\x7f\x72\
\x4d\xd7\xf6\xc1\x63\x67\x96\x82\x6c\x90\x05\x3a\x11\xf8\x99\x19\
\x59\x5d\xf7\x12\x98\x51\x07\x3c\x04\x5c\x1c\x88\x79\x11\x60\xc6\
\x37\x11\x33\x65\x6c\xde\xec\x11\x0b\xc9\xb8\x0b\xe9\x6c\x8c\xaa\
\x9f\x76\xb1\x62\xcb\x07\xb1\x02\xc8\xf0\xf8\x59\xa5\xc8\x18\x61\
\xe2\x77\xc0\x78\x72\x67\x49\xd6\x03\x4f\x00\x97\xa8\xc0\xbd\x67\
\x0d\x01\x3f\xed\xc1\x8a\xe0\xc9\x9f\x95\xf1\xf9\xe7\xe5\x6c\xb8\
\xe1\xa0\x2d\x81\xf3\x80\xa3\xa0\x59\x96\x64\x23\x02\x1e\x41\x9c\
\x85\xb1\xb0\x2b\xef\xfb\xc9\x33\x97\x53\x93\x28\xa6\x28\xaa\xdf\
\x12\xf8\x35\x3e\x19\xa9\xa8\x85\x8f\x2c\x03\xae\x11\xdc\x6a\x50\
\xf1\xd3\x6b\x87\xf2\xc4\x99\xa5\x04\x91\xb3\x28\x11\x9c\x0c\x5c\
\x09\x14\x67\xf9\x5c\x2d\x70\x1e\xce\xdd\x84\x99\xfb\xe9\x75\xbd\
\x5b\x09\xc4\x0a\x60\x35\x9e\x38\xab\x14\x60\x84\xe0\xb7\xc0\x44\
\x44\x32\x6b\x0f\xf9\x39\xe4\x5b\x82\xcb\x4d\x7a\x5a\x58\xed\xc1\
\xd7\xf7\x3c\x25\xf0\xf8\x59\xa5\x60\x14\xe0\xd8\x17\xb8\x00\xf8\
\x1a\x96\xf3\x9d\x70\xf8\xd1\xf4\x67\xc0\xe7\x5d\x29\xfc\x0f\x9f\
\xb5\x9c\xea\x30\x6d\x83\xd2\x05\xdf\x32\xb8\x0c\x63\x4f\xb2\xbd\
\xbb\x8d\x73\x7b\x63\x16\xf0\x6b\x89\x67\x81\xf4\xc1\xd7\xad\x6a\
\xeb\x13\x67\x95\x62\x62\x80\x33\x6e\x40\x4c\xcc\xf1\x7c\x17\x02\
\xc7\x2a\xe1\xfe\x18\xd4\x25\xf8\xc9\x4d\x83\xbb\xec\x5e\xbb\x9a\
\x58\x01\x34\xe1\xb1\x33\x4a\x31\x34\x4c\x66\xbf\x04\x4e\x00\x5a\
\x4a\x6a\x2f\x07\xee\x13\xba\xc9\x99\xcd\x09\x84\x0e\xb9\xae\xfb\
\x2b\x82\x47\xcf\x2c\x25\x15\x44\x14\xb8\x70\x0b\xe0\x44\xe0\x58\
\xa0\xa5\x2c\x98\x06\xa0\x04\x74\x01\xd8\x92\x83\xbb\xf0\x1e\x1f\
\x3b\x6b\x05\x40\x31\xb2\xa3\xf0\x4a\x6a\xb3\x16\x4e\x8f\x80\xe7\
\x81\xf3\x8b\xd3\x7a\xab\x2e\x61\xfc\x34\x4b\x5b\x1f\x3b\xb3\x14\
\x83\x6d\x04\xf7\x02\x5f\xcd\x71\xad\xd7\x04\x63\x0d\x3e\xec\xca\
\xfb\xed\x6a\x62\x05\x90\x85\x47\xcf\x2c\x05\xa3\xbf\x89\x93\x81\
\x5f\x20\xcb\xfe\x06\x98\x40\x08\xec\x7d\xe0\x36\xe0\xe1\x81\xd6\
\x6f\x49\xb5\xea\xf8\xc9\xf5\xdd\x6f\x21\xd1\xa3\x67\x95\xb1\xc5\
\x9c\xc1\x7c\xb2\x6d\xc5\x70\xa4\x83\x80\x93\x80\x5d\xfc\xbe\x5e\
\x39\x5e\x05\x53\x25\x70\xa3\x64\x57\x83\xca\x0e\xe9\x22\x4b\xe7\
\xe9\x33\x6a\xa8\xa7\x8e\x80\x70\x03\xd0\xcf\x80\xe3\x30\x0d\xca\
\xda\x4e\x03\x50\x05\x70\x97\x61\x57\xe1\x82\x25\x2e\x59\xcb\x21\
\x57\x6f\x90\xf5\xda\x0f\x9d\x55\x46\xb1\x13\x29\xb3\xfd\x81\x7b\
\x10\x1b\xad\x79\x86\x1a\x2f\x3a\x03\x71\x1a\x46\xf9\xc1\xd7\x0f\
\xe9\x92\xfb\xee\x6a\x62\x05\x90\x83\xc7\xcf\x28\x07\xb3\x02\xc9\
\x1d\x2c\xf8\x0d\xb0\x2d\xb4\xd8\x61\xf5\x82\xd7\x81\x7b\x14\xf0\
\x5c\x10\x34\x2c\x95\x4b\x70\xc8\xb5\xf9\x9f\x43\x3e\x7a\x7a\x05\
\x2e\x2a\x20\x0c\xeb\x87\xca\xf4\x5d\xe0\x38\x83\x6f\x91\x63\x0e\
\xbd\xca\x92\xe6\x33\xe0\x12\xb0\x7b\x41\xb5\x5d\x25\x04\x8f\x9d\
\x51\x4a\x31\xe5\xd4\x32\x64\xe7\xcc\x74\xec\x07\x06\xcd\x0a\x35\
\xac\xd6\xce\x4f\x80\xdf\x22\xee\x07\xab\x3d\xf8\x86\xb5\x9b\xec\
\x8f\x9d\x59\x8e\x99\x05\x72\xee\x54\xc1\xe5\x40\x71\x16\xa7\x67\
\x3d\x70\xb1\x49\x57\x82\xa5\x0f\xbe\xa1\x6b\xee\xbf\x2b\x89\x15\
\x40\x0b\x3c\x7a\x66\x39\x24\x0b\xa0\xa1\xfe\xeb\xc0\x25\xc0\xde\
\x88\x70\x2d\xbd\x56\x87\xf8\x17\xf0\x00\xf0\x3c\x72\x0b\xc0\xa2\
\x43\x6e\xec\x7a\x33\xf2\xb1\xd3\x57\x10\xa0\x20\x4d\x38\x12\x6c\
\x7f\x43\x47\x02\x7b\x62\x2d\x4c\x6b\xfc\xe0\xe7\x80\x7f\x02\xbf\
\x26\xd9\xf0\x57\xd2\x09\x77\x48\x17\x39\xc3\x1e\x3d\xbd\x0c\x83\
\xa4\xe0\x40\xe0\x62\x8c\x9d\x72\xb6\xd3\x97\x26\x7c\x05\xf8\x95\
\xac\xfe\x1f\xa6\xa4\x0e\xb9\xbe\xf5\xed\x7c\xe4\x8c\x32\x30\x0d\
\x30\x67\xd7\x00\x53\xb2\xfa\x3f\xc4\x72\xe0\xe4\x97\x77\x1d\xfc\
\xd0\x37\xdf\x2c\xe7\x90\x5e\x66\x09\xc4\x0a\x60\x2d\x3c\x7a\xc6\
\x0a\x82\xa8\x00\x05\xd1\x48\xa1\xd3\x80\xe3\x04\xc3\xa0\xe5\xce\
\x93\x9f\x37\x7f\x80\x9f\x93\x3e\x07\x7a\x8b\xd0\x95\x22\xd3\xa1\
\x9d\x28\x4c\x8f\x9c\x5e\x0a\xe6\x0c\x85\x83\x81\x1d\x81\x1f\x00\
\x07\x00\xdb\x03\x85\xb9\xda\xbc\xda\x68\x5a\x01\x94\x98\xb8\x26\
\x1c\x3c\xe0\xd3\x74\x59\x05\x87\xdc\xd4\x35\xc2\xff\xc8\xe9\x65\
\x20\x06\x63\x9c\x08\x9c\x6d\x39\xfc\x12\x19\x03\xbd\x0e\x78\x10\
\xb8\x24\x11\xd8\xbc\x86\x48\x1c\x76\xe3\x90\x36\x7f\xe7\xa3\xa7\
\x97\x81\xb4\x39\x66\x25\xc0\x5e\x39\xbe\x6f\x2e\x30\x5e\xd2\x6b\
\x61\x10\x72\x70\x37\x9c\xde\xb5\x97\x58\x01\xb4\x92\x47\x4e\x2b\
\x07\xac\x10\x73\xfb\xe1\xd3\x4e\xf7\xa4\x75\x05\x55\x84\x28\x07\
\x66\x01\x7f\x07\x5e\x11\x9a\x15\x88\xe5\x61\x54\x57\xeb\x82\x04\
\x07\xdf\xbc\x5e\xbb\xdb\xf5\xd0\x19\xa5\xb8\x40\x24\x22\x2b\x92\
\xb3\x61\x06\x3b\x60\x7c\x03\xff\x32\xef\x82\x31\x94\xd6\x3d\x67\
\x07\xbc\x01\x5c\x0d\x3c\x03\xd4\x1e\xda\x45\x26\xef\x63\xa7\x95\
\x53\x1f\x38\x92\x0a\xb6\x42\xfa\x35\x70\x18\x96\x23\xc4\xe7\xa5\
\x7f\x29\x70\x0d\xde\xef\x52\xb1\x2e\xed\x7c\xf8\xd4\x15\x58\x10\
\x82\xb4\x17\x46\x09\x62\xf3\x1c\x91\x81\xbf\x81\x4d\x04\xcd\x3b\
\xb4\x1d\x8a\xa6\xbb\x12\x2b\x80\x36\xf0\xd8\xa9\x65\xbc\xb6\xc9\
\x60\xbe\xb6\xa0\x62\xa4\xd0\xc5\xc0\x24\x5a\x55\x4f\x5c\xab\xfd\
\x62\x55\xc0\xe7\xc0\xdb\xf8\x6d\xb3\xe7\xc8\x34\x0f\xb1\x08\xac\
\xc6\xb0\x7a\x99\x52\x52\x94\xb6\x22\x1c\x9f\xd6\x8b\x51\x49\x93\
\x0a\x03\x83\x50\xce\x25\x81\x42\x33\x2b\x06\x6d\x88\xd8\xc2\x60\
\x34\xb0\x13\xf0\x25\x60\x63\xd0\x40\x56\x3e\xdb\xd6\x3e\x62\x3d\
\x8c\xe9\xe7\xc5\x0d\xc9\x4f\xeb\x92\x69\x0e\xe9\xa2\x97\xfc\xa1\
\x33\x4a\x89\x0a\x1b\x2c\x51\x5b\xb4\x97\xc1\xa5\xc0\x9e\x7e\xc9\
\x5e\xd6\x34\x24\xf0\x8a\xf4\x02\x0b\x82\xdf\x23\xa5\x0f\x69\xc5\
\x7c\x7f\x6d\x3c\x72\x5a\x19\x4e\x66\x81\x69\x22\x70\x2d\x58\xae\
\x21\xfe\x41\x8c\x53\x11\xcb\x0e\xbd\xb1\x77\x84\x06\x63\x05\xd0\
\x0e\x1e\x3e\xb5\x1c\x7c\xe8\xec\x0e\x8c\xe4\x3a\x5d\x4c\xd4\x01\
\xd5\x78\xd3\xfb\x8b\xcc\x51\x0e\x54\xe1\xa7\x11\x69\xbc\x03\xac\
\x00\x18\x00\x0c\x06\xd6\x03\xd6\xcf\xfc\xde\x1f\x28\x6a\x21\x7e\
\xdf\xda\x76\x9c\x03\x5c\x7d\x58\x17\xc6\xbc\x1f\x3a\xb5\x1c\xcc\
\x8a\x4d\x3a\x0a\xe3\x02\xc4\x66\x39\xf3\x2e\xcc\x87\xf8\x0c\xce\
\xef\x97\x2c\x7a\xab\x26\x55\x47\x4b\x42\xf8\xd0\x69\xe5\xc8\xb0\
\xc0\xd9\x20\x20\x29\xa9\x02\xac\xe1\xf0\x9b\xb3\xcb\xf6\xa3\xa7\
\x96\x63\x50\x18\x19\x17\x02\x3f\xcb\x9a\x03\x22\xd2\xc0\xf5\xa0\
\x0b\xc1\x6a\xba\xb2\xaf\x3a\x8b\xbe\x93\xf4\xdc\x59\xac\x7b\xe1\
\xdb\xa2\xcc\x31\x1c\xd8\x22\x8f\xed\xe8\x32\xee\x3f\xbd\x9c\x84\
\x33\x64\x6c\x60\xca\x84\xf8\xc4\xa0\x16\xee\xa3\x12\x71\x97\xb0\
\x2b\x03\xb3\x25\x2b\xea\xea\x18\x7b\x4b\x6e\xe1\x7b\xf8\xd4\x72\
\x24\x06\x04\xe2\x08\xd0\xe1\xc0\x10\x33\xfe\x2d\x74\xd3\xa3\xa7\
\x94\xcf\x4e\x05\x70\x64\x13\xe5\x71\xc8\x4d\x83\x79\xf8\xd4\xf2\
\x7a\x64\xd7\x80\xb6\x00\x8e\xc8\xd2\x96\x04\x70\x22\xd8\x62\x17\
\xda\x8d\x0f\x9d\x56\x91\x3a\xfc\xc6\x9e\xed\x0f\xe8\xd3\x8b\x81\
\xda\x4b\xd3\xc5\x42\x4d\xf6\x18\xa8\x14\x7c\xac\xc6\xad\xc8\xc8\
\xe3\xb1\x66\x3b\xd3\x82\xf9\x82\xd2\x96\xda\xd5\xd9\x3c\x74\x72\
\x19\x05\x6f\xfe\x07\x49\xbb\xe0\x74\xb7\xe0\x0c\xc1\xa0\x9c\x6d\
\x87\x4f\x04\x67\x22\x3b\xdf\xc4\x92\x43\x6e\x1c\x98\x53\xf8\x1f\
\x38\xa9\x92\x47\x4f\x2b\x03\xb1\x31\xe2\x4a\x89\xeb\x05\xfb\x0a\
\x76\x17\x9c\x88\xb8\x35\x82\x6d\x03\x67\xdc\x77\x46\xf3\xfd\x1f\
\x0e\xbb\x69\x30\x92\x96\x23\x7e\xad\xcc\x7e\x92\x59\xfa\xa8\xbf\
\xe0\x3c\x4b\xeb\x28\x35\xc8\x1e\x3a\xb9\xbc\x0b\x7a\xad\xf3\x88\
\x15\x40\x7b\x58\x5d\x5a\xd4\xec\xdf\x1f\x23\x0e\x47\x9c\x00\x3c\
\x81\xf8\x1c\x91\xce\x8b\x06\xf0\x99\x71\x4b\x80\xe7\x10\x67\x22\
\x0e\x45\xbc\xd5\xac\xdd\x5d\xa4\x01\x1e\x3e\xb5\x9c\x20\x0c\x92\
\xd1\x98\xdd\x0f\x02\x66\x00\x07\x22\x12\x39\xda\x21\xe0\x65\xc4\
\x84\x64\x2a\x75\x0f\x8a\x6a\x0f\xbb\x39\xf7\x68\xfb\xf0\xc9\x15\
\xf4\x8b\x8c\xc8\xd9\x57\x05\xd3\x80\xe3\x81\xfe\x4d\xae\xf9\x1d\
\xe0\x4a\x70\xa3\x12\xe9\xc2\xec\x17\x72\xa0\x80\x0f\x80\x9f\x03\
\x73\x73\xf4\xd3\x70\xe0\xb7\x96\xe0\xc0\xe7\xdc\x20\x1e\x39\xa5\
\xe7\xae\x0e\x8d\x15\x40\xbb\xc8\x6c\x7f\xb5\x6a\x1b\xac\xd5\xff\
\x1d\x09\x3e\x91\x63\x3a\xb2\xb1\xc8\x7e\x80\x5f\xab\x7e\x2f\xd8\
\x5c\x7c\xdd\x01\x29\x53\x96\xaa\xe9\xb1\xc6\xf5\x72\x1c\x2b\xcf\
\x6b\xfe\x79\xe1\xfd\x09\xf3\xf0\x0b\x96\x7e\x0e\x1c\x60\xc6\x11\
\xe6\x82\x9b\x85\xcd\x91\x5f\xca\xbb\xda\xf5\x58\xd5\x76\x75\x9e\
\x4b\xe8\xc1\x93\x2b\x70\xce\x06\x45\x11\x67\x49\x76\x17\x30\x26\
\xe7\xbd\x79\xbf\xc8\x0c\x60\x3c\x96\xfc\x5b\x2a\x11\xea\xb0\x5b\
\x72\xe7\x51\x3c\x74\x72\x05\x82\x82\xda\x84\x3b\x02\xd9\x0c\x60\
\x5f\x89\xa0\xf9\xf5\x01\xf1\x43\x14\x5c\x2c\xc2\xa1\x0f\x9e\x52\
\xd1\xec\x5a\x87\xdf\x3a\x18\x9c\x48\x96\xfd\xe8\x55\xfc\xe2\xa8\
\xc5\xd9\x9f\x0b\x1b\x0b\xbb\xfa\x07\x41\xe5\x3e\x15\x35\x55\x3c\
\x7c\x6a\x55\xa7\xf5\x5d\x67\x12\xfb\x00\xda\x81\x72\xfe\xa3\xf1\
\xdf\xde\x89\x7d\xf8\x2d\x83\xaa\x1f\x9b\xb0\xfc\xed\x2f\x76\x1a\
\xf6\xf6\x90\x8f\x2b\xee\x36\x6c\x03\x7c\x6c\x7e\xe7\xcc\xcf\x6d\
\x81\x8d\xf0\xce\xbd\x22\xa0\x40\x6a\x95\x33\x4f\x12\x29\xfc\xca\
\xb5\x1a\x60\x21\xbe\x34\xd7\xbb\x92\xbd\x0d\xbc\x03\xb6\x58\x41\
\xa2\x06\x39\x8e\xbc\xa9\x3f\x0f\x64\x5e\x76\x9f\xbd\x6c\xab\xb5\
\xdb\x9a\xfc\xec\x58\x1e\x38\xa5\x8a\x64\xba\x9e\x34\xac\xb1\x8a\
\x4f\xb9\x95\xcd\x52\x61\xd7\xe0\xb8\x1d\x28\x3f\xe2\xd6\x7e\x2d\
\x5f\xff\xe4\x4a\x9c\x18\x66\xa6\xd3\x80\xd3\x04\x43\x1b\x15\x59\
\xf3\x02\x2f\x06\x7e\xd0\x1b\x87\xf4\x85\x39\xbb\xe4\xa1\x93\x2a\
\x6b\x0f\xbf\x75\xcd\x3a\x22\x87\xdf\x3c\x84\x87\x4e\xae\xc0\xe0\
\x29\x27\x46\x02\x97\xa9\xd1\x47\xb1\xe6\xb5\xb6\x05\x6e\xec\x5f\
\x3c\xf8\x84\xca\xc0\xbd\xfc\xe8\x49\x15\x1c\x72\x6b\xcf\xf2\x09\
\xc4\x0a\x60\x1d\x59\x99\x35\xde\xe4\xff\x35\x72\xf0\xf4\x95\xe5\
\xa5\xea\x81\xf9\xf7\x9f\x5c\x3b\xdf\x91\x78\x3e\xa4\xae\xd0\x60\
\x80\xd0\x30\x83\x8d\x81\x51\xc0\x06\xf8\xaa\xbb\x83\xf0\x4b\x72\
\x0b\xf0\x11\x80\x08\x1f\x11\xa8\xc5\x47\x0b\xca\x10\x4b\x64\x2c\
\x32\x58\x60\xb0\xcc\x64\x55\x38\x57\x87\x19\x87\xdd\x36\x28\x7b\
\x43\x1b\xdb\xbb\x7a\xa3\xb3\xdd\x40\x07\xf1\xd0\x89\x65\x24\xd3\
\xb5\x56\x1f\x14\xee\x85\xb8\xd4\x7c\x16\x62\xf3\x65\xf8\xca\x34\
\xc1\x78\x07\xb8\xc0\xd0\xb3\x32\xd2\x47\xb4\xe0\xe8\x7b\xe0\xa4\
\x72\xc2\xa0\x3f\x91\xab\xd9\x0e\xb8\x48\xe2\x20\xb3\xac\x15\x9e\
\xb3\x91\x94\x38\x55\xa6\x25\x04\xba\xe5\x81\x93\xcb\xd3\x47\x36\
\xf9\xae\xc3\x6f\x19\xc4\x83\x27\x57\x44\x06\x53\xe5\x13\x92\x7e\
\x21\x51\x94\x25\xe8\xbb\xa3\xc4\x8d\xfd\x52\x9c\xa8\xa4\x5e\xbf\
\xff\xc4\x0a\x8e\xba\xad\xe7\x28\x81\x58\x01\xb4\x07\xad\xf1\xa3\
\x4d\x53\xe7\xa3\x6e\x59\xb9\x04\xbd\x3e\x73\x2c\xc7\x67\x0c\x02\
\xf0\xe0\x89\x95\x08\x99\xcc\x42\x73\x04\x86\x4c\x98\x64\x38\x87\
\x45\x5f\xfd\x24\xd4\x7d\x5f\x4d\x71\xd1\x45\x6d\x7c\xc9\xd4\xe4\
\x57\x35\xf9\x5b\x07\x2b\x81\xfb\x4f\xaa\x20\x82\x62\x67\x85\x2b\
\x57\xf1\x35\xfb\xde\x55\xa4\x05\x2f\x20\x2e\x98\x93\x2e\x7c\x73\
\x9b\x44\x3d\x47\xb7\x30\x92\x3e\x78\x42\x05\x96\xc6\xa2\x44\xcd\
\x7e\xf8\xb5\x02\x5f\x81\x6c\x23\x7e\x8b\x0c\x00\xce\xc7\xd9\xd2\
\x64\x7d\xc1\x03\x0f\x9e\x58\xa9\x23\x6e\x5b\xd3\x12\x38\xe2\x96\
\x41\x3c\x70\x62\x45\x1d\xe2\x5a\x02\x86\x01\x27\x49\x59\x65\x66\
\x57\xe0\xd6\x74\xca\x4e\xdb\x70\x59\xfa\x95\x7b\x4f\xad\xe4\x98\
\x9b\xda\x53\x9a\xb0\xeb\x89\x15\x40\x3b\x58\x39\x62\x75\x02\x99\
\x97\x50\xf8\xf8\x7f\x76\x9e\x6f\x4f\xa3\x57\xfb\xdd\xd6\xf2\xef\
\x75\x60\xfa\x89\xcb\x29\xa6\x88\xc8\x45\x1b\x98\x65\x0a\x75\x1a\
\x03\x73\x5e\xdf\xa8\x04\xee\x44\x5c\x25\x0b\x96\xec\x60\x35\x1c\
\x76\x6b\xee\xf9\xfe\xd3\x3f\xaf\xa4\xaa\x42\x58\xc8\x9e\x88\xdb\
\x30\xb6\x6c\x76\x7f\xab\xae\x5d\x0f\xbc\x82\xd8\x0a\x63\xb3\x66\
\xe7\x18\x23\x10\x97\xa5\x0a\x1a\x96\x17\x0d\xad\xfc\xc3\x7d\xc7\
\x55\x73\xf4\x9d\x6b\x2e\x93\x38\xf2\xb6\x41\x3c\x70\x52\x45\x25\
\x7e\x2d\xc8\x10\xe0\x18\x94\xc5\x77\x66\x7c\x19\xb8\x6d\xf1\x88\
\xc4\x69\xc7\xdc\x34\xe0\xaf\xf7\x9d\x58\xc9\xd1\xb7\x75\x7f\x25\
\x10\x3b\x01\xdb\x81\x30\x1c\x96\xa5\xf2\xb4\xff\xb7\xcb\x1c\xdd\
\x89\x35\xc2\x96\xcd\xc2\x97\x86\x53\x9b\x47\xd0\x66\x3c\x70\x42\
\x15\xdb\x2e\x1b\x46\x84\xdb\x19\xb3\xbb\x85\x9d\x2e\x18\xd8\x3c\
\x9c\xb6\xb2\xdf\x3e\x91\x8f\x4e\x5c\x00\x2c\x39\xea\xd6\x01\x1c\
\x76\x47\xcb\x8b\xa6\xaa\x2a\xc1\x82\x20\x10\x76\x98\xb0\x2d\xb3\
\x86\x63\xfd\xf5\xcb\x24\xae\x34\xc7\x61\x82\x73\x25\x96\x36\x7b\
\x5e\xde\x1f\xb2\x99\xb0\x6b\x6a\x4b\x07\xed\x81\xc1\xfd\x27\x37\
\x77\x0c\x1e\x79\xeb\x20\x90\x2d\x43\xf6\x0b\x89\x47\xfc\x57\x66\
\xb9\x96\x18\x03\x76\xfb\xfd\x27\x55\x1f\x90\x28\x94\x3d\x70\x72\
\x65\xbe\x1f\xfb\x5a\x89\x15\x40\xbb\x58\xdd\xab\xdc\xc4\xcb\xdc\
\xe8\x60\xeb\x76\x89\x39\x39\x22\x17\xab\xd9\x32\xeb\xd2\xe4\x07\
\x4f\xac\xa2\x32\xaa\xe7\xd3\x11\x55\x7b\x23\x4a\xf0\xab\xf9\x12\
\xcd\xbe\xcb\x3b\xe8\x04\xf6\x0a\xd8\x84\x42\x15\xdd\x63\x84\xb5\
\x47\xb6\x61\xde\x2c\x29\x09\x0c\x5f\x33\x9a\xb1\xc6\xf5\x3f\x02\
\x3b\x49\x66\x97\x3a\xb3\xe5\xb8\xe0\x51\xb0\x4b\x80\xca\xe6\xf7\
\x0f\x88\x1d\x81\xeb\xcc\xdc\x4e\x48\xdc\x77\x5c\x6d\xb3\xef\x3c\
\xf2\xb6\x81\x48\x2c\x42\xf6\x33\xb0\xa7\xf0\xd5\x11\xb3\x7c\x3f\
\xa3\x91\x6e\x8f\xea\xec\x68\x53\x90\xb8\xff\xc4\xee\x1d\x1d\x88\
\x15\xc0\xba\x90\x25\x17\x60\x5d\x47\xd1\x2e\x69\x76\x93\xb8\xf6\
\x1a\xff\x6e\x27\x91\x44\xff\xb0\x60\x47\x27\xae\x03\x76\xce\x16\
\x3f\xcf\xf4\x4d\x1d\x62\x06\x62\x5c\x41\x51\xdd\xdf\x6a\x5d\x4a\
\x47\xdc\xd6\xfa\x9d\x84\x85\x50\x14\xd4\x4b\xbc\x25\x70\xd9\xf2\
\x18\x24\xfe\x2a\xec\x31\x9c\xd5\x1f\x75\xdb\x40\x64\x8a\x04\x77\
\x21\x6e\x20\x93\xa0\x95\x25\x7f\xe3\x6b\x82\x6b\xe5\x82\x2d\x09\
\xd2\xdc\x73\x7c\xf3\xd1\xbb\xa6\x22\x89\x05\x2c\x40\x76\x26\xe2\
\x69\x7c\x34\x26\xdb\xb5\x36\x16\x5c\x1f\x39\x77\x8a\x93\xeb\x77\
\xdf\x09\xdd\xd7\x12\x88\x15\x40\x3b\x68\x66\x4e\xaf\x76\x74\x77\
\x56\x2b\x83\xbd\x46\x59\xec\x75\xe1\xbe\xe3\x2b\x31\xa7\x10\x71\
\x2c\x62\xe7\x1c\x9b\xae\x02\x2c\x13\x5c\x2c\xd9\xe9\x92\xcd\x3b\
\xe4\xba\xf5\x38\xe6\xce\xb6\x6d\x23\x7e\xf4\x6d\x83\x20\x70\x80\
\x9e\x00\xde\xcb\xb2\x49\x29\x88\x7d\x71\xda\xa9\xf1\x4b\x8f\xbe\
\x7d\x20\x26\xd5\x61\x5c\x2d\x98\x8e\x70\x39\xda\xb7\x2f\xe2\x6a\
\x13\x1b\x67\x4b\x13\x9a\x7c\x7f\x11\xa9\x82\x04\x38\xf7\x09\x70\
\xaa\xe0\x11\xc8\x79\xad\xe1\x88\x4b\x4c\x76\x89\x49\x23\xee\x3b\
\xbe\x62\xad\xf7\x96\x0f\x62\x05\xd0\x0e\x5a\x14\xf8\x6e\xaa\x04\
\x56\x6f\x6f\xd6\x26\xae\x83\x22\x10\xe0\xcc\xd6\x17\x7c\x2f\xd7\
\xdf\x81\x59\x88\x29\x42\x57\xcb\x5c\xf9\xd1\x77\x0c\x68\xff\xcd\
\x38\x18\xf2\x45\xf2\x43\xc4\x0c\x7c\x88\xb4\xe9\x97\x6d\x26\x38\
\x36\x2d\x92\x33\x8f\xf3\xa3\xef\x51\xb7\x0f\x42\x8e\x72\x73\x5c\
\x88\x17\xdc\xe6\x77\xeb\x73\x30\x0e\x72\x70\x0d\x68\xd4\xbd\x59\
\x84\x76\xdc\xf5\xc5\x1c\x75\xe7\x20\x24\xfb\x4c\xb2\x33\x10\xf7\
\x2b\x9b\xc3\xd6\x5f\x7d\x00\x70\x9a\xc3\x6e\x76\xc6\xd6\x33\x4e\
\xa8\xe4\xde\x13\xba\x97\x22\x88\x15\x40\x3b\x58\x25\x2b\x4d\x1d\
\x41\xb6\xea\x67\x27\x66\xd5\xad\x4b\x9b\xfd\x08\x99\x7b\xdb\xbc\
\xf6\x61\x80\x6d\x00\xb6\x41\xf3\x6b\x1a\xc0\x0b\x82\xb1\x83\x0b\
\x96\x3e\x69\x22\x75\xcc\xed\xeb\x16\x27\x3f\xfa\xce\x81\x94\xae\
\x97\x46\xf0\x90\xb0\x37\x95\xe5\x7e\x10\x07\x87\xb0\xfb\xea\x4f\
\xe1\xe8\x3b\x06\xe2\x8c\x25\x82\x73\x05\x2f\x28\x7b\x3f\x18\xe2\
\x10\x29\xb8\x4a\x04\x1b\xdc\x7b\x7c\xf6\x39\xfc\xd1\x77\x0c\x40\
\x68\x91\x13\x67\x01\xb7\x4b\xd4\x65\xbf\x9e\x25\x80\xc3\xcc\x05\
\xd3\x03\x67\x7b\x25\x28\xb2\xfb\x8e\xaf\xe9\x94\x67\xdc\x1e\x62\
\x05\xd0\x46\xee\x3b\xa1\x02\x9c\x81\x28\xf0\xee\xe6\x26\x69\xb4\
\x4d\xff\xdd\xdd\xf0\xe9\xc2\x64\xd9\x31\x37\x81\x8c\xfb\x8e\x6f\
\x87\xd3\x4a\x60\x2b\xdd\xe2\x4d\xaf\x8b\x43\xf6\x5f\xe0\x9d\xb2\
\xfa\xf5\x39\xfa\x8e\x8e\x49\x92\x91\x02\x12\xa4\xe7\x23\xa6\x03\
\xa9\x66\xdf\x8b\x8d\x04\x9b\x6c\x58\xe1\xbd\xc7\xad\xba\x27\x17\
\x00\xce\x3e\x95\xec\x6c\xe0\xd5\xac\xbb\x1d\x63\x01\x70\x84\x89\
\xdf\x21\xd6\xcf\xa5\x04\xc6\xde\x31\x10\xb0\xa5\x92\xfd\x0a\xec\
\x77\x40\x45\x96\xfb\x07\x99\x01\xdf\x00\xa6\xa7\x5d\xc3\xb8\x08\
\x57\x34\xb3\x3d\xfd\xdc\x09\x74\xd3\xb7\xb4\x7b\x31\x73\x4a\x15\
\xfe\x69\x06\x83\x40\x3b\x02\xdf\x06\x7e\x02\xec\xde\x2c\xb3\x4d\
\x7c\x06\x9c\x28\xf1\xd7\xba\x01\xfd\xab\x07\x96\x96\x72\x64\x49\
\x7e\x0b\x83\xce\x9c\x52\x85\x41\xa1\x7c\x26\xde\xcd\xc0\x0e\xcd\
\x4e\x12\x6f\x02\x0f\x03\x2f\x01\x73\x08\x5c\x19\x32\x8d\xbd\x73\
\xed\xb1\xec\x99\xc7\x55\x61\x30\x4a\xf0\x27\xc4\xf6\x59\x52\x23\
\xbf\xc0\x38\x2e\x91\x6a\x78\xaa\xa1\xa0\x98\xf1\x77\x14\xaf\xf5\
\x9a\xad\xba\x2f\x2f\xd8\x23\x81\xc7\x10\x7b\x64\xf9\xde\x65\xf8\
\xea\x42\x2f\x85\x55\x75\x1c\x75\xbf\xaf\x30\x36\xe3\xb8\x0a\x02\
\x85\xc8\xf4\x55\xe0\x0e\xc4\x2e\x39\x24\x21\x42\xdc\x2f\xe3\xe7\
\x26\x16\x8f\xbd\x2b\xfb\xb4\xc5\x2b\x18\x15\x09\x9b\x08\x5c\x80\
\xd8\x30\xa7\x64\xf9\xea\x50\xb7\x1a\xba\x5e\x41\xf8\x45\x58\x57\
\xc7\x51\x79\x7c\x3f\x62\x05\x90\x83\x69\xc7\x57\x10\x05\x91\x15\
\x44\xc9\xa1\x26\xc6\xe0\x85\xde\x97\xd9\xf2\xe9\xba\xb9\xac\x27\
\xe1\x77\xcd\xfd\x2b\xf0\xb0\x8c\x7f\x08\x5b\x02\x44\xe3\xdb\xe8\
\xf0\x5a\x17\x66\x1c\x5f\x89\x25\x9d\xa9\x3e\x1c\x61\x7e\xf4\x39\
\x18\xd8\x0f\x5f\x48\xa4\xa5\xcd\x3f\x4a\xf1\x95\x8a\x5e\x04\xfe\
\x2c\x63\x8e\x0b\xa2\x32\x93\x69\x7c\x8e\xd1\x7b\xc6\x71\x95\xc8\
\x94\x08\x5c\x70\x1d\x70\x4a\x8e\x6b\xbf\x09\x1c\x63\xb2\x77\x97\
\x0e\x2c\xe3\xcc\xeb\x36\x62\x5d\xb9\xff\xb8\x0a\x1a\x5c\x82\xd0\
\xdc\x24\xe0\x16\xfc\x26\x9f\x4d\x79\xc0\x89\xc9\x06\x35\xe3\xee\
\x5e\x25\xc0\xf7\x4d\xac\xa1\x66\x50\x1d\x45\xd5\x05\xdf\xc4\x97\
\x16\xdb\x29\xc7\xd7\x38\xe0\x21\x99\xce\x91\x0b\x16\x8e\x3b\xe1\
\x4a\x6c\xf7\x8b\xb2\xf4\x41\x35\x51\x10\x84\x89\x28\x3a\x00\xf8\
\x1d\xbe\x06\x63\x2e\xd2\xc0\x1f\x80\x0b\x93\x75\xf6\xdf\x74\x81\
\x74\xcc\xd4\x75\xf0\x89\xac\x03\xb1\x02\x58\x8d\x92\x29\x55\x00\
\xa1\xc9\x86\x81\x76\xc6\x0b\xfc\x77\xf0\x2f\xc7\x90\x36\x57\xdd\
\x11\xb5\xf8\x82\x92\x2f\x48\xfc\xc9\xb0\xb7\xe5\x53\x7f\xa3\xf1\
\x53\x3b\x5e\x19\xcc\x98\x5c\x09\x10\x42\x30\x04\xb4\x53\xa6\xed\
\xdf\x07\xc6\xb4\x58\x09\x38\x7b\xdb\xd7\x50\x06\x86\xfd\x19\x6c\
\xb6\x85\x41\x05\x92\x8e\xb9\x73\xcd\x51\x7c\x86\xef\xbb\x5d\xf0\
\x56\xc4\x36\x59\xae\x07\xf0\x08\x66\x27\x02\xcb\xc7\xdd\xd5\x31\
\xf7\x3f\x63\x4a\x35\xc0\x08\xd0\xc3\x88\xbd\xb3\x58\x01\x65\x82\
\xa3\xcc\xf4\x7c\xd4\x90\xe4\xd8\x92\x55\xa5\x06\x67\x4e\xa9\xa6\
\x3a\x6a\xa0\x5f\x98\xfc\x0e\x7e\x27\xe4\xed\xb2\x2e\xec\xf0\x93\
\x9b\xc7\x10\x67\x4b\xcc\x2f\x4a\x18\x87\xdf\xd1\xbc\xfd\xf7\x4f\
\xaa\x64\xe7\x85\x75\xfc\x77\x93\xe2\xaf\x0a\xae\x44\xec\x05\x2d\
\xbc\x35\x62\x0e\x70\x09\xe8\x31\xb0\xba\xd5\x15\x54\x57\xd1\xe7\
\x15\xc0\xcc\x29\xb5\x00\x09\x49\x23\x40\xbb\x02\x7b\xe3\x47\xfb\
\xd1\xf8\x45\x39\x1d\xd1\x47\x02\x2a\x81\x39\xc0\x6b\xc0\x7f\x4c\
\x7a\x53\xf0\x79\x80\x2a\x49\x36\xd4\x2b\x0a\x19\x7b\xd7\x90\x56\
\x5f\xb0\x64\x42\x1d\x51\x6d\x92\xe4\x80\xfa\x02\x99\x06\x80\x6d\
\x08\xda\x05\xd8\x0d\xd8\x03\xd8\x0e\x9f\xba\xda\x11\xed\x77\xc0\
\x0a\xfc\x28\xfe\x17\x83\x97\x30\x66\x07\x41\x58\xa1\x8c\x32\x28\
\x99\x54\x43\x75\x5d\xc2\x06\x14\xa7\x26\x00\x37\x90\x65\xab\x6e\
\x20\x05\x5c\x8a\xb9\xcb\x90\xa5\x3a\xe2\x85\x9f\x36\xb1\x96\x64\
\x98\xc6\x29\x38\x0a\xb8\x8b\xec\xfb\x1a\x3e\x69\x68\x1c\x50\x39\
\xb6\xc9\x77\x4e\x9b\x5c\xc5\x84\xe2\xfe\xcc\xac\xad\xd9\x1f\xb8\
\x99\x6c\xca\xcb\x23\xe0\xf7\xc0\xd9\x46\x30\xd7\x59\x9a\xf1\x77\
\x35\xbf\xc5\x99\x93\xaa\x48\x5b\x40\x28\xb7\x05\x66\x17\x02\x47\
\x90\xdd\x32\x69\xa4\x1c\xb8\x43\x70\x2d\x51\xb0\x84\xc2\x34\xe3\
\x6f\xef\xba\x14\xe2\x3e\xa9\x00\x4a\x26\x56\x01\x24\x30\xd6\x07\
\xdb\x1d\xd8\x07\x3f\xda\x6f\x6d\xc6\x80\xd6\xf6\xcb\x1a\x61\x40\
\x6b\x43\x67\x8a\x06\xf9\xd1\xf5\x33\xe0\x7d\xfc\xfa\xfd\x05\xf8\
\x62\xa1\xcb\x81\x32\x7c\x4d\x40\xc7\xaa\xa5\x3a\x81\xa0\x9f\xf9\
\xe9\xc7\x30\xfc\xea\xc1\x8d\xf1\x65\xc4\x46\xe3\xb7\xcc\x1a\x6a\
\xd6\xe2\x86\x99\xed\x6a\x7b\x93\x53\x1d\xf2\xca\x40\xf0\x17\xbc\
\xcf\x60\x36\x84\x15\x66\x4e\x06\x45\x0e\x5d\x8a\x38\xc3\xac\xf9\
\x34\x49\xbe\xce\xfe\x29\x9b\xf4\x0f\x1e\x5c\x50\xe3\x18\x77\xf7\
\xba\x5b\x02\x25\x93\xab\x01\x86\x18\xdc\x8f\xf8\x41\x96\x51\xbc\
\x0a\x18\xef\x42\x3d\x5e\x58\x1d\x72\xe4\x7d\x4d\xac\x97\x49\x75\
\x58\x5d\x21\x2a\xaa\xfd\x3e\xa6\x1b\x24\xb6\xcd\xba\x3b\xb4\xff\
\xcf\xdf\x41\x67\xd5\x17\x15\xbc\x51\x54\xdb\xc0\xb8\x7b\xb2\x2b\
\xb1\x99\x53\xaa\x01\x1b\x2c\xe9\x54\xe0\x4c\x89\x61\xb9\xca\xc7\
\xca\xd7\x1a\xfc\x33\x70\x71\x18\x05\xaf\x45\x81\xdc\xf8\x7b\x5a\
\x5e\x06\xdd\x51\xf4\x19\x05\x30\x7d\x62\x15\x88\x84\x05\x36\x12\
\xf8\x1a\x7e\xa4\xff\x16\x7e\x9d\x7a\x7b\xdf\x42\x87\x17\xd8\x15\
\xc0\x26\xe4\xde\x55\xb7\x35\xa4\xf1\xb5\xee\xeb\xf1\x23\x65\x3a\
\xf3\x33\xc2\xfb\x1b\x0a\xf0\x8b\xb7\x92\x99\xdf\x8b\x32\xbf\xb7\
\x97\x3a\xbc\x02\x1a\x88\x2f\x32\x1a\xb6\xe3\x1a\x6b\x58\x06\xc0\
\x4b\x82\xd9\x26\xfa\x61\xdc\x0d\xfc\x5f\x8e\xcf\x7d\x00\x8c\x37\
\xd9\x3f\xa3\x30\xe2\xd8\xbb\xd6\x6d\xc4\xbb\x67\x42\x35\x89\x04\
\x48\x1c\x84\x2f\x24\x92\xed\x82\x2f\x20\x8e\x02\x4a\xc7\xdf\xd3\
\xfc\x71\xcf\x38\xb6\x86\x0d\xe7\x15\xb3\x68\xeb\x9a\xef\x02\x37\
\x92\xcd\x51\xba\x8a\xff\x01\x67\x26\x93\xfd\xfe\x96\x4a\xd5\x32\
\x7e\x6a\xf6\xc7\x3e\xc3\x0f\x34\x05\x32\xfb\x11\x70\x21\xb9\xfd\
\x0c\x8d\x7c\x0a\x5c\x2b\x98\x6e\x50\xd1\x19\xd3\xc4\xa6\xf4\x5a\
\x05\xf0\x8b\xe9\x2f\xb0\xf3\x9f\xf6\xa3\xa1\xb8\xb6\xc0\x9c\x8d\
\x92\x17\xfa\x6f\xe3\x85\x7e\x2b\xb2\x6f\x0d\xdd\x1a\xd2\xf8\x97\
\xfe\x6d\xbc\xd6\x7e\x09\xb1\x00\xe3\x5b\xc0\x58\xbc\xc3\x6d\x70\
\x3b\xaf\xdd\x15\x54\x01\xff\x01\xee\x03\xfe\x08\x1a\x0e\xb6\x37\
\xde\x0a\xda\x1d\x5f\x9c\xb4\xbd\xca\xa0\xd1\x67\xf0\x47\x7c\xa1\
\x92\xb3\xf1\x96\x4a\x36\xfe\x0e\x4c\x00\x3e\xee\x88\x17\xbd\x64\
\x52\x35\x98\x0d\x40\x2a\x01\x7e\x9a\xe5\x94\x5a\xe0\x38\x05\xee\
\xde\x30\x95\x60\xec\xf4\xe6\x8f\xff\xde\xc9\x35\xfc\x7d\x41\x15\
\xdf\xd8\x68\xc0\xb7\x41\x37\x01\x63\x5a\xf8\xca\xb9\x66\x9c\x1b\
\xa0\xa7\x1d\xe6\x72\x59\x32\x25\x93\x6a\x30\x2b\x46\xaa\x19\x83\
\x57\x02\x3f\x84\x16\xeb\x16\xd4\x02\x4f\x03\x97\x0d\xee\xa7\xb7\
\x2b\x6a\xad\x43\xac\xa4\x5c\xf4\x2a\x05\x50\x32\xa9\x8e\x88\x72\
\x02\x06\x16\x19\xda\x18\xbf\x79\xc7\x3e\x99\x9f\x9b\x40\xeb\xcc\
\xe3\x2c\xa4\xf1\xe5\xba\xff\x03\xbc\x24\xf4\x77\xc3\x3e\x84\xb0\
\xd2\x47\xa4\x1d\xc7\xb0\x90\x19\x6c\x34\x18\x6c\x0f\xd0\x41\x78\
\x8f\xfb\x26\xac\xdb\x28\xdd\x51\xa4\xf1\xd3\x8b\xbf\x00\x4f\x22\
\x7b\x99\x20\x58\x8e\x84\x73\x69\x9c\x44\x22\x08\x06\x62\x36\x1a\
\x6f\x19\x7d\x17\xbf\xc6\xbd\xbd\xca\xa0\x31\x12\xd2\x58\xca\x3c\
\xd7\x39\x25\x16\x70\x06\xa2\x7c\x5d\x5f\xf2\x7b\x27\x54\x93\x4e\
\x80\x89\xef\x03\xf7\xe3\xa7\x4a\x4d\xf9\x1b\x70\x28\xb0\x34\x97\
\xd2\x99\x79\x6c\x35\x95\x83\x1c\x03\x2a\x83\x3d\x65\xdc\x80\x57\
\x8a\xb9\x58\x04\x5c\x20\x73\x33\xc1\x1a\x26\xe4\xf0\x69\x3c\x7a\
\xda\x27\x54\x57\x6f\x88\x99\x1b\x2e\xe9\x34\x7c\xa4\x64\x6d\xb1\
\xbf\x59\xc0\xb9\x89\xc0\xbd\x10\xc9\xd4\x59\x0e\xc2\x1e\xaf\x00\
\x4a\x8e\xad\x24\xac\xad\x27\x3d\xa0\xb8\xd8\x60\x33\xc4\x37\xf1\
\x2f\xf1\x1e\xf8\x72\x5b\xad\xad\x12\xd3\x94\x14\xb0\x18\xf8\x17\
\x5e\x70\xfe\x21\x98\x17\x24\xc2\x6a\x39\x31\xfe\xae\xec\x06\xc4\
\xf4\x49\x35\x00\x49\x7c\x69\xe9\xbd\xcd\x0b\xd3\xd7\xf0\xd5\x7e\
\x0a\x5b\xf5\xcd\x1d\x43\x0a\xaf\xb4\xfe\x9d\x69\xff\x5f\x04\x1f\
\x18\x34\xb4\xf4\xf2\x9b\xb0\x28\x64\x20\xbe\xdc\x55\x47\x28\x83\
\x96\x68\x00\x7e\x8b\xe3\x77\x18\xa9\x6c\xa6\x79\x5b\x28\xf1\x7d\
\x5f\x0c\xba\x0b\x38\x3a\xc7\xf7\x9d\xa2\x28\xbc\x2b\x48\xa6\x18\
\x97\x63\xea\x51\x32\xa9\x06\xb9\x14\x16\x24\x77\x01\x5d\x9b\xe9\
\x87\x5c\x94\x02\x57\x01\x37\x01\x55\x2d\x59\x33\x25\x93\xfc\x94\
\x00\xec\x87\x78\x6b\xa0\x25\x0b\x03\xe0\x23\x60\x1c\xf0\x6a\x90\
\x0a\x18\x3b\xa3\x63\xf2\x27\x56\xa7\x47\x2a\x80\x99\x53\x6a\x09\
\x02\x23\x1d\xb9\xfe\x48\x5b\xe0\xcd\xfa\x7d\xf0\x7b\xbd\x8f\xa4\
\xfd\xa3\x6e\x03\xbe\xbe\xde\x6b\xc0\x4b\xc8\x5e\x36\xe9\x93\x1a\
\x57\x57\x5b\x18\x14\x70\xec\xb4\xd6\xcf\x55\xef\x9e\x52\xc6\x17\
\xfd\x16\x31\xaa\x7a\x93\x62\x23\xd8\x0c\xe9\x2b\xf8\xca\x35\x5f\
\xc1\x3b\xee\x06\xd1\xfe\x69\x48\x36\xea\xf0\xe5\xc2\xe6\xe3\x2d\
\x95\x7f\x03\xff\xc2\xec\x63\xc2\xa8\x1a\x17\x30\xbe\x0d\xa1\xb7\
\x2e\x54\x06\xa5\xc0\x59\x2e\x28\x2c\x09\x5c\x4a\xe3\xa7\xb6\xbf\
\x4b\x2e\xff\x96\x18\x35\xba\x06\xe0\x3b\x88\x87\xf1\xbe\x8d\xa6\
\xbc\x86\x71\x08\x62\x61\x4b\xc2\x3a\xed\xd8\x5a\x12\x89\x34\xce\
\x05\xdb\xe0\xb7\x4b\xfb\x21\xb9\xe5\xa5\x16\xb8\x53\xe8\xb7\x60\
\xcb\x26\xb4\x70\xdd\x19\x93\xab\xe9\xd7\xcf\xa8\xae\xd6\x4e\xc0\
\xaf\x80\x83\x68\xd9\x32\x7d\xd4\x60\x82\xa0\xba\x33\x7c\x02\x3d\
\x46\x01\xcc\x3c\xb6\x12\x03\x8b\xcc\x06\x60\xb6\x35\xde\x6b\xbf\
\x37\xde\x44\xdb\x80\xf6\x57\x37\xaa\xc3\x7b\xe0\x5f\xc5\x8f\x94\
\xaf\x0a\xfb\xec\x9b\xf3\x17\xd7\xbd\xb4\xc5\x28\xa6\xdc\xd9\xde\
\x59\x43\x93\xf6\x4f\xae\x01\x48\x38\x69\x10\x68\x23\xb0\xed\xf0\
\xde\xfb\x2d\xf0\x53\x85\x8d\xf0\x61\xbb\x02\xbc\x02\x0b\xf1\xce\
\xbf\xc6\x7a\x3d\x0e\xef\x10\x4c\x65\x8e\x72\xbc\xb2\xfa\x0c\xef\
\x3c\x7a\x1f\x78\x1f\x63\x01\xa2\x0c\x48\x77\xd4\x0b\x33\x6d\x4a\
\x35\x02\x0b\xc5\x40\x60\x34\xe2\x3b\x74\xac\x32\xf8\x0c\x98\xa2\
\x30\xfa\x83\xa5\x13\xac\x8b\x07\xbc\x64\x52\x35\x60\x85\xa0\x9b\
\x81\xc9\x59\x4e\x49\x01\xf7\x83\xdd\x28\x34\xcb\xb0\x54\x2e\x27\
\xde\xbd\x13\xaa\x49\x87\x60\xb2\x8d\x31\x5d\x8e\x0f\xe9\xe5\x7a\
\xcf\xd2\xc0\x43\x18\xbf\x54\x64\xf3\xc3\x82\x28\x93\x2a\x9c\xa3\
\x9d\x13\xab\x01\x06\x63\x1c\x0b\x9c\x09\x6c\x9a\xe3\xd4\xa5\x06\
\x07\x0a\xfe\xd5\xe7\x14\x40\xc9\xf1\xd5\x04\xc5\x01\x51\x95\xeb\
\x6f\xd2\x68\xb0\x7d\xf0\x42\xff\x65\x7c\xa1\xc6\xf6\x0a\x7d\x2d\
\x5e\x68\x5e\xc6\xc7\xb4\xff\x69\xd2\xc2\x9a\xc2\xc2\x86\xed\x17\
\x2c\xe4\xdb\x4f\x6f\xde\xe9\xf7\x76\xef\xd8\x3a\xc2\xea\x42\xd2\
\x43\x6a\x92\x32\x15\x0b\x2b\xc6\x47\x23\x86\xe2\x15\x41\x11\x58\
\x01\x28\x09\x96\x06\x35\x00\x75\x82\x72\x93\x95\x9a\xa9\x4a\x50\
\x8b\x82\xda\x64\x5d\x61\x83\x4b\xa4\x39\x66\x46\x7b\x67\x3b\xad\
\x67\xda\x94\xea\xcc\x96\x5b\x0c\x34\xd1\xe8\x33\xd8\x07\xff\x4c\
\x86\xd1\x7e\x65\xf0\x36\x62\x82\x19\xff\x8b\x1c\x1c\x3b\xad\xfd\
\x2f\xbb\x57\x02\x7c\x1d\x78\x8c\xec\x4e\x48\xe1\x95\xfe\xc3\x86\
\x4d\x77\xce\xcd\x36\xb3\x28\xd7\x14\x64\x86\x57\xde\xc3\x25\xfd\
\x0a\xbf\xdf\x40\x2e\x0d\x25\xbc\x63\xf5\x2c\x85\xc1\xbb\x61\x2a\
\x62\x6c\x0b\x56\xe3\xf4\x49\xb5\x44\x04\x41\x82\xf4\x9e\xc0\xf9\
\xc0\xbe\x59\xfa\xaf\x01\x98\x02\xcc\xe8\x33\x0a\xa0\x64\x52\x0d\
\x58\x60\xb8\x68\x43\x8c\x7d\xf1\x5e\xdd\xaf\xd3\xfe\x70\x15\xf8\
\x7a\xf9\x1f\x03\xff\x00\xfe\x82\xf8\x97\x8c\x45\x40\x6a\x42\x17\
\x84\x5b\x7a\x23\x9d\xa0\x0c\xfe\x24\x98\x62\xf0\xc9\xba\xbc\xec\
\x25\x13\x6b\x40\x96\x24\x70\xd7\x00\xa7\xb6\x70\xaa\xf0\x39\x18\
\xf7\x01\xf7\xe2\xe7\xdc\x2e\xdb\x77\xcf\x98\x58\x0d\x62\x80\x02\
\x4e\xc3\xef\x0e\x3d\xa4\x85\xeb\xfe\x19\x1f\xe1\x58\xb0\xb6\xfb\
\x98\x71\x5c\x35\x4a\x01\x01\x07\x64\xda\xd0\xf4\xba\x69\xe0\x64\
\xe0\xce\x5e\xaf\x00\xa6\x4d\xa9\x66\xd1\x56\x69\x46\x7d\x90\xd8\
\xd0\x7c\xee\xfa\x38\x7c\x0d\xfd\xf6\x3a\xcf\xaa\xf0\x31\xe7\xbf\
\x03\x2f\x99\xf4\x1f\x17\xb0\xc4\x20\x3d\x3e\x0f\x69\x97\xbd\x99\
\xcc\xa8\x6b\x90\x99\x26\xac\x52\x06\x6d\x99\x26\x08\x78\xc0\xc4\
\x19\x82\xa5\xeb\xe2\x14\xf4\x61\x41\x76\x45\x3c\xc6\xda\xf7\x5c\
\x74\xf8\x2c\xcd\x19\xa0\x07\x20\x98\x0f\x52\x53\x81\x2b\x99\x98\
\x59\x54\x65\x76\x0c\x70\x31\xb9\x43\x9c\x29\xe0\x44\x60\xea\xda\
\x84\xb6\x64\x72\x35\xc2\x36\x36\x69\x2a\xb0\x7f\x96\x53\xaa\x80\
\x23\x81\x67\x7b\xb5\x02\x28\x99\x54\x0d\x3e\x81\xe4\xff\xf0\x5a\
\xfb\xeb\xb4\xdd\x83\x2f\x7c\x87\xbd\x0f\xfc\x0d\xec\x6f\x88\x37\
\x24\x2d\xc5\x48\x4f\x58\x47\x2f\x73\x4c\xeb\x58\x5d\x19\x08\xb6\
\x31\xaf\x0c\xf6\xa5\x75\xca\x20\x02\x6e\x73\xc6\xaf\x0c\x2a\x26\
\xb4\x33\x3c\x38\xdd\x8f\xd8\xa1\x05\x9c\x0e\xfc\x86\xec\xc9\x41\
\x4d\x49\x03\xef\x00\xd3\x30\x3d\x9a\x76\xc1\xa2\xd0\xa4\xd5\x2d\
\xc4\x92\x89\xd5\xa4\x5d\x64\x89\x30\x3c\x12\xbf\x88\x28\xd7\xfa\
\xe6\xf3\x81\x4b\x5b\x8e\x0a\x54\x83\xe8\x8f\x71\x05\x5e\x61\x64\
\x5b\x60\xf6\x36\xe2\x40\xe0\xb3\x75\x8d\x92\x64\x23\xef\x0a\xa0\
\x64\x62\x0d\x83\xff\x53\x4c\xd9\x57\x6a\xb6\x35\x9f\x38\x72\x24\
\xad\x7b\x58\x8d\x08\xef\x10\x7b\x0f\xf8\xab\xc1\x3f\x30\xfe\x6b\
\x68\xb9\x20\xca\xc7\x02\x8b\x98\x55\xe4\x50\x06\xfb\xe1\xa7\x09\
\xc3\xc9\xfe\x0e\xd6\x03\x57\x80\x2e\x07\xea\xc6\xb7\x73\xa5\x5c\
\x26\x45\xb8\x18\xf1\x53\xbc\x19\xbd\x1b\xad\x1b\x54\x1a\xf0\xd9\
\x7e\x53\xc1\x9e\x72\x24\xbf\x08\x48\x33\x7e\x6a\x71\xe3\xfd\x84\
\xf8\x77\xf5\x92\x1c\xd7\xab\xc4\x18\x8b\x78\x2a\x97\x02\x98\x3e\
\xb9\x1a\x17\x10\x84\x11\x27\xe3\x57\x0f\x66\xf3\x2b\xa4\x80\x5f\
\x90\xb6\xeb\x08\xa5\x5e\xa7\x00\xa6\x4f\xac\xc6\xa4\x80\xc0\xbe\
\x87\xef\xcc\xdd\x5a\xf9\xd1\xc6\x44\x93\x59\xf8\x5c\xf4\xbf\x1a\
\xbc\x83\x0b\x4b\x09\xe4\xc6\xad\x43\x28\x29\xa6\xf3\x58\x19\x5a\
\x0c\x6c\x00\xa6\x2f\x03\x57\xe0\x73\x24\xb2\x51\x09\x9c\x6f\x8e\
\x5b\x65\xa4\xdb\xfb\xf2\x97\x4c\xae\xc6\x12\xa0\x14\x23\xf1\x35\
\x1c\x26\xe0\x57\x2d\xb6\x26\x54\x5c\x8f\x0f\x09\x4f\x35\xb3\xdf\
\xd7\x93\x5a\x61\x85\x22\x59\x97\x3c\x08\xb8\x93\xec\x61\x46\x01\
\xd3\x30\x4e\x47\xd9\xf3\x02\x4a\x26\xd6\x40\xe8\xc0\xd9\xf7\x80\
\xa9\xf8\x08\x50\x36\x1e\x06\x3b\x11\x58\x91\x2b\x52\xb1\xae\xe4\
\x4d\x01\xcc\xf0\x9a\xb4\x48\x30\x1e\xbf\x67\xdc\xa8\xb5\x7c\x44\
\xf8\x45\x32\x6f\x92\x11\x7a\xe0\x1d\x82\x74\x99\x14\x6a\x42\x3c\
\xd2\xf7\x18\xbc\x00\x44\xe0\x82\x3d\xf0\x02\x90\x6b\xed\xfc\x32\
\xe0\x4c\x0b\x82\xfb\xe4\xa4\x75\x11\x82\x92\x49\x35\x88\xd0\x8c\
\x68\x23\xd0\xe1\x78\x45\xb0\x3d\xad\xf3\x4d\xd4\x02\xff\x30\xb8\
\x4b\x3e\x39\xec\x56\x72\x27\xf1\xbc\x2c\x63\xbc\x89\x79\xd9\x84\
\xff\xf2\x27\xc5\xa8\xa7\x6b\x20\x60\x3b\xc4\x4c\x72\x67\x1a\xbe\
\x61\x30\x56\x30\x7b\x0b\xfa\xb1\xd7\xd4\xce\x11\xd5\xbc\x28\x80\
\x92\x49\xd5\x48\xf4\x33\xe3\x74\xe0\x17\xe4\x9e\x47\x81\x17\xfc\
\x85\xc0\xd3\xa0\x47\xc1\xfe\x47\x14\x96\x63\xd2\xf8\x69\xf1\x48\
\xdf\x53\x99\x39\xb1\x96\xe5\x83\x96\x31\xb4\x72\xd8\x81\xf8\xb9\
\xf4\xc6\x39\x4e\xfd\x08\x38\x0c\xf8\x6f\xc7\xac\x19\xa8\x22\x08\
\x22\x73\x2e\xb1\x25\x70\x14\x70\x0c\xb0\x35\xad\x2b\x8f\x57\x89\
\x9f\x6e\x6e\x44\x76\xd9\xf9\x44\x30\xde\xc4\xdf\x5d\x08\xc7\x66\
\x49\xbc\x2a\x99\x54\x83\x89\xe1\x32\xdd\x82\xdf\x28\x35\x1b\x0b\
\x81\xc9\x51\xa0\x17\x92\x51\xc0\xd8\x4e\x5c\x19\xd8\xe5\x0a\x20\
\x33\x2f\x2b\x42\x9c\x8d\xdf\x7e\xb9\xa5\xa7\xba\x1c\x5f\xc1\xf5\
\x6e\x11\xbc\x0d\x4a\x4d\xe8\x24\x53\x28\xa6\xeb\x99\x31\xa9\x86\
\x84\x92\x96\xb2\x86\x63\xf1\x2b\xf0\x72\xbd\x0b\xe7\x62\xba\xd2\
\xa5\x93\x1c\x3b\xbd\x63\xb2\xa9\x67\x4c\xa9\x22\x08\x2c\x88\xd2\
\x6c\x8b\x8f\x36\x1d\x89\x5f\x52\xdd\x5e\x99\xa8\x04\xce\x8a\x0a\
\x92\x53\xc3\x54\x5a\xe3\xef\x6e\xfe\x9e\x96\x4c\xaa\x46\x46\xd2\
\xc4\xf9\xf8\x77\x3f\xdb\x34\xa4\x06\x38\xcf\x22\xbb\x59\x81\x5c\
\x67\xcc\xfb\x57\xa7\x4b\xf7\x06\x9c\x3e\xb1\x1a\x97\x56\x18\x84\
\x76\x1c\x70\x2e\xb9\x1f\xb8\x03\x5e\x01\x2e\x97\x2f\x40\x51\x3f\
\x21\x9e\xd7\xf7\x3a\x64\x22\x65\x0d\x09\xb4\xd6\xc2\x2b\x85\x7e\
\x80\xae\xeb\xb0\xef\x1e\xe7\xeb\xfb\xb9\x92\xc9\xd5\xef\x03\xbf\
\x46\x3c\x00\x1c\x8b\xb7\x36\x46\xd1\x36\x45\x10\x01\x77\x98\xb8\
\x37\xac\x4f\x65\x75\xd6\xcd\x98\x5c\xc5\xd8\x1d\xff\xc3\x8c\x77\
\x76\x3f\x04\x1f\xe5\xca\x26\xfc\x0e\x28\x01\x9b\xaa\x30\x7b\x3e\
\x42\x47\xd3\x65\x16\xc0\xf4\x89\x95\x14\xf5\x1b\x40\x7d\x6d\xcd\
\x4f\xf1\x26\xdf\xfa\x39\x4e\xad\x06\xa6\xc9\xec\xca\x28\x11\x7c\
\x96\x48\x45\x74\x45\x47\xc4\x74\x2d\x19\x4b\xb0\x00\x71\x02\x70\
\x11\xb9\x13\x6b\x96\x08\x8e\x34\x78\xa9\x33\xdf\x03\xef\x23\x20\
\x69\xbe\x2a\xd4\x24\xe0\xc7\xf8\x14\xf3\xd6\xf0\x0c\xc6\x14\xc4\
\x92\xac\x4e\xbf\xe3\xab\xbd\x3f\xdf\xf8\x2a\x30\x13\xbf\xb6\x22\
\x1b\x7f\x02\x9b\x08\xfa\xac\xab\xde\xf9\x2e\xb1\x00\xae\xbb\x5d\
\xd8\xbf\x6a\xa8\xaf\xad\xf9\x12\x3e\x81\x22\x97\xf0\xaf\x00\x2e\
\x07\x6e\x33\xa9\x7a\xd2\xed\x1d\x93\x87\x1f\xd3\xbd\x28\x99\x5c\
\x8d\x41\x81\xc4\xf1\xb4\x2c\xfc\x95\xc0\x55\xb2\xe0\xe5\xce\xde\
\x71\x25\xe3\x60\x4c\x95\x4c\xaa\xfe\x17\xc6\x5b\x19\x07\xdd\x14\
\xfc\x1e\x87\x2d\x2d\xdd\x7d\x07\xe3\x02\x13\x4b\xea\x73\x05\x18\
\x23\x50\x60\x1b\x9b\xf4\x5b\x72\x0b\xff\x5c\xe0\x7c\xa4\xcf\x3a\
\x7c\xcd\x65\x0b\x74\x89\x05\x90\xd1\xf6\x83\x11\x77\x90\xdb\xf1\
\xb1\x02\xf8\x95\xa1\xa9\xc2\x52\xf1\xa8\xdf\x3b\xc9\x22\xfc\xb9\
\xb6\x03\xae\x04\x7e\x6b\xe2\x46\xa0\x6e\x5c\x17\x27\x71\xcd\xf0\
\x49\x3a\xc5\xf2\x85\x5e\x26\xe1\x8b\xab\x36\x75\x56\x7f\x01\x9c\
\x10\x34\x54\x3e\x11\x15\x0d\x61\xc2\x5d\xcd\x07\xac\x56\x26\xfb\
\x94\x02\xa7\x2c\x5f\x91\xbc\x7f\xd8\xd0\x14\x5d\x99\xb0\xd6\xe9\
\x1b\x83\x4c\x9f\x58\x4d\xb2\x30\x02\x71\x18\xde\xac\xca\x46\x15\
\x70\xb1\x61\xb1\xf0\xf7\x62\x1a\xcd\xfe\x56\x08\x7f\x05\x79\x14\
\x7e\x80\x71\x53\xfb\x53\xb1\xa2\xa1\x36\x48\xda\x8b\x42\x13\xf1\
\xe1\xea\xe7\xf0\xa1\xe8\x5a\xfc\x1a\x82\xf3\x83\x40\xcf\xa8\x70\
\x60\x56\xe1\x9f\x3e\xb9\x9a\x28\x24\xc0\x98\x88\xf7\x2f\x64\x93\
\xb7\x14\x70\xb3\x33\x1e\x1d\x36\xbc\x6b\x85\x1f\xba\xc0\x02\xc8\
\x64\x4e\x6d\x85\x5f\x99\xb5\x73\x96\x53\x22\xe0\x5a\xcc\x2e\x44\
\xaa\x8d\x85\xbf\x77\xd2\x64\xe4\xbf\x98\x96\xcd\xfe\x4b\xf0\x05\
\x36\xea\xba\xc3\xfb\x50\x72\x7c\x35\x23\xef\xe8\xc7\xe2\x89\x35\
\x43\x80\x5d\x64\x0c\x03\x66\x5b\xc0\x5c\x20\xca\x56\xe2\xbc\x64\
\x62\x0d\x16\x3a\xd4\x8a\x64\x1f\x11\x9c\x04\x5a\x9e\x8f\x08\x57\
\xa7\x2a\x80\xe9\x93\xab\x48\x5b\xda\x92\x2e\x79\x3e\x3e\x1f\x3b\
\x9b\x06\x7c\x5e\xe8\x58\xc3\x96\x74\x87\x87\x1d\xd3\xf1\xac\xe6\
\xf0\x6b\xcd\xc8\x7f\x69\x3e\x47\xfe\x8e\xe0\xd6\xbb\x44\xff\xd7\
\x6a\xc0\x5a\x93\xec\x63\x63\x31\xcd\x5e\xfc\x61\x3f\x7e\xfe\xd7\
\xae\x4f\xcb\xe9\xd4\x29\x80\xc9\x48\x46\xc9\x2d\xf1\x85\x14\xb2\
\x7d\xd7\x42\xa1\xcb\x0d\x5b\x92\x0a\x52\x5d\x7e\xf3\x31\x9d\x4f\
\xe3\xc8\xcf\xaa\x91\xbf\xa5\x39\xff\xa5\xc0\x8d\xb2\x9e\x2b\xfc\
\x00\xfd\x5e\xaf\x45\x66\xc3\x10\xbf\x21\xb7\xf0\x7f\x0e\x9c\xef\
\x12\xe9\xd9\x44\x41\x5e\x84\x1f\x3a\x51\x01\xdc\x3b\xb9\x8a\xb4\
\x0c\x8c\x03\xf1\xcb\x43\x9b\x22\xa0\x04\x05\xaf\x4a\xc6\xe4\x36\
\x6c\x8a\x11\xd3\x33\x68\xa3\xb7\xff\x92\xc6\x91\xbf\x27\x5b\x82\
\x25\x93\xaa\x31\x29\x69\xd2\xe9\x64\xaf\x4e\x0c\x3e\xd9\xe7\x8a\
\xc8\xec\x45\xd2\x21\xe3\xf2\x98\xd1\xda\x69\x61\xc0\x48\x46\xc2\
\x34\x04\xf8\x11\xd9\xf3\xad\xdf\x07\x66\x98\xa9\x53\xb6\xc9\x8a\
\xc9\x2f\x6d\x74\xf8\x5d\x6a\x6a\xdf\x9c\x7f\xda\xb1\x95\x48\xb2\
\x30\x0c\x07\xe0\x57\xd4\xd5\x44\x51\x54\x65\x66\x6a\x4b\x0d\xc7\
\x8e\x60\xc6\xe4\x2a\xc6\x7e\xfd\x1f\xcc\xf8\xe7\xb7\x0e\xa5\x15\
\xc9\x3e\xa1\xba\x26\xd9\xa7\x25\x3a\x75\x0a\x20\xef\xf4\xdb\x35\
\xc7\x9f\x1f\x87\x68\x6e\x67\xc7\x77\x63\xba\x9e\x36\x9a\xfd\xbf\
\xa5\x9d\x66\x7f\xc9\xb1\xd5\x04\x0a\x83\x30\x08\x1b\x4b\x81\xbf\
\x04\xdc\x1f\x86\xe1\x0f\x0a\x08\xc3\x99\x13\x6b\xba\xee\x9e\x8f\
\xaf\x46\xce\x98\xf1\xcf\x6f\x7d\x15\x5f\xf1\x37\xd7\x3d\xff\x05\
\xec\x72\x50\x75\x67\xad\xf0\x6b\x0b\x9d\xa2\x00\x66\x4e\xae\x21\
\x92\xc3\x7c\xb5\xde\x6c\x1d\xb1\x18\x78\x0a\x42\xb5\x77\xad\x77\
\x4c\xf7\x64\x35\xb3\xff\x38\xd6\x6e\xf6\x5f\x6a\xe2\x26\x53\xdb\
\x47\xfe\xe9\x13\xab\x89\x30\x23\xd4\x21\x18\x77\xe2\x13\x76\xb6\
\xcf\xfc\x9c\x9a\x0e\x34\x29\x1d\xa8\x60\xba\x8f\x42\x75\x3e\x69\
\x30\x63\x63\xbc\x42\xcb\x95\xec\xf3\x01\x70\xbe\x49\x9f\xa9\xd3\
\x03\xf0\xad\xa3\x53\x9a\xe1\x24\x42\xb3\x01\xf8\xaa\x3e\xd9\x78\
\xdd\x02\x7b\xcf\x2c\xef\xf5\x48\x62\x3a\x90\x2c\xa1\xbe\xb5\x8c\
\xfc\xba\xa1\x3d\x23\xff\xf4\x89\xd5\x38\x99\x85\x81\x0e\x05\xae\
\xa5\xf9\x4a\xc2\x0d\x81\x2b\x02\x71\xb6\x99\xfa\x67\xa6\x23\x9d\
\x77\xdf\xbe\xfa\x50\x7f\xf9\x95\xad\xdf\xcd\x71\x5a\x29\x70\x51\
\xe1\xc6\xe1\xeb\x0a\x61\x42\x07\xed\x8e\xbc\xae\x74\x9e\x1e\x32\
\x1b\x41\xf6\xfd\xd5\x04\xfc\x23\x4a\xa7\xab\xc3\xa0\x4b\xd7\x22\
\xc5\x74\x22\x6d\xcc\xf0\xbb\x04\x74\x23\x58\x9b\x47\xfe\x92\xc9\
\x35\x04\x89\xa0\x51\xf8\xaf\x23\x77\x7c\x7d\x08\x70\x3e\xb2\x5f\
\x9b\x0b\x86\xcc\xe8\xa4\xe9\xc0\xf4\xc9\xd5\x44\x89\xb5\x26\xfb\
\xa4\x81\x9b\x31\x1e\xa9\x5f\x18\xb5\x69\x7f\x86\xce\xa6\x33\x0d\
\x91\x2d\xc9\x9e\x43\x5d\x0e\xbc\x11\x04\x21\x47\xdf\xd9\xf9\x65\
\xac\x63\x3a\x9f\xb6\xa6\xf7\xfa\x7d\xf7\xda\x27\xfc\x16\x98\x29\
\x72\x8d\x23\xff\xda\x8a\xc8\xf4\x03\xce\x90\xb9\xab\x14\x68\xc3\
\x69\x53\xaa\x79\xe8\xe8\x8e\xb3\x06\xee\x99\x52\x43\x32\x12\x61\
\xc4\xbe\xf8\xd5\xad\xb9\x26\xf5\x4f\x18\xc1\x0d\x26\x6b\x18\xdf\
\x89\xfb\xfc\xb5\x87\xce\x53\x00\x62\xf3\x1c\x1d\x52\x66\xbe\x3c\
\x77\x4c\x2f\x20\xcb\x9c\x7f\x2d\x71\xfe\xf6\x8d\xfc\xd3\x27\x56\
\x13\x45\xac\x2e\xfc\x1b\xb5\xf2\xa3\x05\xc0\x44\xc4\xed\x81\x63\
\xbb\x74\x22\x62\xda\x94\xda\x75\xbe\xef\xc2\x40\x84\x12\xe9\xd0\
\xb6\xf3\xf7\x95\xb3\x3d\x6f\x20\xbb\x10\x73\xcb\x3f\x7d\xaf\xfb\
\x2d\x69\xef\x70\x05\x70\xeb\xf1\x35\x8d\xe9\x85\x1b\x92\x3d\x0c\
\xf2\x99\x44\x45\xec\xfc\xef\xf9\xb4\x67\xce\xdf\x5e\xe1\x6f\x32\
\xe7\x6f\xad\xf0\x37\x12\xe0\xd7\xa1\xdc\x93\x4e\x84\xdf\x8c\x6a\
\x13\x4c\x5f\xc7\x29\xc1\x1d\xc7\xd6\x22\xb5\x22\xd9\x47\x76\x81\
\x12\xd1\x6c\xa2\x80\x0b\xfe\xd9\xfd\x7c\x5e\x1d\xae\x00\x06\xa4\
\x22\x5c\xa2\x10\x72\xbf\x0c\x4b\x64\xaa\xc7\x62\x0d\xd0\x93\xe9\
\x86\x73\xfe\x5a\xfc\xb6\xe4\x8b\x5b\xb8\xdc\x1e\xc0\xd4\x64\x61\
\xc3\x4f\x2d\x08\xc2\xcc\x66\xa2\x6d\xbf\xf7\x49\xd5\x18\x4a\x1a\
\x6b\x4d\xf6\xb9\x12\xf8\x83\x45\x41\x5e\x93\x7d\x5a\xa2\xc3\x15\
\x80\x08\x08\x1a\x1a\x02\x72\x6f\x7c\x59\x1b\x98\xa5\x2d\xff\x15\
\xc9\x63\xda\x49\x37\x9c\xf3\xd7\x03\xb7\x5b\xc0\x91\xf8\x04\x9c\
\x96\xa6\x98\xdb\x02\xb7\xa1\xe8\x44\x93\xfa\xcd\x68\x63\x98\x70\
\xe6\xe4\x2a\xc6\xdd\x36\x16\xa0\xa5\xca\x3e\x02\x66\x20\xa6\x82\
\xf2\x9e\xec\xd3\x12\x1d\xae\x00\x4c\x01\x81\x4b\x18\xb9\xab\xad\
\xd6\x45\x2e\x70\x2e\x56\x00\x3d\x92\x76\x08\x7f\xbb\x47\xfe\x26\
\xc2\x9f\x6b\xe4\xaf\x07\x6e\x0b\x02\x2e\x42\x2c\x97\x2b\x7c\x0c\
\x5f\xc8\xe3\xcd\x16\x2e\xbf\x3e\x70\xb9\x8c\x4b\x1d\xac\x3f\x63\
\x62\x35\x53\x8f\x5f\xbb\x22\x58\x99\xec\x73\xe2\xcc\xb5\x27\xfb\
\x18\x97\x13\x50\xd5\xd9\x35\xfd\xd6\x95\xce\x71\x02\x7a\xeb\x3e\
\x97\x84\x3b\x59\xa8\x6e\xb0\x27\x49\x4c\x1b\xe9\x86\x0e\xbf\x46\
\xe1\xff\x8d\x44\xf9\xb8\xbb\xfb\x63\x41\xbd\xcc\x12\x7f\x06\x26\
\xe2\x77\x7b\xce\x35\xd7\x1c\x00\x9c\x62\x70\x97\x8c\x9d\x92\x05\
\x55\xbe\x5c\x79\x4b\xa4\x81\x55\xc9\x3e\xa3\x73\x9c\xf5\x01\xd8\
\xf9\x92\xcd\x4f\xa8\xfb\x4f\x73\xbb\x49\x3e\x52\x4c\x77\x67\x86\
\x17\xfe\x64\x46\xf8\x3b\xd5\xe1\x17\xb5\xce\xe1\xd7\x40\x13\xe1\
\x07\x18\x3f\xb5\x3f\x66\x0e\x49\xff\x33\x6c\x0a\xf0\x20\xbe\xe8\
\x46\x36\x12\xf8\xb5\x2a\xd3\x55\x33\x60\x7f\x59\x2a\x28\x99\x54\
\x95\xf5\xc4\x92\x89\xd5\x98\x5b\x6b\xb2\x4f\x19\x70\x51\x62\x53\
\x7b\xcd\x02\x71\x74\x0f\xc8\x72\x8d\x15\x40\xcc\x5a\x59\x6d\x61\
\xcf\x09\xb4\x42\xf8\x4d\xdc\x68\x6a\xbb\xf0\xcf\x98\x54\x43\x81\
\x05\x96\x58\xbb\xf0\xd7\x03\xb7\x06\x81\xad\x21\xfc\x8d\x8c\xbd\
\xab\x98\x09\xf7\x0c\x00\x98\x67\x66\xa7\x02\xd7\xe3\xab\x4e\xe5\
\x62\x37\x60\xaa\x91\x38\x0e\x59\xbf\x92\x89\x6b\x4e\x07\x1a\x93\
\x7d\x14\xac\x35\xd9\xe7\x26\xc3\x1e\x89\xe6\xbb\x6e\x95\xec\xd3\
\x12\x71\x2a\x5e\x4c\x8b\xcc\xf0\xc2\x9f\x6c\x8b\xd9\x2f\x6b\xdf\
\xc8\x9f\x76\x98\x02\x77\x08\xad\x36\xfb\xd5\x4c\xf8\x57\x67\xdc\
\xd4\x7e\x94\x4c\xaa\x59\x0e\xf6\x1b\xd0\xa7\xc0\x05\xe4\xae\xf4\
\xbb\x31\x70\x15\xc6\x8e\x32\xbb\x62\xda\xa4\xaa\x05\x41\x10\x12\
\x21\x0a\xd2\x8e\x94\x6c\x5f\xe0\xe7\xe4\x4e\xf6\x79\xd2\xb0\x1b\
\x85\x1a\xba\xb3\xd3\xaf\x29\xb1\x05\x10\x93\x93\x76\x8c\xfc\x37\
\xac\xcb\xc8\xdf\x8a\x50\x5f\x46\xf8\xb3\x8f\xfc\xd9\x18\x3f\xb5\
\x1f\x66\xd4\x04\x61\x70\x3b\x70\x3c\x30\xbb\x85\xd3\x07\x00\x27\
\x99\x54\x12\x60\x7b\x04\x0d\x81\x85\x4e\xa4\x56\x25\xfb\xe4\xda\
\xbd\xe8\x0d\xb0\x0b\x05\xcb\x0a\x0a\x5a\x32\x34\xba\x1f\xb1\x02\
\x88\xc9\x4a\x7b\x1c\x7e\xed\x5d\xd8\x93\x76\x58\x8a\xb6\x38\xfc\
\xd4\x2a\xe1\x6f\x64\xdc\xdd\xfd\x20\xe5\xa2\xc1\xe5\xe1\x53\xf8\
\x5d\x80\xfe\x88\x5f\x97\x9f\x8d\x00\xd8\x07\xb8\xd7\x25\xd2\xe3\
\x31\xdb\x08\xf1\x6b\x72\x27\xfb\x2c\x02\xbb\x20\x95\x4a\xbf\x17\
\x86\xc6\x91\xb7\xb5\x76\x2b\x81\xee\x41\xac\x00\x62\x9a\xd1\x53\
\x1c\x7e\x6d\x61\xec\xb4\xfe\x94\x0f\x8c\x30\xb3\xff\x18\x36\x11\
\xb8\x03\xbf\x09\x4d\x2e\xb6\x04\xae\x43\x7a\x04\xbf\xab\x70\x36\
\x6a\x80\x2b\x10\x7f\x48\x26\x43\x8e\xb9\xb3\x7b\x26\xfb\xb4\x44\
\xac\x00\x62\xd6\xa0\x49\x25\x9f\xb5\xd6\xf0\x6b\xaf\xc3\xaf\x31\
\xc3\x6f\x5d\x1d\x7e\x6d\x61\xfc\xb4\xfe\x8c\xbb\xbb\x1f\x32\x2d\
\xc0\x38\x07\xf8\x15\xb0\xa4\x85\x8f\x0c\xc1\x67\x0f\x66\xdb\xa1\
\x46\xc0\x0c\x13\x77\x77\xf7\x64\x9f\x96\x88\x15\x40\xcc\x4a\xda\
\x33\xf2\xb7\xc7\xec\x9f\x31\xb9\x86\x20\x30\xc3\xc7\xf9\x5b\x9a\
\xf3\xaf\x36\xf2\xb7\xcd\xec\x6f\x89\xf1\x77\xf7\x07\x59\x35\x84\
\x37\x03\x93\x81\x37\xda\x71\x99\xbf\x80\x5d\x2e\xa3\xba\xbb\x27\
\xfb\xb4\x44\xac\x00\x62\x80\xae\x1d\xf9\x95\xc9\xf0\xd3\xda\xd3\
\x7b\x3b\x64\xe4\xcf\x86\x2f\xc7\xe5\xa2\x82\x82\x79\xcf\x02\x63\
\xf1\xf9\x02\xf5\xad\xfc\xf8\x07\x60\xe7\x9b\x69\xbe\x73\x3d\x5b\
\x84\x7a\x76\xeb\x63\x3a\x84\xae\xaa\xe4\x53\x32\xb9\x06\x02\x33\
\xf3\xc2\xdf\x0a\x6f\x7f\xc7\x8e\xfc\x4d\x19\x3f\xb5\x1f\x47\xde\
\x36\x06\xc1\x6c\x61\x27\xe1\x37\x24\x59\xba\x96\x8f\x2d\x01\x2e\
\x18\x10\x26\x5f\x33\x19\xc7\x76\xd3\x45\x3e\xad\x25\x56\x00\x7d\
\x9c\x36\x56\xef\xbd\x44\xd6\xbe\xf4\xde\x19\x93\x6a\x48\xc8\x8c\
\xc8\x1d\x96\x11\xfe\xb5\x8c\xfc\xed\x77\xf8\xb5\x95\x09\x53\xfb\
\x63\xa2\xd4\x64\x57\xe2\xa7\x04\xff\xa4\x79\xf6\x60\x1a\x78\x0b\
\x38\xd5\x70\x8f\x56\x45\xf5\x8c\xed\x06\x45\x3d\xd7\x95\x38\x11\
\xa8\x0f\xd3\x86\xed\xba\x2a\x80\xdf\xca\xd4\xae\xd2\xdd\x33\x27\
\xd5\x90\xc0\x2c\x85\x3b\x8c\x56\x99\xfd\xba\x48\xb2\x2e\x11\xfe\
\x46\xc6\xdf\xd3\x8f\x92\xc9\xd5\x29\x97\x74\x4f\x87\x0d\xe1\x7f\
\x85\xf6\x07\xbe\x0a\x0c\xc7\x57\xb1\xfa\x2f\xf0\x82\x29\xfc\x58\
\x66\xea\x0e\x15\x7d\x3b\x82\x58\x01\xf4\x51\xb2\x64\xf8\x0d\xc9\
\x71\x6a\x25\x70\x99\x4c\x37\xb5\x2b\xc9\x27\x63\xf6\xa7\xbc\xd9\
\x7f\x0d\xb9\x85\xdf\x3b\xfc\x8c\x8b\xe4\xac\x7c\x5c\x1e\xbc\xea\
\x8d\xe5\xba\x4a\x4e\x2c\x5d\xd0\xef\x3f\x43\xee\xa9\xd9\xa5\x66\
\x86\x39\x92\x82\xf4\x17\x1b\x28\x35\xb8\xcc\x38\xee\xd6\xde\xb5\
\x65\x7d\xac\x00\xfa\x20\xab\x09\x7f\x6b\xb7\xeb\x6a\x5f\x86\xdf\
\x6a\x4b\x7a\xb5\xf6\x38\xff\xad\x01\xe6\xe7\xfc\x79\x0e\xa9\x8d\
\xbf\x2d\xd3\x1d\xff\x26\x8d\x37\xfd\x7b\x2d\xb1\x0f\xa0\x8f\xd1\
\x1e\xe1\xa7\x1d\x66\x7f\xbb\x84\x9f\xfc\x0b\x7f\x5f\x23\x56\x00\
\x7d\x88\x58\xf8\x63\x9a\x12\x2b\x80\x3e\x42\x2c\xfc\x31\xd9\x88\
\x15\x40\x1f\x20\x16\xfe\x98\x5c\xc4\x0a\xa0\x97\x13\x0b\x7f\x4c\
\x4b\xe4\x43\x01\x74\xff\x42\x69\xbd\x84\x58\xf8\x63\xd6\x46\x3e\
\xc2\x80\x85\xa1\xd2\x83\x0d\xa5\x67\x4c\xac\x8e\x2b\x83\x76\x1e\
\xc2\x11\xca\x38\x86\x56\xad\xe7\x6f\x9f\xf0\x37\xe6\xf6\xb3\x76\
\xe1\xf7\xe9\xbd\xb1\xf0\x77\x2b\xf2\xa1\x00\xf6\xc7\xf4\xb0\x62\
\x4b\xa0\x2b\x28\x00\x76\x06\x06\xe7\xf8\x7b\x25\x3e\xc3\xaf\x7d\
\x35\xfc\xd6\x1c\xf9\x5b\x4a\xef\x6d\x4c\xf2\xf1\xc2\xdf\xcd\xf6\
\xc7\xeb\xcb\xe4\x43\x01\x6c\x44\xdb\xb7\x76\x8a\xe9\x78\x1a\x47\
\xfe\x8e\x10\xfe\xb5\xa5\xf7\xde\xd6\x5d\x92\x7c\x62\xd6\xa4\x73\
\x14\x40\x6c\xd8\x77\x77\x56\xe6\xf6\xb7\x47\xf8\xef\x3d\xb6\x8a\
\xb4\x84\x45\xfa\x0e\x70\x35\x6b\x2b\xe6\x61\x5c\x14\x8f\xfc\xdd\
\x93\x8e\x77\x02\x86\x0e\xfa\x45\x0e\x6f\xf6\xc5\x74\x3f\xd6\x69\
\x61\x0f\x40\x14\x40\x80\x0a\xf0\xeb\xe8\x37\xc9\x71\xda\xaa\x85\
\x3d\x74\xcd\xaa\xbe\x98\xb6\xd3\xf1\x7b\x03\x4a\x44\x0d\x4e\xf8\
\xd5\x53\xad\x2d\xb0\x10\xd3\x35\xac\x21\xfc\x13\xee\x6e\xef\xc6\
\x15\x86\xa0\x10\x18\x99\xe3\x84\x7a\x2c\x3f\xab\xfa\x62\xda\x46\
\x87\x2b\x80\xf1\x99\xdd\x50\x64\xf6\x14\x30\x1d\x3f\xd7\x8c\x1d\
\x7e\x9d\x4b\x6b\xfa\xb7\x83\x84\xdf\x8b\xbf\x79\xe5\xfe\x45\x8e\
\x53\x5e\x36\xb8\x5c\x2e\x16\xfe\xee\x4e\xa7\xf8\x00\xc6\x4f\xed\
\xcf\x8c\x89\xd5\xa5\xc8\xce\x51\xa0\xa7\x80\x6d\x2c\x5e\x79\xd8\
\x19\x08\x28\x14\x1c\x0a\x7c\xb9\x85\xf3\x2a\x81\x4b\x3b\x42\xf8\
\x01\x02\x44\x8a\xb0\x21\x40\xb9\x76\xe1\x0d\xa5\x9c\xdb\x71\xc5\
\x74\x23\x62\x77\x5d\x0f\x65\xfa\xa4\x6a\x1c\x84\xa1\xaf\x60\x73\
\x29\xbe\x70\x45\x36\xd6\x29\xce\x9f\x8b\x12\xbf\xad\xf6\x04\xe0\
\x4e\x9a\x6f\x91\xfd\x31\xc6\x77\x80\xf9\xe3\x63\x0b\xa0\x5b\x13\
\x8f\xca\x3d\x90\x92\x49\xd5\x48\x56\x10\x9a\x5a\x53\xbd\x77\x9d\
\x84\x7f\xa6\xdf\x27\x2f\x74\x46\xd2\x40\x52\x20\x94\x76\xcb\x2a\
\xeb\xd2\x23\x06\x16\x7d\x8a\xaf\xad\x3f\xa4\xc9\xc7\x06\x23\x36\
\x06\xe6\xe7\xbb\xaf\x62\x5a\x26\x56\x00\x3d\x8c\x19\x93\xaa\x09\
\xa0\x20\x32\x9d\x40\xcb\x95\x7c\xd6\x29\xd4\x57\x32\xb1\x1a\x9c\
\x05\xce\xb4\x27\x70\x30\xb0\x81\x20\x8d\x39\x87\x05\xa9\x11\x03\
\x8b\xd2\xf8\x04\xa3\x82\x2c\x1f\xef\x0f\x6c\x01\xbc\x9a\xef\xfe\
\x8a\x69\x99\x58\x01\xf4\x20\x56\x0a\x3f\xb4\x5a\xf8\x69\xc7\xc8\
\x3f\x6d\x52\x15\x86\xa1\x50\xfb\xe0\x4d\xfc\x2d\xda\xd8\xd4\x42\
\x60\x0b\x23\x60\xda\xa4\x72\x8e\x9d\x3a\xb8\x8d\x1f\x8f\xe9\x2a\
\xe2\xd5\x80\x3d\x84\x92\x49\x35\x38\x82\x30\xf2\x73\xfe\x56\x0b\
\x7f\x7b\x1c\x7e\xa1\x00\xb9\x22\x60\x22\x6d\x17\xfe\x46\xb6\x40\
\x51\x32\x88\xc7\x98\x6e\x4d\xac\x00\x7a\x0c\xc2\xe4\xbe\x84\xdf\
\xa2\x7a\x48\x8e\x93\x3a\x26\xd4\x67\x06\x66\xfd\x80\xcd\xd6\xa1\
\xc1\x23\x64\x56\xb0\x0e\x9f\x8f\xe9\x02\x62\x05\xd0\x93\x30\x76\
\x25\x77\xce\x7d\x87\xc5\xf9\xe5\x8f\x08\xbf\xf9\x65\x7b\x59\x80\
\xb3\xfa\x38\x03\xa4\x7b\x13\x2b\x80\x9e\x45\x1d\xd9\xb7\xb5\x6e\
\x00\xae\xf6\x9b\x76\xac\x7b\x9c\x1f\x04\x52\x03\x50\xd6\xce\x0b\
\xcc\x06\x66\x5a\xa0\x74\x65\xac\x01\xba\x35\xf1\x04\xad\x07\x61\
\xe8\x03\x61\x95\x78\x27\xdb\xea\x34\x00\x6f\x9a\xac\xbe\x23\xe2\
\xfc\x86\x11\x60\xa9\x08\x95\xe5\x38\xe5\x7d\xe0\xe5\xcc\xef\x49\
\x20\xc4\xbf\x4b\x06\x2c\x00\x1e\x96\xd9\xbf\x41\x9c\x32\x75\x5d\
\x95\x51\x4c\x67\x12\x2b\x80\x1e\x42\x26\x63\xeb\x33\x60\x31\x30\
\xa2\xc9\x9f\x07\x00\xa3\x41\xcf\x4c\x9b\x52\xc7\xb1\x77\xb5\x7e\
\xf3\x8a\x19\x13\x2b\x01\x12\x10\x24\x1d\x41\x2a\x1d\x04\xe9\x48\
\x29\x02\x12\x69\x48\x97\xe6\xf8\xd8\x2c\xc4\x99\x16\xb8\x5a\x53\
\x22\x14\xce\x9c\x61\x99\xb1\x3e\x05\x44\xc7\xde\xdd\x3b\x76\xce\
\xe9\xed\xc4\x0a\xa0\x87\x20\x84\xa1\x0a\xb0\xb9\xc0\x4e\x59\x4e\
\x19\xed\x8c\x84\x29\xdd\xaa\x8d\x2c\x66\x4c\xae\x84\x20\x0a\x70\
\xc1\x37\x25\x0e\x07\x36\x30\x5c\x59\x52\xee\x0b\xb0\x2f\x22\x4b\
\x2f\x22\xf7\x32\xdf\xc1\x0a\x08\xa4\x20\x9a\x30\xb5\x38\xca\x77\
\xdf\xc4\xb4\x9f\x58\x01\xf4\x10\x42\x27\xea\x0a\x0a\x6a\x0a\xd2\
\xe9\xd9\x39\x66\xd5\xdb\x9b\x6c\xa0\x41\xe9\xda\xae\x35\x7d\x62\
\x35\xe9\xc8\x2c\x54\x70\x30\x7e\x3d\xff\xa6\x4d\x4e\x11\x7e\x5a\
\x91\x2b\x55\x7c\x88\xa9\x59\xfa\x6f\x4c\x0f\x24\x76\x02\xf6\x10\
\x8e\x99\x36\x90\x64\x3a\x8d\xd0\x1c\xb2\xd7\x5a\xd8\xdc\xc4\x7a\
\x6b\xbb\xce\xf4\x89\xd5\x38\x99\x85\x81\x0e\xc1\x97\xf1\xda\x34\
\xcb\x69\x86\xf7\x33\xe4\x0a\xe3\x0d\x86\x58\x01\xf4\x06\x62\x05\
\xd0\xe3\xb0\xf7\xf1\x21\xbf\xa6\x0c\xc6\xd8\xa6\xa5\x4f\xae\x26\
\xfc\x87\xe2\x85\xbf\xbd\xa5\xd9\x06\x98\x51\x9c\xef\x9e\x88\x59\
\x77\x62\x05\xd0\x93\x30\xc0\xf8\x0c\x58\x92\xe5\xaf\x03\x80\x5d\
\xc1\x8a\x4a\x8e\xad\x0b\x4a\x26\xd6\xae\xf1\xc7\x92\xc9\x35\x04\
\x89\xa0\x23\x84\x1f\xc0\x24\xc2\x7c\x77\x47\xcc\xba\x13\xfb\x00\
\x7a\x12\xf2\x8e\x40\x11\xcc\x01\x76\xcc\x72\xc6\xc9\xa0\xaf\x11\
\x44\x73\x80\x39\x25\x93\x6a\xde\x07\x3e\x06\x95\x29\x50\x2d\x91\
\x0e\xa6\xe5\x02\x9e\xad\xe5\x1d\x60\x69\xbe\xbb\x23\x66\xdd\x89\
\x15\x40\x0f\xc2\x10\x8e\x82\x1a\x23\xfd\x7e\x8e\x53\x36\x04\x0e\
\xcc\x1c\x69\x50\x25\x5e\x50\xe7\x5a\xc4\x12\xe0\xfb\xe4\x1e\xf9\
\x6b\x81\xbf\x67\x7e\x6e\x00\xac\x07\x0c\xc4\xfb\x02\x1a\xfd\x01\
\x0e\x78\x0f\xb8\x22\x95\x72\xe5\xfd\x8a\xe3\xd7\xa7\xa7\x13\x17\
\x04\xe9\x61\x64\x0a\x71\x8c\x05\xee\x26\xb7\x93\xae\xad\xd4\x03\
\xb7\x5a\xc0\x6f\xe5\x82\x3a\x70\x45\x40\x11\xc6\x60\xc4\xfa\xc0\
\xfa\xf8\x82\x23\x35\x60\xaf\x12\xb9\x8f\x08\x4c\xe3\xef\x89\x8b\
\x7d\xf4\x74\x62\x15\xde\x33\x79\x1b\x58\xc6\xba\x9b\xf2\xd0\x58\
\xb7\x3f\xe0\x22\x89\xf2\xf1\x53\x8b\x61\xd5\x1a\x80\xcf\xf1\x69\
\xbd\x31\xbd\x94\xd8\x09\xd8\x03\x31\x1f\x09\x78\x18\xd6\xb9\xee\
\x5e\xa6\x74\x37\xbf\x91\xe2\xd2\xdd\x7d\x91\x78\x0a\xd0\x03\xf1\
\xd3\x00\x0d\x07\x1b\x07\xfc\x00\xd8\x06\x18\x86\xaf\xc4\xd3\x5a\
\xef\xfc\xaa\xba\xfd\x71\xe9\xee\x3e\x4b\xac\x00\x7a\x28\x25\x13\
\xab\x41\x98\x05\x0c\x94\x17\xfe\xad\x80\xed\x32\xc7\xe8\xcc\xbf\
\x87\xe1\xc3\x83\x4d\xa7\x7a\xb5\x78\xb3\xff\xe2\x78\xe4\xef\xdb\
\xc4\x0a\xa0\x17\x91\xd9\x0e\x3c\x94\x18\x88\xf7\xe2\x6f\x07\x8c\
\x59\xed\xd8\x02\x58\x01\x4c\x0d\x8c\x1b\x04\xa5\xb1\xf0\xf7\x6d\
\xfe\x1f\xa2\x21\x12\xff\x02\xa3\x05\xd8\x00\x00\x00\x00\x49\x45\
\x4e\x44\xae\x42\x60\x82\
"
# Generated Qt resource name table (pyrcc output). Each entry is a 2-byte
# big-endian name length, a 4-byte lookup hash, then the name itself encoded
# as UTF-16-BE — here the directory "images" and the file "icon.ico".
qt_resource_name = b"\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x08\
\x0a\x61\x42\x7f\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x69\x00\x63\x00\x6f\
"
# rcc format-1 resource tree (used by Qt < 5.8): fixed-width entries mapping
# names to offsets in qt_resource_data. Generated — do not edit by hand.
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
# rcc format-2 resource tree (Qt >= 5.8): same entries as v1 but each is
# followed by an extra 8-byte field (per the rcc v2 layout this carries the
# resource's last-modified timestamp). Generated — do not edit by hand.
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x6c\x63\x5b\x57\x3d\
"
# Qt changed its compiled-resource (rcc) data format in 5.8; pick whichever
# structure variant matches the Qt runtime we are actually linked against.
qt_version = [int(part) for part in QtCore.qVersion().split('.')]
if qt_version >= [5, 8, 0]:
    rcc_version = 2
    qt_resource_struct = qt_resource_struct_v2
else:
    rcc_version = 1
    qt_resource_struct = qt_resource_struct_v1
def qInitResources():
    # Register this module's embedded resource payload with Qt's global
    # resource system so ":/images/icon.ico"-style paths resolve.
    QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Undo qInitResources(): remove the embedded payload from Qt's global
    # resource system (same arguments must be passed for a matching unregister).
    QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 65.077922 | 103 | 0.727267 | 40,004 | 165,363 | 3.005324 | 0.007699 | 0.411429 | 0.593263 | 0.768559 | 0.341036 | 0.331354 | 0.324259 | 0.281738 | 0.276548 | 0.275226 | 0 | 0.341346 | 0.015892 | 165,363 | 2,540 | 104 | 65.103543 | 0.397431 | 0.000919 | 0 | 0.175515 | 0 | 0.986926 | 0.000006 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0.000792 | false | 0 | 0.000396 | 0 | 0.001189 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b95a5c73feee0ec1db74710f3e85a452641f8c06 | 877 | py | Python | robotframework-ls/tests/robotframework_ls_tests/completions/test_snippets_completions.py | mardukbp/robotframework-lsp | 57b4b2b14b712c9bf90577924a920fb9b9e831c7 | [
"ECL-2.0",
"Apache-2.0"
] | 92 | 2020-01-22T22:15:29.000Z | 2022-03-31T05:19:16.000Z | robotframework-ls/tests/robotframework_ls_tests/completions/test_snippets_completions.py | mardukbp/robotframework-lsp | 57b4b2b14b712c9bf90577924a920fb9b9e831c7 | [
"ECL-2.0",
"Apache-2.0"
] | 604 | 2020-01-25T17:13:27.000Z | 2022-03-31T18:58:24.000Z | robotframework-ls/tests/robotframework_ls_tests/completions/test_snippets_completions.py | mardukbp/robotframework-lsp | 57b4b2b14b712c9bf90577924a920fb9b9e831c7 | [
"ECL-2.0",
"Apache-2.0"
def test_snippets_completions(data_regression):
    """Snippet completions for the lowercase prefix 'for' match the recorded data."""
    from robotframework_ls.impl import snippets_completions
    from robotframework_ls.impl.completion_context import CompletionContext
    from robotframework_ls.impl.robot_workspace import RobotDocument

    document = RobotDocument("unused", source="""for""")
    context = CompletionContext(document)
    data_regression.check(snippets_completions.complete(context))
def test_snippets_completions2(data_regression):
    """Mixed-case prefix 'FoR' yields the same completions as 'for'.

    Checked against the same regression file as test_snippets_completions
    (via basename), i.e. snippet matching should not be case sensitive.
    """
    from robotframework_ls.impl import snippets_completions
    from robotframework_ls.impl.completion_context import CompletionContext
    from robotframework_ls.impl.robot_workspace import RobotDocument

    document = RobotDocument("unused", source="""FoR""")
    context = CompletionContext(document)
    completions = snippets_completions.complete(context)
    data_regression.check(completions, basename="test_snippets_completions")
| 41.761905 | 76 | 0.814139 | 92 | 877 | 7.5 | 0.271739 | 0.165217 | 0.173913 | 0.208696 | 0.878261 | 0.878261 | 0.878261 | 0.878261 | 0.878261 | 0.878261 | 0 | 0.001285 | 0.112885 | 877 | 20 | 77 | 43.85 | 0.885604 | 0 | 0 | 0.571429 | 0 | 0 | 0.049031 | 0.028506 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.428571 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
b963a22a676110fcd0c67bc33abd88af0461b878 | 130 | py | Python | idewavecore/crypto/Cypher.py | idewave/idewavecore | e432c1c4113cff36885c6cb4a2273f38a0c03182 | [
"Apache-2.0"
] | 10 | 2021-02-21T08:24:39.000Z | 2021-02-26T21:20:03.000Z | idewavecore/crypto/Cypher.py | idewave/idewavecore | e432c1c4113cff36885c6cb4a2273f38a0c03182 | [
"Apache-2.0"
] | null | null | null | idewavecore/crypto/Cypher.py | idewave/idewavecore | e432c1c4113cff36885c6cb4a2273f38a0c03182 | [
"Apache-2.0"
class Cypher:
    """Base class for cyphers; concrete cyphers override both hooks.

    NOTE(review): the stub bodies fall through and return ``None`` despite the
    ``bytes`` annotations — subclasses are expected to provide the real
    transformations.
    """

    def encrypt(self, data: bytes) -> bytes:
        """Encrypt *data*; this base stub does nothing."""

    def decrypt(self, data: bytes) -> bytes:
        """Decrypt *data*; this base stub does nothing."""
| 16.25 | 44 | 0.515385 | 14 | 130 | 4.785714 | 0.571429 | 0.238806 | 0.38806 | 0.537313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.323077 | 130 | 7 | 45 | 18.571429 | 0.761364 | 0 | 0 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
b9a184ef3ae43ca603deea890e011ce407e2c823 | 5,346 | py | Python | city_housing_index/index/migrations/0001_initial.py | Sinope-Nanto/city_house | 73589bb07c415b1deecf8a0618b79d376d5a6e88 | [
"MIT"
] | null | null | null | city_housing_index/index/migrations/0001_initial.py | Sinope-Nanto/city_house | 73589bb07c415b1deecf8a0618b79d376d5a6e88 | [
"MIT"
] | null | null | null | city_housing_index/index/migrations/0001_initial.py | Sinope-Nanto/city_house | 73589bb07c415b1deecf8a0618b79d376d5a6e88 | [
"MIT"
] | 1 | 2021-05-05T13:13:56.000Z | 2021-05-05T13:13:56.000Z | # Generated by Django 3.1.5 on 2021-01-26 12:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the index app (auto-generated by Django).

    Creates three tables — CalculateResult, CalculateResultBase09 and
    CityIndex — all holding housing-price index figures broken down by
    dwelling-size band (<90, 90-144, >144). Once applied, do not hand-edit
    the field lists; add a follow-up migration instead.
    """

    initial = True

    # No dependencies: this is the app's first migration.
    dependencies = [
    ]

    operations = [
        # Full per-city or per-area calculation results, including index
        # values, volumes and year-on-year / chain figures per size band.
        migrations.CreateModel(
            name='CalculateResult',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.IntegerField(default=0)),
                ('month', models.IntegerField(default=0)),
                # True -> row is city-level, False -> area-level (per field name;
                # confirm against the app code).
                ('city_or_area', models.BooleanField(default=True)),
                ('area', models.IntegerField(default=-1)),
                ('city', models.IntegerField(null=True)),
                ('price', models.FloatField(default=0)),
                ('price_under_90', models.FloatField(default=0)),
                ('price_above_144', models.FloatField(default=0)),
                ('price_90_144', models.FloatField(default=0)),
                ('index_value', models.FloatField(default=0)),
                ('index_value_under90', models.FloatField(default=0)),
                ('index_value_above144', models.FloatField(default=0)),
                ('index_value_90144', models.FloatField(default=0)),
                ('area_volume', models.FloatField(default=0, help_text='总成交面积')),
                ('area_volume_under_90', models.FloatField(default=0, help_text='总成交面积u90')),
                ('area_volume_above_144', models.FloatField(default=0, help_text='总成交面积a144')),
                ('area_volume_90_144', models.FloatField(default=0, help_text='总成交面积90-144')),
                ('trade_volume', models.IntegerField(default=0)),
                ('volume_year_on_year', models.FloatField(default=0)),
                ('volume_chain', models.FloatField(default=0)),
                ('year_on_year_index', models.FloatField(default=0)),
                ('chain_index', models.FloatField(default=0)),
                ('year_on_year_index_above144', models.FloatField(default=0)),
                ('chain_index_above144', models.FloatField(default=0)),
                ('year_on_year_index_under90', models.FloatField(default=0)),
                ('chain_index_under90', models.FloatField(default=0)),
                ('year_on_year_index_90144', models.FloatField(default=0)),
                ('chain_index_90144', models.FloatField(default=0)),
            ],
        ),
        # Same shape as CalculateResult minus the area_volume_* columns;
        # name suggests results computed against a 2009 base period —
        # TODO confirm against the calculation code.
        migrations.CreateModel(
            name='CalculateResultBase09',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.IntegerField(default=0)),
                ('month', models.IntegerField(default=0)),
                ('price', models.FloatField(default=0)),
                ('trade_volume', models.IntegerField(default=0)),
                ('volume_year_on_year', models.FloatField(default=0)),
                ('volume_chain', models.FloatField(default=0)),
                ('city_or_area', models.BooleanField(default=True)),
                ('area', models.IntegerField(default=-1)),
                ('city', models.IntegerField(null=True)),
                ('price_under_90', models.FloatField(default=0)),
                ('price_above_144', models.FloatField(default=0)),
                ('price_90_144', models.FloatField(default=0)),
                ('year_on_year_index', models.FloatField(default=0)),
                ('chain_index', models.FloatField(default=0)),
                ('year_on_year_index_above144', models.FloatField(default=0)),
                ('chain_index_above144', models.FloatField(default=0)),
                ('year_on_year_index_under90', models.FloatField(default=0)),
                ('chain_index_under90', models.FloatField(default=0)),
                ('year_on_year_index_90144', models.FloatField(default=0)),
                ('chain_index_90144', models.FloatField(default=0)),
                ('index_value', models.FloatField(default=0)),
                ('index_value_under90', models.FloatField(default=0)),
                ('index_value_above144', models.FloatField(default=0)),
                ('index_value_90144', models.FloatField(default=0)),
            ],
        ),
        # Raw per-city monthly observations: prices and traded area/volume
        # per size band, without derived index columns.
        migrations.CreateModel(
            name='CityIndex',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('city', models.IntegerField(default=0)),
                ('year', models.IntegerField(default=0)),
                ('month', models.IntegerField(default=0)),
                ('price', models.FloatField(default=0)),
                ('trade_volume', models.IntegerField(default=0)),
                ('area_volume', models.FloatField(default=0, help_text='总成交面积')),
                ('area_volume_under_90', models.FloatField(default=0, help_text='总成交面积u90')),
                ('area_volume_above_144', models.FloatField(default=0, help_text='总成交面积a144')),
                ('area_volume_90_144', models.FloatField(default=0, help_text='总成交面积90-144')),
                ('price_under_90', models.FloatField(default=0)),
                ('price_above_144', models.FloatField(default=0)),
                ('price_90_144', models.FloatField(default=0)),
            ],
        ),
    ]
| 55.113402 | 114 | 0.582679 | 540 | 5,346 | 5.531481 | 0.125926 | 0.15534 | 0.369602 | 0.385671 | 0.921661 | 0.913626 | 0.913626 | 0.913626 | 0.896887 | 0.896887 | 0 | 0.053077 | 0.270483 | 5,346 | 96 | 115 | 55.6875 | 0.712821 | 0.008418 | 0 | 0.876404 | 1 | 0 | 0.193999 | 0.040951 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.011236 | 0 | 0.05618 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
b9dbfebe1ce4c81d04aa5f6a873d984abf4c35a6 | 60,920 | py | Python | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/blob_containers_operations.py | pjquirk/azure-sdk-for-python | cbf02ec4f177b96eae1dbbba87c34c2c93880150 | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/blob_containers_operations.py | pjquirk/azure-sdk-for-python | cbf02ec4f177b96eae1dbbba87c34c2c93880150 | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/blob_containers_operations.py | xiafu-msft/azure-sdk-for-python | 4d9560cfd519ee60667f3cc2f5295a58c18625db | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class BlobContainersOperations(object):
"""BlobContainersOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version to use for this operation. Constant value: "2019-04-01".
:ivar immutability_policy_name: The name of the blob container immutabilityPolicy within the specified storage account. ImmutabilityPolicy Name must be 'default'. Constant value: "default".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2019-04-01"
self.immutability_policy_name = "default"
self.config = config
    def list(
            self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config):
        """Lists all containers and does not support a prefix like data plane.
        Also SRP today does not return continuation token.

        :param resource_group_name: The name of the resource group within the
         user's subscription. The name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the
         specified resource group. Storage account names must be between 3 and
         24 characters in length and use numbers and lower-case letters only.
        :type account_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: ListContainerItems or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.storage.v2019_04_01.models.ListContainerItems or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL — serializer also validates name lengths/patterns
        # before the request is ever sent.
        url = self.list.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            # Correlation id so the call can be traced server-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            # Any non-200 is surfaced as a CloudError carrying the service's
            # request id for diagnostics.
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ListContainerItems', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    # URL template consumed by the method body above (generator convention).
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers'}
    def create(
            self, resource_group_name, account_name, container_name, public_access=None, metadata=None, custom_headers=None, raw=False, **operation_config):
        """Creates a new container under the specified account as described by
        request body. The container resource includes metadata and properties
        for that container. It does not include a list of the blobs contained
        by the container. .

        :param resource_group_name: The name of the resource group within the
         user's subscription. The name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the
         specified resource group. Storage account names must be between 3 and
         24 characters in length and use numbers and lower-case letters only.
        :type account_name: str
        :param container_name: The name of the blob container within the
         specified storage account. Blob container names must be between 3 and
         63 characters in length and use numbers, lower-case letters and dash
         (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number.
        :type container_name: str
        :param public_access: Specifies whether data in the container may be
         accessed publicly and the level of access. Possible values include:
         'Container', 'Blob', 'None'
        :type public_access: str or
         ~azure.mgmt.storage.v2019_04_01.models.PublicAccess
        :param metadata: A name-value pair to associate with the container as
         metadata.
        :type metadata: dict[str, str]
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: BlobContainer or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Flat parameters are folded into the request-body model here.
        blob_container = models.BlobContainer(public_access=public_access, metadata=metadata)

        # Construct URL — serializer also validates name lengths/patterns.
        url = self.create.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Correlation id so the call can be traced server-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(blob_container, 'BlobContainer')

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200, 201]:
            # Anything else is surfaced as a CloudError with the service's
            # request id for diagnostics.
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        # 200 = container already existed / updated, 201 = newly created;
        # both carry a BlobContainer payload.
        if response.status_code == 200:
            deserialized = self._deserialize('BlobContainer', response)
        if response.status_code == 201:
            deserialized = self._deserialize('BlobContainer', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    # URL template consumed by the method body above (generator convention).
    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'}
def update(
        self, resource_group_name, account_name, container_name, public_access=None, metadata=None, custom_headers=None, raw=False, **operation_config):
    """Updates container properties as specified in request body. Properties
    not mentioned in the request will be unchanged. Update fails if the
    specified container doesn't already exist.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param public_access: Specifies whether data in the container may be
     accessed publicly and the level of access. Possible values include:
     'Container', 'Blob', 'None'
    :type public_access: str or
     ~azure.mgmt.storage.v2019_04_01.models.PublicAccess
    :param metadata: A name-value pair to associate with the container as
     metadata.
    :type metadata: dict[str, str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: BlobContainer or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the loose parameters into the request-body model.
    blob_container = models.BlobContainer(public_access=public_access, metadata=metadata)

    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.update.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # Standard JSON request headers, plus optional tracing/locale headers.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the body and issue the PATCH request.
    body_content = self._serialize.body(blob_container, 'BlobContainer')
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything other than 200 is surfaced as a CloudError with request id.
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('BlobContainer', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'}
def get(
        self, resource_group_name, account_name, container_name, custom_headers=None, raw=False, **operation_config):
    """Gets properties of a specified container.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: BlobContainer or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.get.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # Request headers: JSON response expected; no body on a GET.
    header_parameters = {
        'Accept': 'application/json',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET request.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything other than 200 is surfaced as a CloudError with request id.
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('BlobContainer', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'}
def delete(
        self, resource_group_name, account_name, container_name, custom_headers=None, raw=False, **operation_config):
    """Deletes specified container under its account.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.delete.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # No Accept/Content-Type: the operation has no request or response body.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the DELETE request.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # 200 and 204 both mean success; anything else raises CloudError.
    if response.status_code not in [200, 204]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'}
def set_legal_hold(
        self, resource_group_name, account_name, container_name, tags, custom_headers=None, raw=False, **operation_config):
    """Sets legal hold tags. Setting the same tag results in an idempotent
    operation. SetLegalHold follows an append pattern and does not clear
    out the existing tags that are not specified in the request.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param tags: Each tag should be 3 to 23 alphanumeric characters and is
     normalized to lower case at SRP.
    :type tags: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: LegalHold or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.storage.v2019_04_01.models.LegalHold or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the tag list into the request-body model.
    legal_hold = models.LegalHold(tags=tags)

    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.set_legal_hold.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # Standard JSON request headers, plus optional tracing/locale headers.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the body and issue the POST request.
    body_content = self._serialize.body(legal_hold, 'LegalHold')
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything other than 200 is surfaced as a CloudError with request id.
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('LegalHold', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
set_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold'}
def clear_legal_hold(
        self, resource_group_name, account_name, container_name, tags, custom_headers=None, raw=False, **operation_config):
    """Clears legal hold tags. Clearing the same or non-existent tag results
    in an idempotent operation. ClearLegalHold clears out only the
    specified tags in the request.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param tags: Each tag should be 3 to 23 alphanumeric characters and is
     normalized to lower case at SRP.
    :type tags: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: LegalHold or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.storage.v2019_04_01.models.LegalHold or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the tag list into the request-body model.
    legal_hold = models.LegalHold(tags=tags)

    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.clear_legal_hold.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # Standard JSON request headers, plus optional tracing/locale headers.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the body and issue the POST request.
    body_content = self._serialize.body(legal_hold, 'LegalHold')
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything other than 200 is surfaced as a CloudError with request id.
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('LegalHold', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
clear_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold'}
def create_or_update_immutability_policy(
        self, resource_group_name, account_name, container_name, immutability_period_since_creation_in_days, if_match=None, custom_headers=None, raw=False, **operation_config):
    """Creates or updates an unlocked immutability policy. ETag in If-Match is
    honored if given but not required for this operation.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param immutability_period_since_creation_in_days: The immutability
     period for the blobs in the container since the policy creation, in
     days.
    :type immutability_period_since_creation_in_days: int
    :param if_match: The entity state (ETag) version of the immutability
     policy to update. A value of "*" can be used to apply the operation
     only if the immutability policy already exists. If omitted, this
     operation will always be applied.
    :type if_match: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImmutabilityPolicy or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Only build a body model when a period was supplied; otherwise the
    # request goes out with no body at all.
    parameters = None
    if immutability_period_since_creation_in_days is not None:
        parameters = models.ImmutabilityPolicy(immutability_period_since_creation_in_days=immutability_period_since_creation_in_days)

    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'immutabilityPolicyName': self._serialize.url("self.immutability_policy_name", self.immutability_policy_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.create_or_update_immutability_policy.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # Standard JSON request headers; If-Match is optional for this call.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the (possibly absent) body and issue the PUT request.
    if parameters is not None:
        body_content = self._serialize.body(parameters, 'ImmutabilityPolicy')
    else:
        body_content = None
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything other than 200 is surfaced as a CloudError with request id.
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    header_dict = {}
    if response.status_code == 200:
        deserialized = self._deserialize('ImmutabilityPolicy', response)
        # Expose the policy's ETag response header on the raw response.
        header_dict = {
            'ETag': 'str',
        }

    if raw:
        raw_response = ClientRawResponse(deserialized, response)
        raw_response.add_headers(header_dict)
        return raw_response
    return deserialized
create_or_update_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'}
def get_immutability_policy(
        self, resource_group_name, account_name, container_name, if_match=None, custom_headers=None, raw=False, **operation_config):
    """Gets the existing immutability policy along with the corresponding ETag
    in response headers and body.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param if_match: The entity state (ETag) version of the immutability
     policy to update. A value of "*" can be used to apply the operation
     only if the immutability policy already exists. If omitted, this
     operation will always be applied.
    :type if_match: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImmutabilityPolicy or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'immutabilityPolicyName': self._serialize.url("self.immutability_policy_name", self.immutability_policy_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.get_immutability_policy.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # Request headers; If-Match is optional for this call.
    header_parameters = {
        'Accept': 'application/json',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET request.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything other than 200 is surfaced as a CloudError with request id.
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    header_dict = {}
    if response.status_code == 200:
        deserialized = self._deserialize('ImmutabilityPolicy', response)
        # Expose the policy's ETag response header on the raw response.
        header_dict = {
            'ETag': 'str',
        }

    if raw:
        raw_response = ClientRawResponse(deserialized, response)
        raw_response.add_headers(header_dict)
        return raw_response
    return deserialized
get_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'}
def delete_immutability_policy(
        self, resource_group_name, account_name, container_name, if_match, custom_headers=None, raw=False, **operation_config):
    """Aborts an unlocked immutability policy. The response of delete has
    immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is
    required for this operation. Deleting a locked immutability policy is
    not allowed, only way is to delete the container after deleting all
    blobs inside the container.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the
     specified resource group. Storage account names must be between 3 and
     24 characters in length and use numbers and lower-case letters only.
    :type account_name: str
    :param container_name: The name of the blob container within the
     specified storage account. Blob container names must be between 3 and
     63 characters in length and use numbers, lower-case letters and dash
     (-) only. Every dash (-) character must be immediately preceded and
     followed by a letter or number.
    :type container_name: str
    :param if_match: The entity state (ETag) version of the immutability
     policy to update. A value of "*" can be used to apply the operation
     only if the immutability policy already exists. If omitted, this
     operation will always be applied.
    :type if_match: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ImmutabilityPolicy or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template with validated, URL-encoded path segments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
        'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
        'immutabilityPolicyName': self._serialize.url("self.immutability_policy_name", self.immutability_policy_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
    }
    url = self._client.format_url(self.delete_immutability_policy.metadata['url'], **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1),
    }

    # Request headers; If-Match is mandatory for this operation, so it is
    # set unconditionally (after custom headers, so it cannot be overridden).
    header_parameters = {
        'Accept': 'application/json',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the DELETE request.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything other than 200 is surfaced as a CloudError with request id.
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    header_dict = {}
    if response.status_code == 200:
        deserialized = self._deserialize('ImmutabilityPolicy', response)
        # Expose the policy's ETag response header on the raw response.
        header_dict = {
            'ETag': 'str',
        }

    if raw:
        raw_response = ClientRawResponse(deserialized, response)
        raw_response.add_headers(header_dict)
        return raw_response
    return deserialized
delete_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'}
def lock_immutability_policy(
self, resource_group_name, account_name, container_name, if_match, custom_headers=None, raw=False, **operation_config):
"""Sets the ImmutabilityPolicy to Locked state. The only action allowed on
a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is
required for this operation.
:param resource_group_name: The name of the resource group within the
user's subscription. The name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the
specified resource group. Storage account names must be between 3 and
24 characters in length and use numbers and lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the
specified storage account. Blob container names must be between 3 and
63 characters in length and use numbers, lower-case letters and dash
(-) only. Every dash (-) character must be immediately preceded and
followed by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability
policy to update. A value of "*" can be used to apply the operation
only if the immutability policy already exists. If omitted, this
operation will always be applied.
:type if_match: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ImmutabilityPolicy or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.lock_immutability_policy.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
header_dict = {}
if response.status_code == 200:
deserialized = self._deserialize('ImmutabilityPolicy', response)
header_dict = {
'ETag': 'str',
}
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
client_raw_response.add_headers(header_dict)
return client_raw_response
return deserialized
lock_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock'}
def extend_immutability_policy(
self, resource_group_name, account_name, container_name, if_match, immutability_period_since_creation_in_days, custom_headers=None, raw=False, **operation_config):
"""Extends the immutabilityPeriodSinceCreationInDays of a locked
immutabilityPolicy. The only action allowed on a Locked policy will be
this action. ETag in If-Match is required for this operation.
:param resource_group_name: The name of the resource group within the
user's subscription. The name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the
specified resource group. Storage account names must be between 3 and
24 characters in length and use numbers and lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the
specified storage account. Blob container names must be between 3 and
63 characters in length and use numbers, lower-case letters and dash
(-) only. Every dash (-) character must be immediately preceded and
followed by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability
policy to update. A value of "*" can be used to apply the operation
only if the immutability policy already exists. If omitted, this
operation will always be applied.
:type if_match: str
:param immutability_period_since_creation_in_days: The immutability
period for the blobs in the container since the policy creation, in
days.
:type immutability_period_since_creation_in_days: int
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ImmutabilityPolicy or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
parameters = None
if immutability_period_since_creation_in_days is not None:
parameters = models.ImmutabilityPolicy(immutability_period_since_creation_in_days=immutability_period_since_creation_in_days)
# Construct URL
url = self.extend_immutability_policy.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
if parameters is not None:
body_content = self._serialize.body(parameters, 'ImmutabilityPolicy')
else:
body_content = None
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
header_dict = {}
if response.status_code == 200:
deserialized = self._deserialize('ImmutabilityPolicy', response)
header_dict = {
'ETag': 'str',
}
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
client_raw_response.add_headers(header_dict)
return client_raw_response
return deserialized
extend_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend'}
def lease(
self, resource_group_name, account_name, container_name, parameters=None, custom_headers=None, raw=False, **operation_config):
"""The Lease Container operation establishes and manages a lock on a
container for delete operations. The lock duration can be 15 to 60
seconds, or can be infinite.
:param resource_group_name: The name of the resource group within the
user's subscription. The name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the
specified resource group. Storage account names must be between 3 and
24 characters in length and use numbers and lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the
specified storage account. Blob container names must be between 3 and
63 characters in length and use numbers, lower-case letters and dash
(-) only. Every dash (-) character must be immediately preceded and
followed by a letter or number.
:type container_name: str
:param parameters: Lease Container request body.
:type parameters:
~azure.mgmt.storage.v2019_04_01.models.LeaseContainerRequest
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: LeaseContainerResponse or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.storage.v2019_04_01.models.LeaseContainerResponse
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.lease.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
if parameters is not None:
body_content = self._serialize.body(parameters, 'LeaseContainerRequest')
else:
body_content = None
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('LeaseContainerResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lease.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease'}
| 53.863837 | 281 | 0.687508 | 7,123 | 60,920 | 5.699144 | 0.04675 | 0.029782 | 0.02722 | 0.012489 | 0.934672 | 0.931223 | 0.925311 | 0.921862 | 0.916468 | 0.915162 | 0 | 0.009857 | 0.222275 | 60,920 | 1,130 | 282 | 53.911504 | 0.846958 | 0.362935 | 0 | 0.83815 | 0 | 0.025048 | 0.200348 | 0.102263 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026975 | false | 0 | 0.007707 | 0 | 0.086705 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a1452305463d2827fe687928f67cd6104392e32 | 4,319 | py | Python | python/analysis/habernal_comparison/clean_data_metrics.py | UKPLab/tacl2018-preference-convincing | 65eb1cd3bf76f8068889880e0f80178e790350ce | [
"Apache-2.0"
] | 13 | 2019-03-01T19:40:23.000Z | 2022-01-10T05:53:47.000Z | python/analysis/habernal_comparison/clean_data_metrics.py | UKPLab/tacl2018-preference-convincing | 65eb1cd3bf76f8068889880e0f80178e790350ce | [
"Apache-2.0"
] | 12 | 2020-11-13T17:54:01.000Z | 2022-02-09T23:39:11.000Z | python/analysis/habernal_comparison/clean_data_metrics.py | UKPLab/tacl2018-preference-convincing | 65eb1cd3bf76f8068889880e0f80178e790350ce | [
"Apache-2.0"
] | 5 | 2019-02-06T12:08:20.000Z | 2022-01-10T20:40:22.000Z | import os
from compute_metrics import compute_metrics
if __name__ == '__main__':
    # expt_settings may be pre-populated by a driver script before this file
    # runs; only build the defaults when it is absent from globals().
    if 'expt_settings' not in globals():
        expt_settings = {}
        expt_settings['dataset'] = None
        expt_settings['folds'] = None
        expt_settings['foldorderfile'] = None

    npairs = 0
    acc = 1.0  # unused here; kept for parity with the original script's globals
    di = 0
    max_no_folds = 32

    # The original script repeated the same call six times with only the
    # dataset/method/feature-type combination changing.  Build that table once:
    # three feature configurations for the classification task
    # (UKPConvArgStrict) followed by the same three for the ranking task
    # (UKPConvArgAll).
    experiments = []
    for dataset in ['UKPConvArgStrict', 'UKPConvArgAll']:
        experiments.extend([
            ('%s, ling features' % dataset, [dataset],
             ['SVM', 'SinglePrefGP_noOpt_weaksprior'], ['ling']),
            ('%s, embeddings features' % dataset, [dataset],
             ['BI-LSTM', 'SinglePrefGP_noOpt_weaksprior'], ['embeddings']),
            ('%s, ling+Glove features' % dataset, [dataset],
             ['SVM', 'BI-LSTM', 'SinglePrefGP_noOpt_weaksprior', 'SinglePrefGP_weaksprior',
              'SingleGPC_noOpt_weaksprior', 'GP+SVM'], ['both']),
        ])

    for heading, datasets, methods, feature_types in experiments:
        print('*** Performance metrics for %s ***' % heading)
        # Only the side effects of compute_metrics matter here: the returned
        # metric tuples were assigned but never consumed by this script.
        compute_metrics(expt_settings, methods, datasets, feature_types,
                        ['word_mean'], di=di, npairs=npairs,
                        max_fold_no=max_no_folds)

    print("Completed compute metrics")
| 41.528846 | 112 | 0.719148 | 498 | 4,319 | 5.829317 | 0.128514 | 0.074406 | 0.060627 | 0.053738 | 0.886669 | 0.841199 | 0.807785 | 0.807785 | 0.807785 | 0.807785 | 0 | 0.00508 | 0.179671 | 4,319 | 103 | 113 | 41.932039 | 0.814282 | 0.007872 | 0 | 0.724638 | 0 | 0 | 0.225415 | 0.063537 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.028986 | 0 | 0.028986 | 0.101449 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a26b50cd407fb12e782a99cc98df113a2198f24 | 42 | py | Python | packages/pyright-internal/src/tests/samples/package1/sub.py | sasano8/pyright | e804f324ee5dbd25fd37a258791b3fd944addecd | [
"MIT"
] | 4,391 | 2019-05-07T01:18:57.000Z | 2022-03-31T20:45:44.000Z | packages/pyright-internal/src/tests/samples/package1/sub.py | sasano8/pyright | e804f324ee5dbd25fd37a258791b3fd944addecd | [
"MIT"
] | 2,740 | 2019-05-07T03:29:30.000Z | 2022-03-31T12:57:46.000Z | packages/pyright-internal/src/tests/samples/package1/sub.py | sasano8/pyright | e804f324ee5dbd25fd37a258791b3fd944addecd | [
"MIT"
] | 455 | 2019-05-07T12:55:14.000Z | 2022-03-31T17:09:15.000Z |
def subfoo() -> str:
    """Return the fixed greeting string ``'hello'``."""
    greeting = 'hello'
    return greeting
| 8.4 | 20 | 0.547619 | 5 | 42 | 4.6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.285714 | 42 | 4 | 21 | 10.5 | 0.766667 | 0 | 0 | 0 | 0 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
dbe501d9160b99b78b862c3aa7b939fc3b067686 | 4,177 | py | Python | Proyecto/compilation/ast/relations.py | leynier/IA-Sim-Com | f6e99bb1aa4b02d5d558dc76a9bf802c3761e428 | [
"MIT"
] | null | null | null | Proyecto/compilation/ast/relations.py | leynier/IA-Sim-Com | f6e99bb1aa4b02d5d558dc76a9bf802c3761e428 | [
"MIT"
] | 1 | 2022-02-11T07:26:54.000Z | 2022-02-11T07:26:54.000Z | Proyecto/compilation/ast/relations.py | leynier/IA-Sim-Com | f6e99bb1aa4b02d5d558dc76a9bf802c3761e428 | [
"MIT"
] | null | null | null | from compilation.ast.operations import BinOp
from compilation.ast.nodes import Node
from compilation.context import Context
from compilation.errors import CheckTypesError
class Rel(BinOp):
    """Base node for the binary relational operators (==, !=, <, <=, >, >=)."""

    def __init__(self, left_node: Node, right_node: Node):
        super().__init__(left_node, right_node)

    def checktype(self, context):
        """Type-check both operands and require them to share one type.

        Returns True on success, or the first CheckTypesError encountered
        (either from an operand or from a type mismatch between them).
        """
        left_type = self.left_node.checktype(context)
        if isinstance(left_type, CheckTypesError):
            return left_type
        right_type = self.right_node.checktype(context)
        if isinstance(right_type, CheckTypesError):
            return right_type
        if left_type == right_type:
            return True
        return CheckTypesError("cannot compare expressions with different types", "", -1, -1)

    @staticmethod
    def type() -> str:
        # NOTE(review): the base class reports "EQ", the same tag as EqRel —
        # presumably a copy-paste default; confirm the intended tag.
        return "EQ"
class EqRel(Rel):
    """Equality relation: evaluates to ``left == right``."""

    def __init__(self, left_node: Node, right_node: Node):
        super().__init__(left_node, right_node)

    def eval(self, context: Context):
        """Evaluate both operands, propagating a RuntimeError from either side."""
        lhs = self.left_node.eval(context)
        if isinstance(lhs, RuntimeError):
            return lhs
        rhs = self.right_node.eval(context)
        if isinstance(rhs, RuntimeError):
            return rhs
        return bool(lhs == rhs)

    @staticmethod
    def type() -> str:
        return "EQ"
class NeqRel(Rel):
    """Inequality relation: evaluates to ``left != right``."""

    def __init__(self, left_node: Node, right_node: Node):
        super().__init__(left_node, right_node)

    def eval(self, context: Context):
        """Evaluate both operands, propagating a RuntimeError from either side."""
        lhs = self.left_node.eval(context)
        if isinstance(lhs, RuntimeError):
            return lhs
        rhs = self.right_node.eval(context)
        if isinstance(rhs, RuntimeError):
            return rhs
        return bool(lhs != rhs)

    @staticmethod
    def type() -> str:
        return "NEQ"
class LessRel(Rel):
    """Strict less-than relation: evaluates to ``left < right``."""

    def __init__(self, left_node: Node, right_node: Node):
        super().__init__(left_node, right_node)

    def eval(self, context: Context):
        """Evaluate both operands, propagating a RuntimeError from either side."""
        lhs = self.left_node.eval(context)
        if isinstance(lhs, RuntimeError):
            return lhs
        rhs = self.right_node.eval(context)
        if isinstance(rhs, RuntimeError):
            return rhs
        return bool(lhs < rhs)

    @staticmethod
    def type() -> str:
        return "LESS"
class LeqRel(Rel):
    """Less-than-or-equal relation: evaluates to ``left <= right``."""

    def __init__(self, left_node: Node, right_node: Node):
        super().__init__(left_node, right_node)

    def eval(self, context: Context):
        """Evaluate both operands, propagating a RuntimeError from either side."""
        lhs = self.left_node.eval(context)
        if isinstance(lhs, RuntimeError):
            return lhs
        rhs = self.right_node.eval(context)
        if isinstance(rhs, RuntimeError):
            return rhs
        return bool(lhs <= rhs)

    @staticmethod
    def type() -> str:
        return "LEQ"
class GreatRel(Rel):
    """Strict greater-than relation: evaluates to ``left > right``."""

    def __init__(self, left_node: Node, right_node: Node):
        super().__init__(left_node, right_node)

    def eval(self, context: Context):
        """Evaluate both operands, propagating a RuntimeError from either side."""
        lhs = self.left_node.eval(context)
        if isinstance(lhs, RuntimeError):
            return lhs
        rhs = self.right_node.eval(context)
        if isinstance(rhs, RuntimeError):
            return rhs
        return bool(lhs > rhs)

    @staticmethod
    def type() -> str:
        return "GREAT"
class GreqRel(Rel):
    """Greater-than-or-equal relation: evaluates to ``left >= right``."""

    def __init__(self, left_node: Node, right_node: Node):
        super().__init__(left_node, right_node)

    def eval(self, context: Context):
        """Evaluate both operands, propagating a RuntimeError from either side."""
        lhs = self.left_node.eval(context)
        if isinstance(lhs, RuntimeError):
            return lhs
        rhs = self.right_node.eval(context)
        if isinstance(rhs, RuntimeError):
            return rhs
        return bool(lhs >= rhs)

    @staticmethod
    def type() -> str:
        return "GREQ"
| 26.605096 | 93 | 0.609289 | 455 | 4,177 | 5.378022 | 0.118681 | 0.068656 | 0.068656 | 0.083367 | 0.805476 | 0.779322 | 0.779322 | 0.762158 | 0.762158 | 0.762158 | 0 | 0.003431 | 0.302131 | 4,177 | 156 | 94 | 26.775641 | 0.836021 | 0 | 0 | 0.745902 | 0 | 0 | 0.016758 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.172131 | false | 0 | 0.032787 | 0.057377 | 0.54918 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
e001c8f32ec3d860ddaf337a0b0b2b1628990230 | 124,121 | py | Python | tests/adapters/switches/juniper_mx_test.py | FrancoisLopez/netman | a40d3235f7ea0cdaf52daab97b0d5ad20857b00e | [
"Apache-2.0"
] | 38 | 2015-11-30T10:11:42.000Z | 2022-02-10T18:31:44.000Z | tests/adapters/switches/juniper_mx_test.py | FrancoisLopez/netman | a40d3235f7ea0cdaf52daab97b0d5ad20857b00e | [
"Apache-2.0"
] | 143 | 2015-12-10T19:00:42.000Z | 2020-08-20T13:51:42.000Z | tests/adapters/switches/juniper_mx_test.py | FrancoisLopez/netman | a40d3235f7ea0cdaf52daab97b0d5ad20857b00e | [
"Apache-2.0"
] | 15 | 2015-12-14T23:03:30.000Z | 2019-01-15T19:35:45.000Z | # Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import textwrap
import unittest
from flexmock import flexmock, flexmock_teardown
from hamcrest import assert_that, equal_to, instance_of, contains_string, has_length, is_
from ncclient.operations import RPCError
from ncclient.xml_ import to_ele
from netaddr import IPAddress, IPNetwork
from netman.adapters.switches.juniper.base import Juniper
from netman.adapters.switches.juniper.mx import netconf
from netman.core.objects.access_groups import IN, OUT
from netman.core.objects.exceptions import VlanAlreadyExist, BadVlanNumber, BadVlanName, UnknownVlan, \
IPAlreadySet, UnknownIP, InterfaceInWrongPortMode, AccessVlanNotSet, UnknownInterface, TrunkVlanNotSet, \
VrrpDoesNotExistForVlan
from netman.core.objects.port_modes import ACCESS
from netman.core.objects.switch_descriptor import SwitchDescriptor
from netman.core.switch_factory import RealSwitchFactory
from tests.adapters.switches.juniper_test import an_ok_response, is_xml, a_configuration, an_rpc_response
def test_factory():
    """The switch factory maps model 'juniper_mx' to a Juniper adapter carrying its descriptor."""
    descriptor = SwitchDescriptor(hostname='hostname', model='juniper_mx',
                                  username='username', password='password', port=22)

    switch = RealSwitchFactory().get_switch_by_descriptor(descriptor)

    assert_that(switch, instance_of(Juniper))

    resulting_descriptor = switch.switch_descriptor
    assert_that(resulting_descriptor.hostname, equal_to("hostname"))
    assert_that(resulting_descriptor.model, equal_to("juniper_mx"))
    assert_that(resulting_descriptor.username, equal_to("username"))
    assert_that(resulting_descriptor.password, equal_to("password"))
    assert_that(resulting_descriptor.port, equal_to(22))
class JuniperMXTest(unittest.TestCase):
def setUp(self):
    """Build a juniper_mx adapter whose netconf client is a flexmock stub."""
    descriptor = SwitchDescriptor(model='juniper_mx', hostname="toto")
    self.switch = netconf(descriptor)

    self.netconf_mock = flexmock()
    self.switch.netconf = self.netconf_mock
    self.switch.in_transaction = True
def tearDown(self):
    # Verifies every expectation recorded on the flexmock objects during the
    # test and restores any patched attributes.
    flexmock_teardown()
def test_add_vlan(self):
    # add_vlan first reads bridge-domains from the candidate config (MX uses
    # bridge-domains rather than the vlans stanza) to check the id is free,
    # then pushes a new domain named VLAN<id> with the given description.
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>900</vlan-id>
</domain>
</bridge-domains>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain>
<name>VLAN1000</name>
<vlan-id>1000</vlan-id>
<description>Shizzle</description>
</domain>
</bridge-domains>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.add_vlan(1000, name="Shizzle")
def test_add_vlan_already_in_use_raises(self):
    # The candidate config already holds vlan-id 1000 under a different
    # domain name, so add_vlan must raise VlanAlreadyExist without pushing
    # any edit_config.
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(VlanAlreadyExist) as expect:
        self.switch.add_vlan(1000)
    assert_that(str(expect.exception), contains_string("Vlan 1000 already exist"))
def test_add_existing_vlan_raises(self):
    # Same as the previous case but the existing domain also carries the
    # generated name VLAN1000: still VlanAlreadyExist, still no edit_config.
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN1000</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(VlanAlreadyExist) as expect:
        self.switch.add_vlan(1000)
    assert_that(str(expect.exception), contains_string("Vlan 1000 already exist"))
def test_add_vlan_bad_vlan_id(self):
    # The device rejects vlan-id 9000 (valid range 1..4094) with an RPCError;
    # the adapter must translate that into BadVlanNumber.
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration(""))
    # NOTE(review): the ">>" after base:1.0 in the simulated rpc-error below
    # looks like a typo in the fixture, but it is harmless to the XML parser.
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain>
<name>VLAN9000</name>
<vlan-id>9000</vlan-id>
</domain>
</bridge-domains>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/15.1R4/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">>
<error-severity>error</error-severity>
<error-info>
<bad-element>domain</bad-element>
</error-info>
<error-message>Value 9000 is not within range (1..4094)</error-message>
</rpc-error>
"""))))
    with self.assertRaises(BadVlanNumber) as expect:
        self.switch.add_vlan(9000)
    assert_that(str(expect.exception), equal_to("Vlan number is invalid"))
def test_add_vlan_empty_vlan_name(self):
    # An empty name produces an empty <description/> element; the device
    # rejects it and the adapter must raise BadVlanName.
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration(""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain>
<name>VLAN1000</name>
<vlan-id>1000</vlan-id>
<description></description>
</domain>
</bridge-domains>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:junos="http://xml.juniper.net/junos/15.1R4/junos"
xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-type>protocol</error-type>
<error-tag>operation-failed</error-tag>
<error-severity>error</error-severity>
<error-message>description: '': Must be a string of 255 characters or less</error-message>
<error-info>
<bad-element>domain</bad-element>
</error-info>
</rpc-error>
"""))))
    with self.assertRaises(BadVlanName) as expect:
        self.switch.add_vlan(1000, "")
    assert_that(str(expect.exception), equal_to("Vlan name is invalid"))
def test_add_vlan_too_long_vlan_name(self):
    # A 256-character name exceeds the 255-character description limit; the
    # device's RPCError must again be translated to BadVlanName.
    long_string = 'a' * 256
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration(""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain>
<name>VLAN1000</name>
<vlan-id>1000</vlan-id>
<description>{}</description>
</domain>
</bridge-domains>
</configuration>
</config>
""".format(long_string))).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:junos="http://xml.juniper.net/junos/15.1R4/junos"
xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-type>protocol</error-type>
<error-tag>operation-failed</error-tag>
<error-severity>error</error-severity>
<error-message>description: '{}': Must be a string of 255 characters or less</error-message>
<error-info>
<bad-element>domain</bad-element>
</error-info>
</rpc-error>
""".format(long_string)))))
    with self.assertRaises(BadVlanName) as expect:
        self.switch.add_vlan(1000, long_string)
    assert_that(str(expect.exception), equal_to("Vlan name is invalid"))
def test_add_vlan_raises_RPCError(self):
"""An RPC error that is not a known description problem propagates unchanged as RPCError."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration(""))
# Unlike the BadVlanName tests, the error-message here is not one the driver translates.
self.netconf_mock.should_receive("edit_config").once().and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:junos="http://xml.juniper.net/junos/15.1R4/junos"
xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-type>protocol</error-type>
<error-tag>operation-failed</error-tag>
<error-severity>error</error-severity>
<error-message>There's another problem</error-message>
<error-info>
<bad-element>domain</bad-element>
</error-info>
</rpc-error>
"""))))
with self.assertRaises(RPCError):
self.switch.add_vlan(1000, 'a' * 256)
def test_remove_vlan_ignores_removing_interface_not_created(self):
"""Removing a vlan with no routing-interface only deletes the bridge-domain."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
</domain>
</bridge-domains>
"""))
# Expected edit: delete only the domain; no <interfaces> section is touched.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain operation="delete">
<name>STANDARD</name>
</domain>
</bridge-domains>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_remove_vlan_invalid_vlan_raises(self):
    """remove_vlan on a vlan id absent from the candidate config raises UnknownVlan."""
    vlans_and_interfaces_filter = """
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
"""
    existing_config = a_configuration("""
<bridge-domains>
<domain>
<name>ANOTHER</name>
<vlan-id>10</vlan-id>
</domain>
</bridge-domains>
""")
    self.netconf_mock.should_receive("get_config") \
        .with_args(source="candidate", filter=is_xml(vlans_and_interfaces_filter)) \
        .and_return(existing_config)

    with self.assertRaises(UnknownVlan) as expect:
        self.switch.remove_vlan(20)

    assert_that(str(expect.exception), equal_to("Vlan 20 not found"))
def test_remove_vlan_also_removes_associated_interface(self):
"""Removing a vlan whose domain has a routing-interface also deletes that irb unit."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains/>
<interfaces/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>MEH</name>
<vlan-id>5</vlan-id>
</domain>
<domain>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<routing-interface>irb.25</routing-interface>
</domain>
<domain>
<name>MEH2</name>
<vlan-id>15</vlan-id>
</domain>
</bridge-domains>
"""))
# Expected edit: delete the STANDARD domain and the irb.25 unit it references.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain operation="delete">
<name>STANDARD</name>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit operation="delete">
<name>25</name>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_remove_vlan_in_use_deletes_all_usages(self):
"""Removing a vlan strips it from trunk vlan-id-lists (splitting ranges) and access vlan-id members."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>9</vlan-id-list>
<vlan-id-list>10</vlan-id-list>
<vlan-id-list>11</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/2</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>9-15</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/3</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id-list>12</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/4</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>STANDARD</vlan-id>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/5</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>ANOTHER_NAME</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
# Expected edit: remove the exact member on xe-0/0/1, split the 9-15 range on
# xe-0/0/2 into 9 and 11-15, delete the STANDARD access membership on xe-0/0/4;
# xe-0/0/3 (vlan 12) and xe-0/0/5 (other domain) are untouched.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain operation="delete">
<name>STANDARD</name>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id-list operation="delete">10</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/2</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id-list operation="delete">9-15</vlan-id-list>
<vlan-id-list>9</vlan-id-list>
<vlan-id-list>11-15</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/4</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id operation="delete">STANDARD</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_remove_vlan_delete_usage_and_interface_at_same_time(self):
"""Removing a vlan deletes its irb unit and its trunk membership in the same edit."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<routing-interface>irb.10</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>name</name>
<unit>
<name>10</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
<port-mode>trunk</port-mode>
<vlan-id-list>10</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
# Expected edit: one config carrying both the irb.10 unit deletion and the
# vlan-id-list removal on xe-0/0/1.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain operation="delete">
<name>STANDARD</name>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit operation="delete">
<name>10</name>
</unit>
</interface>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id-list operation="delete">10</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_get_vlans(self):
"""get_vlans maps domains to vlan objects (name from description, ACLs/ips from the irb unit), skipping domains without a vlan-id, and returns ips sorted."""
self.switch.in_transaction = False  # forces a read from the running datastore
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<description>my-description</description>
</domain>
<domain>
<name>NO-VLAN-ID</name>
<description>shizzle</description>
</domain>
<domain>
<name>WITH-IF</name>
<vlan-id>20</vlan-id>
<routing-interface>irb.20</routing-interface>
</domain>
<domain>
<name>WITH-IF-MULTI-IP</name>
<vlan-id>40</vlan-id>
<routing-interface>irb.70</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
<filter>
<input>
<filter-name>AC-IN</filter-name>
</input>
<output>
<filter-name>AC-OUT</filter-name>
</output>
</filter>
</inet>
</family>
</unit>
<unit>
<name>40</name>
</unit>
<unit>
<name>70</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
<address>
<name>4.1.1.1/24</name>
</address>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# NO-VLAN-ID has no vlan-id and is not returned; only vlans 10, 20 and 40 come back.
vlan10, vlan20, vlan40 = self.switch.get_vlans()
assert_that(vlan10.number, equal_to(10))
assert_that(vlan10.name, equal_to("my-description"))
assert_that(vlan10.access_groups[IN], equal_to(None))
assert_that(vlan10.access_groups[OUT], equal_to(None))
assert_that(vlan10.ips, has_length(0))
assert_that(vlan20.number, equal_to(20))
assert_that(vlan20.name, equal_to(None))
assert_that(vlan20.access_groups[IN], equal_to("AC-IN"))
assert_that(vlan20.access_groups[OUT], equal_to("AC-OUT"))
assert_that(vlan20.ips, has_length(1))
vlan20ip1 = vlan20.ips[0]
assert_that(str(vlan20ip1.ip), equal_to("1.1.1.1"))
assert_that(vlan20ip1.prefixlen, equal_to(24))
assert_that(vlan40.number, equal_to(40))
assert_that(vlan40.name, equal_to(None))
assert_that(vlan40.access_groups[IN], equal_to(None))
assert_that(vlan40.access_groups[OUT], equal_to(None))
# irb.70 addresses come back ordered (2, 3, 4) even though the config lists 2, 4, 3.
vlan40ip1, vlan40ip2, vlan40ip3 = vlan40.ips
assert_that(str(vlan40ip1.ip), equal_to("2.1.1.1"))
assert_that(vlan40ip1.prefixlen, equal_to(24))
assert_that(str(vlan40ip2.ip), equal_to("3.1.1.1"))
assert_that(vlan40ip2.prefixlen, equal_to(24))
assert_that(str(vlan40ip3.ip), equal_to("4.1.1.1"))
assert_that(vlan40ip3.prefixlen, equal_to(24))
def test_get_vlan_with_interface_multi_ip(self):
"""get_vlan(40) picks the irb.70 unit named by its routing-interface and returns its addresses sorted."""
self.switch.in_transaction = False  # forces a read from the running datastore
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>This-another-clam</name>
<vlan-id>39</vlan-id>
<routing-interface>irb.20</routing-interface>
</domain>
<domain>
<name>WITH-IF-MULTI-IP</name>
<vlan-id>40</vlan-id>
<routing-interface>irb.70</routing-interface>
</domain>
<domain>
<name>This-yet-another-clam</name>
<vlan-id>41</vlan-id>
<routing-interface>irb.40</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/1</name>
</interface>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
<filter>
<input>
<filter-name>AC-IN</filter-name>
</input>
<output>
<filter-name>AC-OUT</filter-name>
</output>
</filter>
</inet>
</family>
</unit>
<unit>
<name>40</name>
</unit>
<unit>
<name>70</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
<address>
<name>4.1.1.1/24</name>
</address>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(40)
assert_that(vlan.number, equal_to(40))
assert_that(vlan.name, equal_to(None))
assert_that(vlan.access_groups[IN], equal_to(None))
assert_that(vlan.access_groups[OUT], equal_to(None))
assert_that(vlan.icmp_redirects, equal_to(True))
# Addresses are returned ordered (2, 3, 4) even though listed as 2, 4, 3.
vlanip1, vlanip2, vlanip3 = vlan.ips
assert_that(str(vlanip1.ip), equal_to("2.1.1.1"))
assert_that(vlanip1.prefixlen, equal_to(24))
assert_that(str(vlanip2.ip), equal_to("3.1.1.1"))
assert_that(vlanip2.prefixlen, equal_to(24))
assert_that(str(vlanip3.ip), equal_to("4.1.1.1"))
assert_that(vlanip3.prefixlen, equal_to(24))
def test_get_vlan_with_no_interface(self):
    """A vlan without a routing-interface yields no ACLs, no ips, and icmp redirects enabled."""
    self.switch.in_transaction = False

    vlans_and_interfaces_filter = """
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
"""
    running_config = a_configuration("""
<bridge-domains>
<domain>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<description>my-description</description>
</domain>
</bridge-domains>
""")
    self.netconf_mock.should_receive("get_config") \
        .with_args(source="running", filter=is_xml(vlans_and_interfaces_filter)) \
        .and_return(running_config)

    vlan = self.switch.get_vlan(10)

    assert_that(vlan.number, equal_to(10))
    assert_that(vlan.name, equal_to("my-description"))
    for direction in (IN, OUT):
        assert_that(vlan.access_groups[direction], equal_to(None))
    assert_that(vlan.icmp_redirects, equal_to(True))
    assert_that(vlan.ips, has_length(0))
def test_get_vlan_with_unknown_vlan(self):
    """get_vlan on an id absent from the running config raises UnknownVlan."""
    self.switch.in_transaction = False

    running_config = a_configuration("""
<bridge-domains>
<domain>
<name>This-another-clam</name>
<vlan-id>39</vlan-id>
</domain>
</bridge-domains>
""")
    self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(running_config)

    with self.assertRaises(UnknownVlan) as expect:
        self.switch.get_vlan(10)

    assert_that(str(expect.exception), equal_to("Vlan 10 not found"))
def test_get_vlan_with_interface(self):
"""get_vlan(20) reads ACL filter names, the address and the no-redirects flag from irb.20."""
self.switch.in_transaction = False  # forces a read from the running datastore
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>WITH-IF</name>
<vlan-id>20</vlan-id>
<routing-interface>irb.20</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
</bridge>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/1</name>
</interface>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<no-redirects />
<address>
<name>1.1.1.1/24</name>
</address>
<filter>
<input>
<filter-name>AC-IN</filter-name>
</input>
<output>
<filter-name>AC-OUT</filter-name>
</output>
</filter>
</inet>
</family>
</unit>
<unit>
<name>40</name>
</unit>
<unit>
<name>70</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
<address>
<name>4.1.1.1/24</name>
</address>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(20)
assert_that(vlan.number, equal_to(20))
assert_that(vlan.name, equal_to(None))
assert_that(vlan.access_groups[IN], equal_to("AC-IN"))
assert_that(vlan.access_groups[OUT], equal_to("AC-OUT"))
# <no-redirects /> on irb.20 translates to icmp_redirects == False.
assert_that(vlan.icmp_redirects, equal_to(False))
assert_that(vlan.ips, has_length(1))
vlan20ip1 = vlan.ips[0]
assert_that(str(vlan20ip1.ip), equal_to("1.1.1.1"))
assert_that(vlan20ip1.prefixlen, equal_to(24))
def test_get_vlan_with_vrrp(self):
"""A fully-specified vrrp-group is parsed into id, vips, priority and route tracking."""
self.switch.in_transaction = False  # forces a read from the running datastore
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>WITH-IF</name>
<vlan-id>20</vlan-id>
<routing-interface>irb.20</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>1.1.1.2/24</name>
<vrrp-group>
<name>1</name>
<virtual-address>1.1.1.1</virtual-address>
<priority>90</priority>
<preempt>
<hold-time>60</hold-time>
</preempt>
<accept-data/>
<authentication-type>simple</authentication-type>
<authentication-key>$9$1/aElvwsgoaGz3reKvLX.Pf5n/</authentication-key>
<track>
<route>
<route_address>0.0.0.0/0</route_address>
<routing-instance>default</routing-instance>
<priority-cost>50</priority-cost>
</route>
</track>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(20)
vrrp = vlan.vrrp_groups[0]
assert_that(vrrp.id, is_(1))
assert_that(vrrp.ips, has_length(1))
assert_that(vrrp.ips[0], is_(IPAddress('1.1.1.1')))
assert_that(vrrp.priority, is_(90))
# hello/dead intervals are not expressed in this platform's config.
assert_that(vrrp.hello_interval, is_(None))
assert_that(vrrp.dead_interval, is_(None))
# track_id/track_decrement map from track/route/route_address and priority-cost.
assert_that(vrrp.track_id, is_("0.0.0.0/0"))
assert_that(vrrp.track_decrement, is_(50))
def test_get_vlan_with_vrrp_without_optional_fields_and_multiple_vips(self):
"""A minimal vrrp-group (id + two vips only) yields None for all optional attributes."""
self.switch.in_transaction = False  # forces a read from the running datastore
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<bridge-domains />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>WITH-IF</name>
<vlan-id>20</vlan-id>
<routing-interface>irb.20</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>1.1.1.2/24</name>
<vrrp-group>
<name>1</name>
<virtual-address>1.1.1.1</virtual-address>
<virtual-address>1.1.1.3</virtual-address>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(20)
vrrp = vlan.vrrp_groups[0]
assert_that(vrrp.id, is_(1))
assert_that(vrrp.ips, has_length(2))
assert_that(vrrp.ips[0], is_(IPAddress('1.1.1.1')))
assert_that(vrrp.ips[1], is_(IPAddress('1.1.1.3')))
assert_that(vrrp.priority, is_(None))
assert_that(vrrp.hello_interval, is_(None))
assert_that(vrrp.dead_interval, is_(None))
assert_that(vrrp.track_id, is_(None))
assert_that(vrrp.track_decrement, is_(None))
def test_add_vrrp_success(self):
"""add_vrrp_group writes a full vrrp-group (defaults + vip) under the matching address of irb.1234."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# Expected edit: preempt hold-time, accept-data, simple auth keyed VLAN<number>
# and the route tracking built from track_id/track_decrement, plus the vip.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
<vrrp-group>
<name>1</name>
<priority>110</priority>
<preempt>
<hold-time>60</hold-time>
</preempt>
<accept-data/>
<authentication-type>simple</authentication-type>
<authentication-key>VLAN1234</authentication-key>
<track>
<route>
<route_address>0.0.0.0/0</route_address>
<routing-instance>default</routing-instance>
<priority-cost>50</priority-cost>
</route>
</track>
<virtual-address>3.3.3.1</virtual-address>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.add_vrrp_group(1234, 1, ips=[IPAddress("3.3.3.1")], priority=110, track_id="0.0.0.0/0",
track_decrement=50)
def test_add_vrrp_multiple_ips(self):
"""add_vrrp_group with two vips emits one virtual-address element per ip."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
<vrrp-group>
<name>1</name>
<priority>110</priority>
<preempt>
<hold-time>60</hold-time>
</preempt>
<accept-data/>
<authentication-type>simple</authentication-type>
<authentication-key>VLAN1234</authentication-key>
<track>
<route>
<route_address>0.0.0.0/0</route_address>
<routing-instance>default</routing-instance>
<priority-cost>50</priority-cost>
</route>
</track>
<virtual-address>3.3.3.1</virtual-address>
<virtual-address>3.3.3.3</virtual-address>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.add_vrrp_group(1234, 1, ips=[IPAddress("3.3.3.1"), IPAddress("3.3.3.3")], priority=110,
track_id="0.0.0.0/0", track_decrement=50)
def test_add_vrrp_fails_when_vlan_not_found(self):
    """add_vrrp_group raises UnknownVlan when irb.1234 has no candidate config."""
    irb_unit_filter = """
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
"""
    self.netconf_mock.should_receive("get_config") \
        .with_args(source="candidate", filter=is_xml(irb_unit_filter)) \
        .and_return(a_configuration())

    with self.assertRaises(UnknownVlan):
        self.switch.add_vrrp_group(1234, 1, ips=[IPAddress("3.3.3.1")], priority=110,
                                   track_id="0.0.0.0/0", track_decrement=50)
def test_add_vrrp_adds_it_to_the_good_address(self):
"""With several addresses on irb.1234, the vrrp-group is attached to the one containing the vip."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
<address>
<name>4.4.4.2/27</name>
</address>
<address>
<name>5.5.5.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# 4.4.4.1 falls inside 4.4.4.2/27, so only that address appears in the edit.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>4.4.4.2/27</name>
<vrrp-group>
<name>1</name>
<priority>110</priority>
<preempt>
<hold-time>60</hold-time>
</preempt>
<accept-data/>
<authentication-type>simple</authentication-type>
<authentication-key>VLAN1234</authentication-key>
<track>
<route>
<route_address>0.0.0.0/0</route_address>
<routing-instance>default</routing-instance>
<priority-cost>50</priority-cost>
</route>
</track>
<virtual-address>4.4.4.1</virtual-address>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.add_vrrp_group(1234, 1, ips=[IPAddress("4.4.4.1")], priority=110, track_id="0.0.0.0/0",
track_decrement=50)
def test_add_vrrp_adds_it_if_all_ips_are_within_a_single_address(self):
    """Two vips inside the same /27 address are accepted and pushed in a single edit."""
    candidate_config = a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>1.1.1.1/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
""")
    self.netconf_mock.should_receive("get_config").and_return(candidate_config)
    self.netconf_mock.should_receive("edit_config").once()

    vips = [IPAddress("1.1.1.2"), IPAddress("1.1.1.3")]
    self.switch.add_vrrp_group(1234, 1, ips=vips, priority=110,
                               track_id="0.0.0.0/0", track_decrement=50)
def test_add_vrrp_fails_when_the_ips_doesnt_belong_to_an_existing_address(self):
"""A vip outside every configured address network raises UnknownIP."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# 4.4.4.1 is not inside 3.3.3.2/27, so no edit is attempted.
with self.assertRaises(UnknownIP):
self.switch.add_vrrp_group(1234, 1, ips=[IPAddress("4.4.4.1")], priority=110, track_id="0.0.0.0/0",
track_decrement=50)
def test_add_vrrp_fail_if_all_ips_are_not_in_the_same_address(self):
    """Vips spread over two different configured addresses are rejected with UnknownIP."""
    candidate_config = a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>1.1.1.1/27</name>
</address>
<address>
<name>2.2.2.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
""")
    self.netconf_mock.should_receive("get_config").and_return(candidate_config)

    vips = [IPAddress("1.1.1.2"), IPAddress("2.2.2.3")]
    with self.assertRaises(UnknownIP):
        self.switch.add_vrrp_group(1234, 1, ips=vips, priority=110,
                                   track_id="0.0.0.0/0", track_decrement=50)
def test_add_vrrp_fails_when_any_of_the_ips_doesnt_belong_to_an_existing_address(self):
"""If even one vip is outside the configured address, the whole call raises UnknownIP."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# 3.3.3.1 fits 3.3.3.2/27 but 4.4.4.1 does not -> rejected as a whole.
with self.assertRaises(UnknownIP):
self.switch.add_vrrp_group(1234, 1, ips=[IPAddress("3.3.3.1"), IPAddress("4.4.4.1")], priority=110, track_id="0.0.0.0/0",
track_decrement=50)
def test_remove_vrrp_success(self):
"""remove_vrrp_group deletes the vrrp-group (by name) under the address holding it."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>192.0.1.1/27</name>
<vrrp-group>
<name>1</name>
<virtual-address>192.0.1.2</virtual-address>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# Expected edit: operation="delete" on the vrrp-group, identified only by its name.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>192.0.1.1/27</name>
<vrrp-group operation="delete">
<name>1</name>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.remove_vrrp_group(1234, 1)
def test_remove_vrrp_with_invalid_group_id(self):
"""Removing a vrrp group id that is not configured raises VrrpDoesNotExistForVlan and performs no edit."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>192.0.1.1/27</name>
<vrrp-group>
<name>99</name>
<virtual-address>192.0.1.2</virtual-address>
</vrrp-group>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>"""))
# Only group 99 exists; asking for group 1 must not trigger any edit.
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(VrrpDoesNotExistForVlan) as expect:
self.switch.remove_vrrp_group(1234, 1)
assert_that(str(expect.exception), equal_to("Vrrp group 1 does not exist for vlan 1234"))
def test_remove_vrrp_from_unknown_vlan(self):
    """Removing a vrrp group from a vlan with no irb unit raises UnknownVlan and performs no edit."""
    irb_unit_filter = """
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
"""
    self.netconf_mock.should_receive("get_config") \
        .with_args(source="candidate", filter=is_xml(irb_unit_filter)) \
        .and_return(a_configuration())
    self.netconf_mock.should_receive("edit_config").never()

    with self.assertRaises(UnknownVlan) as expect:
        self.switch.remove_vrrp_group(1234, 2)

    assert_that(str(expect.exception), equal_to("Vlan 1234 not found"))
def test_add_ip_to_vlan(self):
"""add_ip_to_vlan wires the domain to irb.1234 and creates the inet address on that unit."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces/>
"""))
# Expected edit: add routing-interface irb.1234 on the domain and the new address on the unit.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
<routing-interface>irb.1234</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.add_ip_to_vlan(vlan_number=1234, ip_network=IPNetwork("3.3.3.2/27"))
def test_add_ip_to_vlan_unknown_vlan_raises(self):
    """add_ip_to_vlan raises UnknownVlan when neither the domain nor the irb unit exists."""
    vlan_and_irb_filter = """
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
"""
    self.netconf_mock.should_receive("get_config") \
        .with_args(source="candidate", filter=is_xml(vlan_and_irb_filter)) \
        .and_return(a_configuration())

    with self.assertRaises(UnknownVlan):
        self.switch.add_ip_to_vlan(vlan_number=1234, ip_network=IPNetwork("3.3.3.2/27"))
def test_add_ip_to_vlan_ip_already_exists_in_vlan_raises(self):
"""Adding an address that is already configured on the irb unit raises IPAlreadySet."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# The exact address 3.3.3.2/27 is already present on irb.1234, so no edit happens.
with self.assertRaises(IPAlreadySet):
self.switch.add_ip_to_vlan(vlan_number=1234, ip_network=IPNetwork("3.3.3.2/27"))
def test_set_icmp_redirect_state_false_not_set_adds_statement(self):
"""Disabling icmp redirects on a vlan without the statement adds <no-redirects/> (creating irb.1234)."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<no-redirects />
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces/>
"""))
# Expected edit: bind the domain to irb.1234 and set no-redirects on the unit.
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
<routing-interface>irb.1234</routing-interface>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<no-redirects />
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_vlan_icmp_redirects_state(vlan_number=1234, state=False)
def test_set_icmp_redirect_state_false_already_set_dont_do_anything(self):
"""Disabling icmp redirects when <no-redirects/> is already returned performs no edit."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<no-redirects />
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>4094</name>
<family>
<inet>
<no-redirects/>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
# NOTE(review): the fixture unit is 4094, not 1234 — presumably the driver treats
# any no-redirects hit from this filtered reply as "already set"; confirm upstream.
self.netconf_mock.should_receive("edit_config").never()
self.switch.set_vlan_icmp_redirects_state(vlan_number=1234, state=False)
def test_set_icmp_redirect_state_true_not_set_dont_do_anything(self):
"""Enabling icmp redirects when no <no-redirects/> exists performs no edit."""
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<no-redirects />
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces/>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.set_vlan_icmp_redirects_state(vlan_number=1234, state=True)
def test_set_icmp_redirect_state_true_already_set_remove_statement(self):
    """state=True while <no-redirects/> exists: expect a delete of that knob on irb unit 1234."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<no-redirects />
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN1234</name>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>4094</name>
<family>
<inet>
<no-redirects/>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
    # The knob is removed with a NETCONF operation="delete" on the vlan's own unit.
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<no-redirects operation="delete" />
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
"""))
    self.switch.set_vlan_icmp_redirects_state(vlan_number=1234, state=True)
def test_set_icmp_redirect_unknow_vlan_raises(self):
    """An empty candidate configuration means the vlan does not exist: UnknownVlan is raised."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<bridge-domains>
<domain>
<vlan-id>1234</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<no-redirects />
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration())
    with self.assertRaises(UnknownVlan):
        self.switch.set_vlan_icmp_redirects_state(vlan_number=1234, state=False)
def test_port_mode_access(self):
    """set_access_mode on a bare bridge family adds <interface-mode>access</interface-mode>."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
</bridge>
</family>
</unit>
</interface>
</interfaces>
<bridge-domains/>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_access_mode("xe-0/0/6")
def test_port_mode_access_with_no_mode_and_1_vlan_does_not_remove_it(self):
    """set_access_mode keeps an existing <vlan-id>: the pushed config only adds the mode."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id>2998</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
<bridge-domains/>
"""))
    # No delete for <vlan-id>2998</vlan-id>: an access port may legitimately keep it.
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_access_mode("xe-0/0/6")
def test_set_access_vlan_on_interface_with_access_mode_and_no_vlan_succeeds_easily(self):
    """set_access_vlan on an access-mode port without a vlan only pushes <vlan-id>."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    # Mode is already access, so the edit contains only the vlan-id.
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id>1000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_access_vlan("xe-0/0/6", 1000)
def test_set_access_vlan_on_interface_that_already_has_it_does_nothing(self):
    """set_access_vlan with the vlan already configured must not call edit_config."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>1000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    self.switch.set_access_vlan("xe-0/0/6", 1000)
def test_set_access_vlan_on_interface_that_has_no_port_mode_sets_it(self):
    """set_access_vlan on a port without a mode pushes both interface-mode and vlan-id."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>1000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_access_vlan("xe-0/0/6", 1000)
def test_set_access_vlan_on_interface_replaces_vlan_id(self):
    """set_access_vlan overwrites a different existing <vlan-id> (2000 -> 1000)."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
<domain>
<name>PATATE2</name>
<vlan-id>2000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>2000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    # Junos leaf semantics: re-sending <vlan-id> replaces the previous value, no delete needed.
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id>1000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_access_vlan("xe-0/0/6", 1000)
def test_set_access_vlan_on_interface_in_trunk_mode_should_raise(self):
    """set_access_vlan on a trunk port raises InterfaceInWrongPortMode and pushes nothing."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(InterfaceInWrongPortMode) as expect:
        self.switch.set_access_vlan("xe-0/0/6", 1000)
    assert_that(str(expect.exception), contains_string("Operation cannot be performed on a trunk mode interface"))
def test_set_access_vlan_on_unknown_vlan_raises(self):
    """set_access_vlan for a vlan missing from bridge-domains raises UnknownVlan."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>3333</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(UnknownVlan) as expect:
        self.switch.set_access_vlan("xe-0/0/6", 1000)
    assert_that(str(expect.exception), contains_string("Vlan 1000 not found"))
def test_set_access_vlan_on_default_interface_works(self):
    """set_access_vlan on an interface absent from config pushes mode + vlan-id from scratch."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>1000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_access_vlan("xe-0/0/6", 1000)
def test_set_access_mode_on_interface_replaces_trunk_info(self):
    """set_access_mode on a trunk port sets access mode and deletes the whole vlan-id-list."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
<domain>
<name>PATATE2</name>
<vlan-id>2000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>2000</vlan-id-list>
<vlan-id-list>2000-2000</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    # One bare <vlan-id-list operation="delete"/> wipes every member of the list.
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id-list operation="delete"/>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_access_mode("xe-0/0/6")
def test_port_mode_trunk_with_no_port_mode_or_vlan_set_just_sets_the_port_mode(self):
    """set_trunk_mode on a bare bridge family only pushes <interface-mode>trunk</interface-mode>."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
</interface>
</interfaces>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
</bridge>
</family>
</unit>
</interface>
</interfaces>
<bridge-domains/>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_trunk_mode("xe-0/0/6")
def test_set_port_mode_trunk_from_access_removes_vlan_info(self):
    """set_trunk_mode on an access port sets trunk mode and deletes the access <vlan-id>."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
</interface>
</interfaces>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>1000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id operation="delete"/>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.set_trunk_mode("xe-0/0/6")
def test_port_mode_trunk_already_in_trunk_mode_does_nothing(self):
    """set_trunk_mode on a port already in trunk mode must not call edit_config."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
</interface>
</interfaces>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>1000</vlan-id-list>
<vlan-id-list>1001</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    self.switch.set_trunk_mode("xe-0/0/6")
def test_add_trunk_vlan_on_interface_adds_to_the_list(self):
    """add_trunk_vlan appends a new <vlan-id-list> entry without touching existing entries."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>2000</vlan-id-list>
<vlan-id-list>2100-2200</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id-list>1000</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.add_trunk_vlan("xe-0/0/6", 1000)
def test_add_trunk_vlan_on_interface_that_has_no_port_mode_and_no_vlan_sets_it(self):
    """add_trunk_vlan on a modeless port pushes trunk mode together with the vlan-id-list entry."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>1000</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.add_trunk_vlan("xe-0/0/6", 1000)
def test_add_trunk_vlan_on_interface_in_access_mode_raises(self):
    """add_trunk_vlan on an access-mode port raises InterfaceInWrongPortMode, no edit."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id-list>500</vlan-id-list>
<interface-mode>access</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(InterfaceInWrongPortMode) as expect:
        self.switch.add_trunk_vlan("xe-0/0/6", 1000)
    assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_add_trunk_vlan_on_unknown_vlan_raises(self):
    """add_trunk_vlan for a vlan absent from bridge-domains raises UnknownVlan."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(UnknownVlan) as expect:
        self.switch.add_trunk_vlan("xe-0/0/6", 1000)
    assert_that(str(expect.exception), contains_string("Vlan 1000 not found"))
def test_add_trunk_vlan_on_interface_that_already_has_it_does_nothing(self):
    """add_trunk_vlan for a vlan already covered by a range (900-1100) must not edit."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>900-1100</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    self.switch.add_trunk_vlan("xe-0/0/6", 1000)
def test_remove_ip_from_vlan(self):
    """remove_ip_from_vlan deletes the matching inet <address> on irb unit 1234."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address operation="delete">
<name>3.3.3.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
    self.switch.remove_ip_from_vlan(vlan_number=1234, ip_network=IPNetwork("3.3.3.2/27"))
def test_remove_ip_from_vlan_ip_not_found(self):
    """remove_ip_from_vlan raises UnknownIP when the address is not on the unit."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
<family>
<inet>
<address>
<name>4.4.4.2/27</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
    with self.assertRaises(UnknownIP):
        self.switch.remove_ip_from_vlan(vlan_number=1234, ip_network=IPNetwork("3.3.3.2/27"))
def test_remove_ip_from_vlan_unknown_vlan_raises(self):
    """remove_ip_from_vlan raises UnknownVlan when the irb unit is absent from config."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>irb</name>
<unit>
<name>1234</name>
</unit>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration())
    with self.assertRaises(UnknownVlan):
        self.switch.remove_ip_from_vlan(vlan_number=1234, ip_network=IPNetwork("3.3.3.2/27"))
def test_unset_interface_access_vlan_removes_the_vlan_id(self):
    """unset_interface_access_vlan issues <vlan-id operation="delete"/> on the bridge family."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/1</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id>999</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id operation="delete" />
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
    self.switch.unset_interface_access_vlan("xe-0/0/1")
def test_unset_interface_access_vlan_fails_when_not_set(self):
    """unset_interface_access_vlan raises AccessVlanNotSet when no <vlan-id> is configured."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/1</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(AccessVlanNotSet) as expect:
        self.switch.unset_interface_access_vlan("xe-0/0/1")
    assert_that(str(expect.exception), equal_to("Access Vlan is not set on interface xe-0/0/1"))
def test_unset_interface_access_vlan_unknown_interface_raises(self):
    """unset_interface_access_vlan on an interface absent from config raises UnknownInterface."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/1</name>
</interface>
</interfaces>
</configuration>
</filter>
""")) .and_return(a_configuration())
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(UnknownInterface) as expect:
        self.switch.unset_interface_access_vlan("xe-0/0/1")
    assert_that(str(expect.exception), equal_to("Unknown interface xe-0/0/1"))
def test_get_interface(self):
    """get_interface outside a transaction reads the running datastore and returns defaults.

    A bare bridge family maps to: not shutdown, ACCESS mode, and None/empty
    for vlan, native vlan, trunk vlans, auto-negotiation and mtu.
    """
    # Outside a transaction the driver must query source="running", not "candidate".
    self.switch.in_transaction = False
    self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/1</name>
</interface>
</interfaces>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>xe-0/0/1</name>
<unit>
<name>0</name>
<family>
<bridge>
</bridge>
</family>
</unit>
</interface>
</interfaces>
<bridge-domains/>
"""))
    interface = self.switch.get_interface('xe-0/0/1')
    assert_that(interface.name, equal_to("xe-0/0/1"))
    assert_that(interface.shutdown, equal_to(False))
    assert_that(interface.port_mode, equal_to(ACCESS))
    assert_that(interface.access_vlan, equal_to(None))
    assert_that(interface.trunk_native_vlan, equal_to(None))
    assert_that(interface.trunk_vlans, equal_to([]))
    assert_that(interface.auto_negotiation, equal_to(None))
    assert_that(interface.mtu, equal_to(None))
def test_get_interfaces_lists_configuration_less_interfaces(self):
    """get_interfaces merges the terse RPC listing with an empty config.

    Interfaces present only in <get-interface-information> still appear;
    admin-status up/down maps to shutdown False/True.
    """
    self.switch.in_transaction = False
    self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
xe-0/0/1
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
<physical-interface>
<name>
xe-0/0/2
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
    self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces />
<bridge-domains />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces />
<bridge-domains/>
"""))
    if1, if2 = self.switch.get_interfaces()
    assert_that(if1.name, equal_to("xe-0/0/1"))
    assert_that(if1.shutdown, equal_to(False))
    assert_that(if1.port_mode, equal_to(ACCESS))
    assert_that(if1.access_vlan, equal_to(None))
    assert_that(if1.trunk_native_vlan, equal_to(None))
    assert_that(if1.trunk_vlans, equal_to([]))
    assert_that(if2.name, equal_to("xe-0/0/2"))
    assert_that(if2.shutdown, equal_to(True))
def test_get_nonexistent_interface_raises(self):
    """get_interface raises UnknownInterface when neither config nor terse RPC knows the port."""
    self.switch.in_transaction = False
    self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/INEXISTENT</name>
</interface>
</interfaces>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<bridge-domains/>
"""))
    self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
xe-0/0/1
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
    with self.assertRaises(UnknownInterface) as expect:
        self.switch.get_interface('xe-0/0/INEXISTENT')
    assert_that(str(expect.exception), equal_to("Unknown interface xe-0/0/INEXISTENT"))
def test_get_unconfigured_interface_could_be_disabled(self):
    """An interface absent from config but admin-down in the terse RPC reports shutdown=True."""
    self.switch.in_transaction = False
    self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/27</name>
</interface>
</interfaces>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<bridge-domains/>
"""))
    self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
xe-0/0/27
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
    assert_that(self.switch.get_interface('xe-0/0/27').shutdown, equal_to(True))
def test_remove_trunk_vlan_removes_the_vlan_lists_in_every_possible_way(self):
    """remove_trunk_vlan splits every list entry containing vlan 1000.

    Each matching entry gets operation="delete"; ranges are re-added minus
    the vlan: "1000-1001"->"1001", "999-1000"->"999", "999-1001"->"999"+"1001",
    "998-1002"->"998-999"+"1001-1002".
    """
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>1000</vlan-id-list>
<vlan-id-list>1000-1001</vlan-id-list>
<vlan-id-list>999-1000</vlan-id-list>
<vlan-id-list>999-1001</vlan-id-list>
<vlan-id-list>998-1002</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id-list operation="delete">1000</vlan-id-list>
<vlan-id-list operation="delete">1000-1001</vlan-id-list>
<vlan-id-list>1001</vlan-id-list>
<vlan-id-list operation="delete">999-1000</vlan-id-list>
<vlan-id-list>999</vlan-id-list>
<vlan-id-list operation="delete">999-1001</vlan-id-list>
<vlan-id-list>999</vlan-id-list>
<vlan-id-list>1001</vlan-id-list>
<vlan-id-list operation="delete">998-1002</vlan-id-list>
<vlan-id-list>998-999</vlan-id-list>
<vlan-id-list>1001-1002</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.remove_trunk_vlan("xe-0/0/6", 1000)
def test_remove_trunk_vlan_removes_the_vlan_even_if_referenced_by_name(self):
    """remove_trunk_vlan also deletes list entries referencing the vlan by its domain name.

    Both "1000" and "VLAN_NAME" are deleted; the unrelated "SOEMTHING" entry is untouched.
    """
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>1000</vlan-id-list>
<vlan-id-list>VLAN_NAME</vlan-id-list>
<vlan-id-list>SOEMTHING</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id-list operation="delete">1000</vlan-id-list>
<vlan-id-list operation="delete">VLAN_NAME</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
    self.switch.remove_trunk_vlan("xe-0/0/6", 1000)
def test_remove_trunk_vlan_not_in_lists_raises(self):
    """remove_trunk_vlan raises TrunkVlanNotSet when no list entry covers the vlan (ranges skip 1000)."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>trunk</interface-mode>
<vlan-id-list>500-999</vlan-id-list>
<vlan-id-list>1001-4000</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(TrunkVlanNotSet) as expect:
        self.switch.remove_trunk_vlan("xe-0/0/6", 1000)
    assert_that(str(expect.exception), contains_string("Trunk Vlan is not set on interface xe-0/0/6"))
def test_remove_trunk_vlan_on_access_with_the_correct_vlan_interface_raises(self):
    """remove_trunk_vlan on an access-mode port raises even though the vlan is present."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<interface-mode>access</interface-mode>
<vlan-id-list>1000</vlan-id-list>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(InterfaceInWrongPortMode) as expect:
        self.switch.remove_trunk_vlan("xe-0/0/6", 1000)
    assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_remove_trunk_vlan_on_no_port_mode_interface_with_the_correct_vlan_raises(self):
    """remove_trunk_vlan on a modeless port raises: no mode is treated as access mode."""
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<bridge-domains/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<bridge-domains>
<domain>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</domain>
</bridge-domains>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<name>0</name>
<family>
<bridge>
<vlan-id>1000</vlan-id>
</bridge>
</family>
</unit>
</interface>
</interfaces>
"""))
    self.netconf_mock.should_receive("edit_config").never()
    with self.assertRaises(InterfaceInWrongPortMode) as expect:
        self.switch.remove_trunk_vlan("xe-0/0/6", 1000)
    assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_remove_trunk_vlan_on_unknown_interface_raises(self):
    """remove_trunk_vlan on an interface absent from the candidate
    configuration must raise UnknownInterface without editing anything."""
    # Candidate config contains the vlan but no <interfaces> entry at all.
    self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
        <filter>
          <configuration>
            <interfaces/>
            <bridge-domains/>
          </configuration>
        </filter>
    """)).and_return(a_configuration("""
        <bridge-domains>
          <domain>
            <name>VLAN_NAME</name>
            <vlan-id>1000</vlan-id>
          </domain>
        </bridge-domains>
    """))
    # No edit_config call may happen for a rejected operation.
    self.netconf_mock.should_receive("edit_config").never()

    with self.assertRaises(UnknownInterface) as expect:
        self.switch.remove_trunk_vlan("xe-0/0/6", 1000)

    assert_that(str(expect.exception), contains_string("Unknown interface xe-0/0/6"))
| 36.646295 | 183 | 0.427454 | 10,243 | 124,121 | 4.998243 | 0.038953 | 0.028947 | 0.037502 | 0.051683 | 0.923042 | 0.909194 | 0.89308 | 0.879427 | 0.861593 | 0.847765 | 0 | 0.030854 | 0.452421 | 124,121 | 3,386 | 184 | 36.657118 | 0.72242 | 0.004399 | 0 | 0.907614 | 0 | 0.002867 | 0.730813 | 0.091064 | 0 | 0 | 0 | 0 | 0.043963 | 1 | 0.023893 | false | 0.000637 | 0.004779 | 0 | 0.02899 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e02af33e643fcf659c71d05794003b1bc2776026 | 16,206 | py | Python | tests/test_workers.py | thiagolcmelo/dspreview | b20e7e3788f7d7cb7c44b17fc1c8e8a87c45f41e | [
"MIT"
] | null | null | null | tests/test_workers.py | thiagolcmelo/dspreview | b20e7e3788f7d7cb7c44b17fc1c8e8a87c45f41e | [
"MIT"
] | null | null | null | tests/test_workers.py | thiagolcmelo/dspreview | b20e7e3788f7d7cb7c44b17fc1c8e8a87c45f41e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Test workers, how they should extract stuff, how they should parse .csv
files, and how they should store it in the database.
"""
# python standard
import unittest
import logging
from unittest.mock import patch
# third-party imports
import pandas as pd
import numpy as np
# local imports
from utils.bucket_helper import BucketHelper
from workers.worker import Worker, DcmWorker, DspWorker
logging.disable(logging.CRITICAL)
class TestWorkers(unittest.TestCase):
def setUp(self):
    """
    Some examples of information that could be found in the bucket
    """
    # a fake list of files, shaped like the bucket listing consumed by
    # Worker.extract (name / contentType / size are all strings)
    self.fake_list = [
        {
            'name': 'archive/',
            'contentType': 'application/x-www-form-urlencoded',
            'size': '0'
        }, {
            'name': 'dbm.csv',
            'contentType': 'text/csv',
            'size': '21645'
        }, {
            'name': 'dcm.csv',
            'contentType': 'text/csv',
            'size': '48297'
        }, {
            'name': 'mediamath.csv',
            'contentType': 'text/csv',
            'size': '22989'
        }
    ]
    # first two lines of a good dcm's file; values are strings on purpose,
    # parsing to proper dtypes is the worker's job
    self.good_dcm = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "267821",
        "placement": "mediamath_programmatic",
        "impressions": "48988.5",
        "clicks": "118",
        "reach": "48.24"
    }, {
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "198706",
        "placement": "dbm_youtube",
        "impressions": "55509.0",
        "clicks": "116",
        "reach": "42.49"
    }])
    # first two line of a file with data missing, will be filled with zero
    # (only *metric* fields are missing here, which is recoverable)
    self.missing_fine_dcm = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "267821",
        "placement": "mediamath_programmatic",
        "impressions": np.nan,
        "clicks": "",
        "reach": ""
    }, {
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "198706",
        "placement": "dbm_youtube",
        "impressions": "",
        "clicks": "116",
        "reach": "42.49"
    }])
    # first two line of a file with data missing, in this case, dimensions
    # are missing (empty "placement"), which the worker must reject
    self.missing_bad_dcm = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "267821",
        "placement": "mediamath_programmatic",
        "impressions": "48988.5",
        "clicks": "118",
        "reach": "48.24"
    }, {
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "198706",
        "placement": "",
        "impressions": "55509.0",
        "clicks": "116",
        "reach": "42.49"
    }])
    # a file with a line repeated, it is expected to be deduplicated
    self.duplicate_dcm = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "267821",
        "placement": "mediamath_programmatic",
        "impressions": "48988.5",
        "clicks": "118",
        "reach": "48.24"
    }, {
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "267821",
        "placement": "mediamath_programmatic",
        "impressions": "48988.5",
        "clicks": "118",
        "reach": "48.24"
    }])
    # a file with same dimensions values and different metrics values:
    # an ambiguous duplicate that cannot be resolved automatically
    self.bad_dcm = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "267821",
        "placement": "mediamath_programmatic",
        "impressions": "48988.5",
        "clicks": "118",
        "reach": "48.24"
    }, {
        "date": "2018-01-01",
        "campaign_id": "86394",
        "campaign": "acme_asprin",
        "placement_id": "267821",
        "placement": "mediamath_programmatic",
        "impressions": "648712.9",
        "clicks": "876",
        "reach": "87.54"
    }])
    # first two lines of a good dsp's file (no placement columns, cost
    # instead of reach)
    self.good_dsp = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "128115",
        "campaign": "acme_asprin_youtube",
        "impressions": "6011070",
        "clicks": "11889",
        "cost": "40334.2797",
    }, {
        "date": "2018-01-01",
        "campaign_id": "111493",
        "campaign": "acme_car_youtube",
        "impressions": "6585720",
        "clicks": "29843",
        "cost": "58547.0508",
    }])
    # a file with a line repeated, it is expected to be deduplicated
    self.duplicate_dsp = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "128115",
        "campaign": "acme_asprin_youtube",
        "impressions": "6011070",
        "clicks": "11889",
        "cost": "40334.2797",
    }, {
        "date": "2018-01-01",
        "campaign_id": "128115",
        "campaign": "acme_asprin_youtube",
        "impressions": "6011070",
        "clicks": "11889",
        "cost": "40334.2797",
    }])
    # first two line of a file with data missing, will be filled with zero
    self.missing_fine_dsp = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "128115",
        "campaign": "acme_asprin_youtube",
        "impressions": "6011070",
        "clicks": "",
        "cost": "40334.2797",
    }, {
        "date": "2018-01-01",
        "campaign_id": "111493",
        "campaign": "acme_car_youtube",
        "impressions": "",
        "clicks": "29843",
        "cost": "",
    }])
    # first two line of a file with data missing, in this case, dimensions
    # are missing (empty "campaign"), which the worker must reject
    self.missing_bad_dsp = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "128115",
        "campaign": "",
        "impressions": "6011070",
        "clicks": "11889",
        "cost": "40334.2797",
    }, {
        "date": "2018-01-01",
        "campaign_id": "111493",
        "campaign": "acme_car_youtube",
        "impressions": "6585720",
        "clicks": "29843",
        "cost": "58547.0508",
    }])
    # a file with same dimensions values and different metrics values
    self.bad_dsp = pd.DataFrame([{
        "date": "2018-01-01",
        "campaign_id": "128115",
        "campaign": "acme_asprin_youtube",
        "impressions": "6011070",
        "clicks": "11889",
        "cost": "40334.2797",
    }, {
        "date": "2018-01-01",
        "campaign_id": "128115",
        "campaign": "acme_asprin_youtube",
        "impressions": "6585720",
        "clicks": "29843",
        "cost": "58547.0508",
    }])
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_extract(self, mock_get_file, mock_list_files):
    """extract() must fetch exactly the bucket file matching the pattern."""
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = pd.DataFrame([{'a': 1, 'b': 3}])

    # One fresh Worker per pattern, checking the last fetched file name.
    cases = (
        ('^dcm.*', 'dcm.csv'),
        ('^dbm.*', 'dbm.csv'),
        ('^mediamath.*', 'mediamath.csv'),
    )
    for pattern, expected_file in cases:
        worker = Worker()
        worker.extract(pattern)
        mock_get_file.assert_called_with(expected_file)
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dcm_good(self, mock_get_file, mock_list_files):
    # A well-formed DCM file must parse into exactly one DataFrame with
    # the expected columns, each converted to its proper dtype.
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.good_dcm
    worker = DcmWorker()
    worker.extract('^dcm.*')
    worker.parse()
    self.assertTrue(len(worker.dfs) == 1, "There should be one DataFrame!")
    parsed_df = worker.dfs[0]
    columns_has = parsed_df.columns
    columns_should = [
        'date',
        'campaign_id',
        'campaign',
        'placement_id',
        'placement',
        'impressions',
        'clicks',
        'reach'
    ]
    # Order is irrelevant, only the set of columns matters.
    self.assertListEqual(sorted(columns_has), sorted(columns_should), """
        Missing some column! They should be:
        - date
        - campaign_id
        - campaign
        - placement_id
        - placement
        - impressions
        - clicks
        - reach
        """)
    # The raw file holds strings; parse() is responsible for conversions.
    self.assertEqual(parsed_df.date.dtype, np.dtype('datetime64[ns]'),
                     """date should be parsed to np.datetime64""")
    self.assertEqual(parsed_df.campaign_id.dtype, np.dtype('int64'),
                     """campaign_id should be parsed to np.int64""")
    self.assertEqual(parsed_df.placement_id.dtype, np.dtype('int64'),
                     """placement_id should be parsed to np.int64""")
    self.assertEqual(parsed_df.impressions.dtype, np.dtype('float64'),
                     """impressions should be parsed to np.float64""")
    self.assertEqual(parsed_df.clicks.dtype, np.dtype('int64'),
                     """clicks should be parsed to np.int64""")
    self.assertEqual(parsed_df.reach.dtype, np.dtype('float64'),
                     """reach should be parsed to np.float64""")
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dcm_missing(self, mock_get_file, mock_list_files):
    """Missing DCM *metric* values (NaN / empty string) are recoverable
    and must be filled with zero by parse()."""
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.missing_fine_dcm
    worker = DcmWorker()
    worker.extract()
    worker.parse()
    # assertEqual reports both values on failure and matches the style of
    # the DSP counterparts; message typo "on" -> "one" also fixed.
    self.assertEqual(len(worker.dfs), 1, "There should be one DataFrame!")
    self.assertEqual(worker.dfs[0].loc[0].clicks, 0,
                     "Clicks should be zero")
    self.assertEqual(worker.dfs[0].loc[0].reach, 0,
                     "Reach should be zero")
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dcm_missing_bad(self, mock_get_file, mock_list_files):
    # Missing *dimension* values (empty "placement") cannot be filled in;
    # parse() must raise instead of guessing.
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.missing_bad_dcm
    worker = DcmWorker()
    worker.extract()
    with self.assertRaises(Exception):
        worker.parse()
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dcm_duplicate(self, mock_get_file, mock_list_files):
    """Fully identical rows must be deduplicated by parse()."""
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.duplicate_dcm
    worker = DcmWorker()
    worker.extract()
    worker.parse()
    # assertEqual (instead of assertTrue on a comparison) for a useful
    # failure message, consistent with test_parse_dsp_duplicate.
    self.assertEqual(worker.dfs[0].shape[0], 1,
                     "The duplicate rows should be removed!")
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dcm_bad_values(self, mock_get_file, mock_list_files):
    """Rows sharing the same dimension combination but carrying different
    metric values are ambiguous, so parse() must raise."""
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.bad_dcm
    worker = DcmWorker()
    worker.extract()
    # The original hand-rolled try/except "working" flag (self-described
    # as a "very bad approach") expressed exactly this assertion; this is
    # the idiomatic form and matches test_parse_dsp_bad_values.
    with self.assertRaises(Exception):
        worker.parse()
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dsp_good(self, mock_get_file, mock_list_files):
    """A well-formed DSP file must parse into one DataFrame with the
    expected columns, each converted to its proper dtype."""
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.good_dsp
    worker = DspWorker('dbm')
    worker.extract()
    worker.parse()
    self.assertTrue(len(worker.dfs) == 1, "There should be one DataFrame!")
    parsed_df = worker.dfs[0]
    columns_has = parsed_df.columns
    columns_should = [
        'date',
        'campaign_id',
        'campaign',
        'impressions',
        'clicks',
        'cost'
    ]
    # Order is irrelevant, only the set of columns matters.
    self.assertListEqual(sorted(columns_has), sorted(columns_should), """
        Missing some column! They should be:
        - date
        - campaign_id
        - campaign
        - impressions
        - clicks
        - cost
        """)
    self.assertEqual(parsed_df.date.dtype, np.dtype('datetime64[ns]'),
                     """date should be parsed to np.datetime64""")
    self.assertEqual(parsed_df.campaign_id.dtype, np.dtype('int64'),
                     """campaign_id should be parsed to np.int64""")
    self.assertEqual(parsed_df.impressions.dtype, np.dtype('float64'),
                     """impressions should be parsed to np.float64""")
    self.assertEqual(parsed_df.clicks.dtype, np.dtype('int64'),
                     """clicks should be parsed to np.int64""")
    # Message fixed: it previously said "reach" (copy-paste from the DCM
    # test) although the assertion checks the cost column.
    self.assertEqual(parsed_df.cost.dtype, np.dtype('float64'),
                     """cost should be parsed to np.float64""")
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dsp_missing(self, mock_get_file, mock_list_files):
    """Missing DSP *metric* values must be filled with zero by parse()."""
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.missing_fine_dsp
    worker = DspWorker('dbm')
    worker.extract()
    worker.parse()
    # Message typo fixed: "on DataFrame" -> "one DataFrame".
    self.assertEqual(len(worker.dfs), 1, "There should be one DataFrame!")
    self.assertEqual(worker.dfs[0].loc[0].clicks, 0,
                     "Clicks should be zero")
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dsp_missing_bad(self, mock_get_file, mock_list_files):
    # Missing *dimension* values (empty "campaign") cannot be filled in;
    # parse() must raise instead of guessing.
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.missing_bad_dsp
    worker = DspWorker('dbm')
    worker.extract()
    with self.assertRaises(Exception):
        worker.parse()
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dsp_duplicate(self, mock_get_file, mock_list_files):
    # Fully identical rows must be deduplicated by parse().
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.duplicate_dsp
    worker = DspWorker('dbm')
    worker.extract()
    worker.parse()
    self.assertEqual(worker.dfs[0].shape[0], 1,
                     "The duplicate rows should be removed!")
@patch.object(BucketHelper, 'list_files')
@patch.object(BucketHelper, 'get_csv_file')
def test_parse_dsp_bad_values(self, mock_get_file, mock_list_files):
    # Same dimension combination with conflicting metric values is
    # ambiguous, so parse() must raise.
    mock_list_files.return_value = self.fake_list
    mock_get_file.return_value = self.bad_dsp
    worker = DspWorker('dbm')
    worker.extract()
    with self.assertRaises(Exception):
        worker.parse()
| 35.774834 | 79 | 0.540417 | 1,695 | 16,206 | 4.970501 | 0.121534 | 0.035252 | 0.032641 | 0.028487 | 0.873353 | 0.850564 | 0.830386 | 0.825875 | 0.812344 | 0.801068 | 0 | 0.06588 | 0.33031 | 16,206 | 452 | 80 | 35.853982 | 0.710403 | 0.057386 | 0 | 0.706494 | 0 | 0 | 0.269693 | 0.012687 | 0 | 0 | 0 | 0 | 0.075325 | 1 | 0.031169 | false | 0.002597 | 0.018182 | 0 | 0.051948 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e052da91596999619d159bdcd01705951981bb01 | 203 | py | Python | src/data_sci/fastai/groups/default_cnn.py | MichoelSnow/data_science | 7f6c054624268308ec4126a601c9fa8bc5de157c | [
"MIT"
] | null | null | null | src/data_sci/fastai/groups/default_cnn.py | MichoelSnow/data_science | 7f6c054624268308ec4126a601c9fa8bc5de157c | [
"MIT"
] | 8 | 2020-03-24T15:29:05.000Z | 2022-02-10T00:14:06.000Z | src/data_sci/fastai/groups/default_cnn.py | MichoelSnow/data_science | 7f6c054624268308ec4126a601c9fa8bc5de157c | [
"MIT"
] | null | null | null | from fastai.imports import *
from fastai.transforms import *
from fastai.conv_learner import *
from fastai.model import *
from fastai.dataset import *
from fastai.sgdr import *
from fastai.plots import * | 29 | 33 | 0.79803 | 29 | 203 | 5.551724 | 0.37931 | 0.434783 | 0.596273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.133005 | 203 | 7 | 34 | 29 | 0.914773 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e06bd7e7f98e96bbb236b10195f6dc1e5f69e085 | 48 | py | Python | medimodule/Kidney/kidney_tumor_segmentation/models/cascade_2nd/__init__.py | daeun02/MI2RLNet | 55f32e3908dc1d5fa6100f9d9fccd23a2636adbb | [
"Apache-2.0"
] | null | null | null | medimodule/Kidney/kidney_tumor_segmentation/models/cascade_2nd/__init__.py | daeun02/MI2RLNet | 55f32e3908dc1d5fa6100f9d9fccd23a2636adbb | [
"Apache-2.0"
] | null | null | null | medimodule/Kidney/kidney_tumor_segmentation/models/cascade_2nd/__init__.py | daeun02/MI2RLNet | 55f32e3908dc1d5fa6100f9d9fccd23a2636adbb | [
"Apache-2.0"
] | null | null | null | from .model_1 import *
from .model_2_5 import * | 24 | 24 | 0.75 | 9 | 48 | 3.666667 | 0.666667 | 0.545455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 0.166667 | 48 | 2 | 24 | 24 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e0739d04af932ff770aba6a1eaba06e30397591d | 48,654 | py | Python | linux/lib/python2.7/dist-packages/samba/tests/dns.py | nmercier/linux-cross-gcc | a5b0028fd2b72ec036a4725e93ba29d73cb753a6 | [
"BSD-3-Clause"
] | 3 | 2015-10-31T10:39:25.000Z | 2019-04-27T20:19:33.000Z | linux/lib/python2.7/dist-packages/samba/tests/dns.py | nmercier/linux-cross-gcc | a5b0028fd2b72ec036a4725e93ba29d73cb753a6 | [
"BSD-3-Clause"
] | null | null | null | linux/lib/python2.7/dist-packages/samba/tests/dns.py | nmercier/linux-cross-gcc | a5b0028fd2b72ec036a4725e93ba29d73cb753a6 | [
"BSD-3-Clause"
] | null | null | null | # Unix SMB/CIFS implementation.
# Copyright (C) Kai Blin <kai@samba.org> 2011
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import struct
import random
import socket
import samba.ndr as ndr
from samba import credentials, param
from samba.tests import TestCase
from samba.dcerpc import dns, dnsp, dnsserver
from samba.netcmd.dns import TXTRecord, dns_record_match, data_to_dns_record
from samba.tests.subunitrun import SubunitOptions, TestProgram
import samba.getopt as options
import optparse
# Command-line handling: this test module is run as a script with the
# target server name and IP as positional arguments.
parser = optparse.OptionParser("dns.py <server name> <server ip> [options]")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)

# This timeout only has relevance when testing against Windows
# Format errors tend to return patchy responses, so a timeout is needed.
parser.add_option("--timeout", type="int", dest="timeout",
                  help="Specify timeout for DNS requests")

# use command line creds if available
credopts = options.CredentialsOptions(parser)
parser.add_option_group(credopts)
subunitopts = SubunitOptions(parser)
parser.add_option_group(subunitopts)

opts, args = parser.parse_args()

lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)

# None when --timeout is not given; socket calls then block indefinitely.
timeout = opts.timeout

# Both positional arguments are mandatory.
if len(args) < 2:
    parser.print_usage()
    sys.exit(1)

server_name = args[0]
server_ip = args[1]
creds.set_krb_forwardable(credentials.NO_KRB_FORWARDABLE)
def make_txt_record(records):
    """Wrap *records* (a list of strings) into a dns.txt_record rdata."""
    strings = dnsp.string_list()
    strings.count = len(records)
    strings.str = records
    record = dns.txt_record()
    record.txt = strings
    return record
class DNSTest(TestCase):
def setUp(self):
global server, server_ip, lp, creds
super(DNSTest, self).setUp()
self.server = server_name
self.server_ip = server_ip
self.lp = lp
self.creds = creds
def errstr(self, errcode):
"Return a readable error code"
string_codes = [
"OK",
"FORMERR",
"SERVFAIL",
"NXDOMAIN",
"NOTIMP",
"REFUSED",
"YXDOMAIN",
"YXRRSET",
"NXRRSET",
"NOTAUTH",
"NOTZONE",
]
return string_codes[errcode]
def assert_dns_rcode_equals(self, packet, rcode):
"Helper function to check return code"
p_errcode = packet.operation & 0x000F
self.assertEquals(p_errcode, rcode, "Expected RCODE %s, got %s" %
(self.errstr(rcode), self.errstr(p_errcode)))
def assert_dns_opcode_equals(self, packet, opcode):
"Helper function to check opcode"
p_opcode = packet.operation & 0x7800
self.assertEquals(p_opcode, opcode, "Expected OPCODE %s, got %s" %
(opcode, p_opcode))
def make_name_packet(self, opcode, qid=None):
"Helper creating a dns.name_packet"
p = dns.name_packet()
if qid is None:
p.id = random.randint(0x0, 0xffff)
p.operation = opcode
p.questions = []
return p
def finish_name_packet(self, packet, questions):
"Helper to finalize a dns.name_packet"
packet.qdcount = len(questions)
packet.questions = questions
def make_name_question(self, name, qtype, qclass):
"Helper creating a dns.name_question"
q = dns.name_question()
q.name = name
q.question_type = qtype
q.question_class = qclass
return q
def get_dns_domain(self):
"Helper to get dns domain"
return self.creds.get_realm().lower()
def dns_transaction_udp(self, packet, host=server_ip,
dump=False, timeout=timeout):
"send a DNS query and read the reply"
s = None
try:
send_packet = ndr.ndr_pack(packet)
if dump:
print self.hexdump(send_packet)
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
s.settimeout(timeout)
s.connect((host, 53))
s.send(send_packet, 0)
recv_packet = s.recv(2048, 0)
if dump:
print self.hexdump(recv_packet)
return ndr.ndr_unpack(dns.name_packet, recv_packet)
finally:
if s is not None:
s.close()
def dns_transaction_tcp(self, packet, host=server_ip,
dump=False, timeout=timeout):
"send a DNS query and read the reply"
s = None
try:
send_packet = ndr.ndr_pack(packet)
if dump:
print self.hexdump(send_packet)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
s.settimeout(timeout)
s.connect((host, 53))
tcp_packet = struct.pack('!H', len(send_packet))
tcp_packet += send_packet
s.send(tcp_packet, 0)
recv_packet = s.recv(0xffff + 2, 0)
if dump:
print self.hexdump(recv_packet)
return ndr.ndr_unpack(dns.name_packet, recv_packet[2:])
finally:
if s is not None:
s.close()
def make_txt_update(self, prefix, txt_array):
p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
updates = []
name = self.get_dns_domain()
u = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
updates.append(u)
self.finish_name_packet(p, updates)
updates = []
r = dns.res_rec()
r.name = "%s.%s" % (prefix, self.get_dns_domain())
r.rr_type = dns.DNS_QTYPE_TXT
r.rr_class = dns.DNS_QCLASS_IN
r.ttl = 900
r.length = 0xffff
rdata = make_txt_record(txt_array)
r.rdata = rdata
updates.append(r)
p.nscount = len(updates)
p.nsrecs = updates
return p
def check_query_txt(self, prefix, txt_array):
name = "%s.%s" % (prefix, self.get_dns_domain())
p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
questions = []
q = self.make_name_question(name, dns.DNS_QTYPE_TXT, dns.DNS_QCLASS_IN)
questions.append(q)
self.finish_name_packet(p, questions)
response = self.dns_transaction_udp(p)
self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
self.assertEquals(response.ancount, 1)
self.assertEquals(response.answers[0].rdata.txt.str, txt_array)
class TestSimpleQueries(DNSTest):
    # Plain DNS QUERY operations (A, MX, ALL, SOA) against the server.

    def test_one_a_query(self):
        "create a query packet containing one query record"
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
        print "asking for ", q.name
        questions.append(q)

        self.finish_name_packet(p, questions)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        # Exactly one answer: the server's own A record.
        self.assertEquals(response.ancount, 1)
        self.assertEquals(response.answers[0].rdata,
                          self.server_ip)

    def test_one_a_query_tcp(self):
        "create a query packet containing one query record via TCP"
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
        print "asking for ", q.name
        questions.append(q)

        self.finish_name_packet(p, questions)
        response = self.dns_transaction_tcp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 1)
        self.assertEquals(response.answers[0].rdata,
                          self.server_ip)

    def test_one_mx_query(self):
        "create a query packet causing an empty RCODE_OK answer"
        # Existing host, but no MX record: OK with zero answers.
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_MX, dns.DNS_QCLASS_IN)
        print "asking for ", q.name
        questions.append(q)

        self.finish_name_packet(p, questions)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 0)

        # Nonexistent host: NXDOMAIN with zero answers.
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "invalid-%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_MX, dns.DNS_QCLASS_IN)
        print "asking for ", q.name
        questions.append(q)

        self.finish_name_packet(p, questions)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_NXDOMAIN)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 0)

    def test_two_queries(self):
        "create a query packet containing two query records"
        # Two questions in one packet is malformed; expect FORMERR.
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
        questions.append(q)

        name = "%s.%s" % ('bogusname', self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
        questions.append(q)

        self.finish_name_packet(p, questions)
        try:
            response = self.dns_transaction_udp(p)
            self.assert_dns_rcode_equals(response, dns.DNS_RCODE_FORMERR)
        except socket.timeout:
            # Windows chooses not to respond to incorrectly formatted queries.
            # Although this appears to be non-deterministic even for the same
            # request twice, it also appears to be based on a how poorly the
            # request is formatted.
            pass

    def test_qtype_all_query(self):
        "create a QTYPE_ALL query"
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_ALL, dns.DNS_QCLASS_IN)
        print "asking for ", q.name
        questions.append(q)

        self.finish_name_packet(p, questions)
        response = self.dns_transaction_udp(p)

        # A second answer (the AAAA record) is expected when the test
        # environment advertises an IPv6 address.
        num_answers = 1
        dc_ipv6 = os.getenv('SERVER_IPV6')
        if dc_ipv6 is not None:
            num_answers += 1

        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, num_answers)
        self.assertEquals(response.answers[0].rdata,
                          self.server_ip)
        if dc_ipv6 is not None:
            self.assertEquals(response.answers[1].rdata, dc_ipv6)

    def test_qclass_none_query(self):
        "create a QCLASS_NONE query"
        # QCLASS_NONE is not valid in a query; expect NOTIMP.
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_ALL, dns.DNS_QCLASS_NONE)
        questions.append(q)

        self.finish_name_packet(p, questions)
        try:
            response = self.dns_transaction_udp(p)
            self.assert_dns_rcode_equals(response, dns.DNS_RCODE_NOTIMP)
        except socket.timeout:
            # Windows chooses not to respond to incorrectly formatted queries.
            # Although this appears to be non-deterministic even for the same
            # request twice, it also appears to be based on a how poorly the
            # request is formatted.
            pass

# Only returns an authority section entry in BIND and Win DNS
# FIXME: Enable one Samba implements this feature
#    def test_soa_hostname_query(self):
#        "create a SOA query for a hostname"
#        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
#        questions = []
#
#        name = "%s.%s" % (os.getenv('SERVER'), self.get_dns_domain())
#        q = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
#        questions.append(q)
#
#        self.finish_name_packet(p, questions)
#        response = self.dns_transaction_udp(p)
#        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
#        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
#        # We don't get SOA records for single hosts
#        self.assertEquals(response.ancount, 0)

    def test_soa_domain_query(self):
        "create a SOA query for a domain"
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = self.get_dns_domain()
        q = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
        questions.append(q)

        self.finish_name_packet(p, questions)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 1)
        self.assertEquals(response.answers[0].rdata.minimum, 3600)
class TestDNSUpdates(DNSTest):
def test_two_updates(self):
    "create two update requests"
    # Two entries in the zone section of an UPDATE is malformed; the
    # server is expected to answer FORMERR.
    p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
    updates = []

    name = "%s.%s" % (self.server, self.get_dns_domain())
    u = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
    updates.append(u)

    name = self.get_dns_domain()
    u = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
    updates.append(u)

    self.finish_name_packet(p, updates)
    try:
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_FORMERR)
    except socket.timeout:
        # Windows chooses not to respond to incorrectly formatted queries.
        # Although this appears to be non-deterministic even for the same
        # request twice, it also appears to be based on a how poorly the
        # request is formatted.
        pass
def test_update_wrong_qclass(self):
    "create update with DNS_QCLASS_NONE"
    # QCLASS_NONE in the zone section is not supported; expect NOTIMP.
    p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
    updates = []

    name = self.get_dns_domain()
    u = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_NONE)
    updates.append(u)

    self.finish_name_packet(p, updates)
    response = self.dns_transaction_udp(p)
    self.assert_dns_rcode_equals(response, dns.DNS_RCODE_NOTIMP)
def test_update_prereq_with_non_null_ttl(self):
    "test update with a non-null TTL"
    # A prerequisite record must carry TTL 0; a non-zero TTL makes the
    # packet malformed, so FORMERR is expected.
    p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
    updates = []

    name = self.get_dns_domain()
    u = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
    updates.append(u)
    self.finish_name_packet(p, updates)

    # Prerequisites travel in the answer section of an UPDATE packet.
    prereqs = []
    r = dns.res_rec()
    r.name = "%s.%s" % (self.server, self.get_dns_domain())
    r.rr_type = dns.DNS_QTYPE_TXT
    r.rr_class = dns.DNS_QCLASS_NONE
    r.ttl = 1
    r.length = 0
    prereqs.append(r)

    p.ancount = len(prereqs)
    p.answers = prereqs

    try:
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_FORMERR)
    except socket.timeout:
        # Windows chooses not to respond to incorrectly formatted queries.
        # Although this appears to be non-deterministic even for the same
        # request twice, it also appears to be based on a how poorly the
        # request is formatted.
        pass
def test_update_prereq_with_non_null_length(self):
    "test update with a non-null length"
    # A QCLASS_ANY prerequisite with a non-zero rdata length fails the
    # "RRset exists" check, so NXRRSET is expected.
    p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
    updates = []

    name = self.get_dns_domain()
    u = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
    updates.append(u)
    self.finish_name_packet(p, updates)

    # Prerequisites travel in the answer section of an UPDATE packet.
    prereqs = []
    r = dns.res_rec()
    r.name = "%s.%s" % (self.server, self.get_dns_domain())
    r.rr_type = dns.DNS_QTYPE_TXT
    r.rr_class = dns.DNS_QCLASS_ANY
    r.ttl = 0
    r.length = 1
    prereqs.append(r)

    p.ancount = len(prereqs)
    p.answers = prereqs

    response = self.dns_transaction_udp(p)
    self.assert_dns_rcode_equals(response, dns.DNS_RCODE_NXRRSET)
def test_update_prereq_nonexisting_name(self):
    "test update with a nonexisting name"
    # The prerequisite names a record that does not exist, so the update
    # must fail with NXRRSET.
    p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
    updates = []

    name = self.get_dns_domain()
    u = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
    updates.append(u)
    self.finish_name_packet(p, updates)

    # Prerequisites travel in the answer section of an UPDATE packet.
    prereqs = []
    r = dns.res_rec()
    r.name = "idontexist.%s" % self.get_dns_domain()
    r.rr_type = dns.DNS_QTYPE_TXT
    r.rr_class = dns.DNS_QCLASS_ANY
    r.ttl = 0
    r.length = 0
    prereqs.append(r)

    p.ancount = len(prereqs)
    p.answers = prereqs

    response = self.dns_transaction_udp(p)
    self.assert_dns_rcode_equals(response, dns.DNS_RCODE_NXRRSET)
def test_update_add_txt_record(self):
    """A TXT record added via dynamic update can be queried back."""
    prefix = 'textrec'
    txt = ['"This is a test"']
    update = self.make_txt_update(prefix, txt)
    reply = self.dns_transaction_udp(update)
    self.assert_dns_rcode_equals(reply, dns.DNS_RCODE_OK)
    self.check_query_txt(prefix, txt)
def test_delete_record(self):
    """Create a TXT record, confirm it resolves, delete it, and confirm
    it is gone (NXDOMAIN)."""
    NAME = "deleterec.%s" % self.get_dns_domain()

    def send_update(rr_class, ttl):
        # Build and send an update packet: the zone SOA in the question
        # section and a single TXT record for NAME in the update section.
        packet = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
        soa_q = self.make_name_question(self.get_dns_domain(),
                                        dns.DNS_QTYPE_SOA,
                                        dns.DNS_QCLASS_IN)
        self.finish_name_packet(packet, [soa_q])
        rec = dns.res_rec()
        rec.name = NAME
        rec.rr_type = dns.DNS_QTYPE_TXT
        rec.rr_class = rr_class
        rec.ttl = ttl
        rec.length = 0xffff
        rec.rdata = make_txt_record(['"This is a test"'])
        packet.nscount = 1
        packet.nsrecs = [rec]
        return self.dns_transaction_udp(packet)

    def query_name():
        # Plain TXT query for NAME; returns the server's response.
        packet = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        q = self.make_name_question(NAME, dns.DNS_QTYPE_TXT,
                                    dns.DNS_QCLASS_IN)
        self.finish_name_packet(packet, [q])
        return self.dns_transaction_udp(packet)

    # First, create a record to make sure we have a record to delete.
    self.assert_dns_rcode_equals(send_update(dns.DNS_QCLASS_IN, 900),
                                 dns.DNS_RCODE_OK)
    # Now check the record is around.
    self.assert_dns_rcode_equals(query_name(), dns.DNS_RCODE_OK)
    # Now delete the record (class NONE / TTL 0 deletes the matching RR).
    self.assert_dns_rcode_equals(send_update(dns.DNS_QCLASS_NONE, 0),
                                 dns.DNS_RCODE_OK)
    # And finally check it's gone.
    self.assert_dns_rcode_equals(query_name(), dns.DNS_RCODE_NXDOMAIN)
def test_readd_record(self):
    """Add a TXT record, delete it, then add it back, verifying the
    query result after each step."""
    NAME = "readdrec.%s" % self.get_dns_domain()

    def send_update(rr_class, ttl):
        # Build and send an update packet: the zone SOA in the question
        # section and a single TXT record for NAME in the update section.
        packet = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
        soa_q = self.make_name_question(self.get_dns_domain(),
                                        dns.DNS_QTYPE_SOA,
                                        dns.DNS_QCLASS_IN)
        self.finish_name_packet(packet, [soa_q])
        rec = dns.res_rec()
        rec.name = NAME
        rec.rr_type = dns.DNS_QTYPE_TXT
        rec.rr_class = rr_class
        rec.ttl = ttl
        rec.length = 0xffff
        rec.rdata = make_txt_record(['"This is a test"'])
        packet.nscount = 1
        packet.nsrecs = [rec]
        return self.dns_transaction_udp(packet)

    def query_name():
        # Plain TXT query for NAME; returns the server's response.
        packet = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        q = self.make_name_question(NAME, dns.DNS_QTYPE_TXT,
                                    dns.DNS_QCLASS_IN)
        self.finish_name_packet(packet, [q])
        return self.dns_transaction_udp(packet)

    # Create the record and check it is around.
    self.assert_dns_rcode_equals(send_update(dns.DNS_QCLASS_IN, 900),
                                 dns.DNS_RCODE_OK)
    self.assert_dns_rcode_equals(query_name(), dns.DNS_RCODE_OK)
    # Delete the record (class NONE / TTL 0) and check it's gone.
    self.assert_dns_rcode_equals(send_update(dns.DNS_QCLASS_NONE, 0),
                                 dns.DNS_RCODE_OK)
    self.assert_dns_rcode_equals(query_name(), dns.DNS_RCODE_NXDOMAIN)
    # Recreate the record and check it is around again.
    self.assert_dns_rcode_equals(send_update(dns.DNS_QCLASS_IN, 900),
                                 dns.DNS_RCODE_OK)
    self.assert_dns_rcode_equals(query_name(), dns.DNS_RCODE_OK)
def test_update_add_mx_record(self):
    """An MX record added via dynamic update round-trips through a
    query, preserving preference and exchange."""
    update = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
    soa_q = self.make_name_question(self.get_dns_domain(),
                                    dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
    self.finish_name_packet(update, [soa_q])

    mx_rdata = dns.mx_record()
    mx_rdata.preference = 10
    mx_rdata.exchange = 'mail.%s' % self.get_dns_domain()

    rec = dns.res_rec()
    rec.name = "%s" % self.get_dns_domain()
    rec.rr_type = dns.DNS_QTYPE_MX
    rec.rr_class = dns.DNS_QCLASS_IN
    rec.ttl = 900
    rec.length = 0xffff
    rec.rdata = mx_rdata
    update.nscount = 1
    update.nsrecs = [rec]

    response = self.dns_transaction_udp(update)
    self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)

    # Query the MX back and verify both rdata fields survived the trip.
    query = self.make_name_packet(dns.DNS_OPCODE_QUERY)
    mx_q = self.make_name_question("%s" % self.get_dns_domain(),
                                   dns.DNS_QTYPE_MX, dns.DNS_QCLASS_IN)
    self.finish_name_packet(query, [mx_q])
    response = self.dns_transaction_udp(query)
    self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
    self.assertEqual(response.ancount, 1)
    answer = response.answers[0]
    self.assertEqual(answer.rr_type, dns.DNS_QTYPE_MX)
    self.assertEqual(answer.rdata.preference, 10)
    self.assertEqual(answer.rdata.exchange, 'mail.%s' % self.get_dns_domain())
class TestComplexQueries(DNSTest):
def setUp(self):
super(TestComplexQueries, self).setUp()
p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
updates = []
name = self.get_dns_domain()
u = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
updates.append(u)
self.finish_name_packet(p, updates)
updates = []
r = dns.res_rec()
r.name = "cname_test.%s" % self.get_dns_domain()
r.rr_type = dns.DNS_QTYPE_CNAME
r.rr_class = dns.DNS_QCLASS_IN
r.ttl = 900
r.length = 0xffff
r.rdata = "%s.%s" % (self.server, self.get_dns_domain())
updates.append(r)
p.nscount = len(updates)
p.nsrecs = updates
response = self.dns_transaction_udp(p)
self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
def tearDown(self):
super(TestComplexQueries, self).tearDown()
p = self.make_name_packet(dns.DNS_OPCODE_UPDATE)
updates = []
name = self.get_dns_domain()
u = self.make_name_question(name, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
updates.append(u)
self.finish_name_packet(p, updates)
updates = []
r = dns.res_rec()
r.name = "cname_test.%s" % self.get_dns_domain()
r.rr_type = dns.DNS_QTYPE_CNAME
r.rr_class = dns.DNS_QCLASS_NONE
r.ttl = 0
r.length = 0xffff
r.rdata = "%s.%s" % (self.server, self.get_dns_domain())
updates.append(r)
p.nscount = len(updates)
p.nsrecs = updates
response = self.dns_transaction_udp(p)
self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
def test_one_a_query(self):
"create a query packet containing one query record"
p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
questions = []
name = "cname_test.%s" % self.get_dns_domain()
q = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
print "asking for ", q.name
questions.append(q)
self.finish_name_packet(p, questions)
response = self.dns_transaction_udp(p)
self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
self.assertEquals(response.ancount, 2)
self.assertEquals(response.answers[0].rr_type, dns.DNS_QTYPE_CNAME)
self.assertEquals(response.answers[0].rdata, "%s.%s" %
(self.server, self.get_dns_domain()))
self.assertEquals(response.answers[1].rr_type, dns.DNS_QTYPE_A)
self.assertEquals(response.answers[1].rdata,
self.server_ip)
class TestInvalidQueries(DNSTest):
    """Deliberately malformed traffic: the server must stay usable and
    must not answer non-queries."""

    def test_one_a_query(self):
        """Send a zero-byte UDP datagram, then a normal A query.

        The empty datagram must not wedge the server: the follow-up
        query still has to be answered with the server's own address.
        """
        s = None
        try:
            # Fire an empty datagram at the DNS port first.
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
            s.connect((self.server_ip, 53))
            s.send("", 0)
        finally:
            if s is not None:
                s.close()

        # A regular A query for the server's own name must still succeed.
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % (self.server, self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
        print "asking for ", q.name
        questions.append(q)

        self.finish_name_packet(p, questions)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 1)
        self.assertEquals(response.answers[0].rdata,
                          self.server_ip)

    def test_one_a_reply(self):
        """Send a DNS *reply* (not a query) over TCP; expect no answer."""
        global timeout

        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        name = "%s.%s" % ('fakefakefake', self.get_dns_domain())
        q = self.make_name_question(name, dns.DNS_QTYPE_A, dns.DNS_QCLASS_IN)
        print "asking for ", q.name
        questions.append(q)

        self.finish_name_packet(p, questions)
        # Flip the packet into a reply rather than a query.
        p.operation |= dns.DNS_FLAG_REPLY
        s = None
        try:
            send_packet = ndr.ndr_pack(p)
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
            s.settimeout(timeout)
            host=self.server_ip
            s.connect((host, 53))
            # DNS-over-TCP prefixes each message with a 2-byte length.
            tcp_packet = struct.pack('!H', len(send_packet))
            tcp_packet += send_packet
            s.send(tcp_packet, 0)
            recv_packet = s.recv(0xffff + 2, 0)
            # The server should close the connection without replying.
            self.assertEquals(0, len(recv_packet))
        except socket.timeout:
            # Windows chooses not to respond to incorrectly formatted queries.
            # Although this appears to be non-deterministic even for the same
            # request twice, it also appears to be based on a how poorly the
            # request is formatted.
            pass
        finally:
            if s is not None:
                s.close()
class TestZones(DNSTest):
    """Zone create/delete via the dnsserver RPC pipe, observed over DNS."""

    def setUp(self):
        super(TestZones, self).setUp()
        self.zone = "test.lan"
        # RPC connection used to create and delete zones on the server.
        self.rpc_conn = dnsserver.dnsserver("ncacn_ip_tcp:%s[sign]" % (self.server_ip),
                                            self.lp, self.creds)

    def tearDown(self):
        super(TestZones, self).tearDown()
        try:
            # Best-effort cleanup in case a test left the zone behind.
            self.delete_zone(self.zone)
        except RuntimeError, (num, string):
            if num != 9601: #WERR_DNS_ERROR_ZONE_DOES_NOT_EXIST
                raise

    def create_zone(self, zone):
        """Create *zone* as a secure-update primary zone via RPC."""
        zone_create = dnsserver.DNS_RPC_ZONE_CREATE_INFO_LONGHORN()
        zone_create.pszZoneName = zone
        zone_create.dwZoneType = dnsp.DNS_ZONE_TYPE_PRIMARY
        zone_create.fAllowUpdate = dnsp.DNS_ZONE_UPDATE_SECURE
        zone_create.fAging = 0
        zone_create.dwDpFlags = dnsserver.DNS_DP_DOMAIN_DEFAULT
        self.rpc_conn.DnssrvOperation2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                       0,
                                       self.server_ip,
                                       None,
                                       0,
                                       'ZoneCreate',
                                       dnsserver.DNSSRV_TYPEID_ZONE_CREATE,
                                       zone_create)

    def delete_zone(self, zone):
        """Remove *zone* from the directory via RPC."""
        self.rpc_conn.DnssrvOperation2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                       0,
                                       self.server_ip,
                                       zone,
                                       0,
                                       'DeleteZoneFromDs',
                                       dnsserver.DNSSRV_TYPEID_NULL,
                                       None)

    def test_soa_query(self):
        """SOA answers track the zone's lifetime: NXDOMAIN before
        creation, OK while it exists, NXDOMAIN again after deletion."""
        zone = "test.lan"
        p = self.make_name_packet(dns.DNS_OPCODE_QUERY)
        questions = []

        q = self.make_name_question(zone, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN)
        questions.append(q)
        self.finish_name_packet(p, questions)

        response = self.dns_transaction_udp(p)
        # Windows returns OK while BIND logically seems to return NXDOMAIN
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_NXDOMAIN)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 0)

        self.create_zone(zone)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 1)
        self.assertEquals(response.answers[0].rr_type, dns.DNS_QTYPE_SOA)

        self.delete_zone(zone)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_NXDOMAIN)
        self.assert_dns_opcode_equals(response, dns.DNS_OPCODE_QUERY)
        self.assertEquals(response.ancount, 0)
class TestRPCRoundtrip(DNSTest):
    """Round-trip TXT record data between the DNS wire protocol and the
    dnsserver RPC interface, checking that escaping and padding survive
    in both directions."""

    def setUp(self):
        super(TestRPCRoundtrip, self).setUp()
        # RPC connection used to add/remove records server-side.
        self.rpc_conn = dnsserver.dnsserver("ncacn_ip_tcp:%s[sign]" % (self.server_ip),
                                            self.lp, self.creds)

    def tearDown(self):
        super(TestRPCRoundtrip, self).tearDown()

    def test_update_add_txt_rpc_to_dns(self):
        """A TXT record added via RPC is visible to a DNS query."""
        prefix, txt = 'rpctextrec', ['"This is a test"']

        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '"\\"This is a test\\""')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            # Remove the record again even if the check failed.
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

    def test_update_add_null_padded_txt_record(self):
        """Empty strings inside a TXT record survive the DNS-to-RPC trip."""
        prefix, txt = 'pad1textrec', ['"This is a test"', '', '']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, txt)
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"\\"This is a test\\"" "" ""'))

        prefix, txt = 'pad2textrec', ['"This is a test"', '', '', 'more text']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, txt)
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"\\"This is a test\\"" "" "" "more text"'))

        prefix, txt = 'pad3textrec', ['', '', '"This is a test"']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, txt)
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"" "" "\\"This is a test\\""'))

    def test_update_add_padding_rpc_to_dns(self):
        """Empty strings inside a TXT record survive the RPC-to-DNS trip,
        for leading, trailing and interior padding."""
        prefix, txt = 'pad1textrec', ['"This is a test"', '', '']
        prefix = 'rpc' + prefix
        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '"\\"This is a test\\"" "" ""')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

        prefix, txt = 'pad2textrec', ['"This is a test"', '', '', 'more text']
        prefix = 'rpc' + prefix
        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '"\\"This is a test\\"" "" "" "more text"')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

        prefix, txt = 'pad3textrec', ['', '', '"This is a test"']
        prefix = 'rpc' + prefix
        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '"" "" "\\"This is a test\\""')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

    # Test is incomplete due to strlen against txt records
    def test_update_add_null_char_txt_record(self):
        """A NUL byte truncates the TXT string on the DNS-to-RPC trip."""
        prefix, txt = 'nulltextrec', ['NULL\x00BYTE']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        # Everything after the NUL byte is dropped.
        self.check_query_txt(prefix, ['NULL'])
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"NULL"'))

        prefix, txt = 'nulltextrec2', ['NULL\x00BYTE', 'NULL\x00BYTE']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, ['NULL', 'NULL'])
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"NULL" "NULL"'))

    def test_update_add_null_char_rpc_to_dns(self):
        """The truncated (NUL-free) TXT string round-trips via RPC."""
        prefix, txt = 'nulltextrec', ['NULL\x00BYTE']
        prefix = 'rpc' + prefix
        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '"NULL"')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, ['NULL'])
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

    def test_update_add_hex_char_txt_record(self):
        """A high (0xFF) byte in a TXT string survives the DNS-to-RPC trip."""
        prefix, txt = 'hextextrec', ['HIGH\xFFBYTE']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, txt)
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"HIGH\xFFBYTE"'))

    def test_update_add_hex_rpc_to_dns(self):
        """A high (0xFF) byte in a TXT string survives the RPC-to-DNS trip."""
        prefix, txt = 'hextextrec', ['HIGH\xFFBYTE']
        prefix = 'rpc' + prefix
        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '"HIGH\xFFBYTE"')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

    def test_update_add_slash_txt_record(self):
        """A backslash escape in a TXT string survives the DNS-to-RPC trip."""
        prefix, txt = 'slashtextrec', ['Th\\=is=is a test']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, txt)
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"Th\\\\=is=is a test"'))

    # This test fails against Windows as it eliminates slashes in RPC
    # One typical use for a slash is in records like 'var=value' to
    # escape '=' characters.
    def test_update_add_slash_rpc_to_dns(self):
        """A backslash escape in a TXT string survives the RPC-to-DNS trip
        (known to fail against Windows, which strips slashes)."""
        prefix, txt = 'slashtextrec', ['Th\\=is=is a test']
        prefix = 'rpc' + prefix
        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '"Th\\\\=is=is a test"')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

    def test_update_add_two_txt_records(self):
        """A two-string TXT record survives the DNS-to-RPC trip."""
        prefix, txt = 'textrec2', ['"This is a test"',
                                   '"and this is a test, too"']
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, txt)
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, '"\\"This is a test\\""' +
                             ' "\\"and this is a test, too\\""'))

    def test_update_add_two_rpc_to_dns(self):
        """A two-string TXT record survives the RPC-to-DNS trip."""
        prefix, txt = 'textrec2', ['"This is a test"',
                                   '"and this is a test, too"']
        prefix = 'rpc' + prefix
        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT,
                                 '"\\"This is a test\\""' +
                                 ' "\\"and this is a test, too\\""')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)

    def test_update_add_empty_txt_records(self):
        """An empty TXT record survives the DNS-to-RPC trip."""
        prefix, txt = 'emptytextrec', []
        p = self.make_txt_update(prefix, txt)
        response = self.dns_transaction_udp(p)
        self.assert_dns_rcode_equals(response, dns.DNS_RCODE_OK)
        self.check_query_txt(prefix, txt)
        self.assertIsNotNone(dns_record_match(self.rpc_conn, self.server_ip,
                             self.get_dns_domain(),
                             "%s.%s" % (prefix, self.get_dns_domain()),
                             dnsp.DNS_TYPE_TXT, ''))

    def test_update_add_empty_rpc_to_dns(self):
        """An empty TXT record survives the RPC-to-DNS trip."""
        prefix, txt = 'rpcemptytextrec', []

        name = "%s.%s" % (prefix, self.get_dns_domain())

        rec = data_to_dns_record(dnsp.DNS_TYPE_TXT, '')
        add_rec_buf = dnsserver.DNS_RPC_RECORD_BUF()
        add_rec_buf.rec = rec
        try:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, add_rec_buf, None)

            self.check_query_txt(prefix, txt)
        finally:
            self.rpc_conn.DnssrvUpdateRecord2(dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                                              0, self.server_ip, self.get_dns_domain(),
                                              name, None, add_rec_buf)
# Run every test class in this module through the subunit test runner.
TestProgram(module=__name__, opts=subunitopts)
| 38.583664 | 95 | 0.605171 | 6,257 | 48,654 | 4.423046 | 0.070161 | 0.041409 | 0.039458 | 0.051454 | 0.817164 | 0.798085 | 0.780271 | 0.771563 | 0.759783 | 0.742439 | 0 | 0.005614 | 0.297118 | 48,654 | 1,260 | 96 | 38.614286 | 0.803643 | 0.067148 | 0 | 0.733129 | 0 | 0 | 0.065831 | 0.000927 | 0 | 0 | 0.00192 | 0.000794 | 0.09816 | 0 | null | null | 0.005112 | 0.013292 | null | null | 0.013292 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0ebe6d26f534e97509b97cdc2ba54888a4000b20 | 4,315 | py | Python | Sub/Content_Block.py | LoneTHydra/TelegramBot | 0928764c3917941b66905ee0465ee546aa21165c | [
"Unlicense"
] | null | null | null | Sub/Content_Block.py | LoneTHydra/TelegramBot | 0928764c3917941b66905ee0465ee546aa21165c | [
"Unlicense"
] | null | null | null | Sub/Content_Block.py | LoneTHydra/TelegramBot | 0928764c3917941b66905ee0465ee546aa21165c | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
import telebot
import libs
import RS.Response as Response
from libs import Constant
from telebot.types import Message
bot = telebot.TeleBot(Constant.Token)
def Text(message: Message):
if message.chat.type in Constant.Type_Groups:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Admins or Constant.Text:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Restricted:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
else:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
del message
def AVNV(message: Message):
if message.chat.type in Constant.Type_Groups:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Admins or Constant.AVNV:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Restricted:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
else:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
del message
def Doc(message: Message):
if message.chat.type in Constant.Type_Groups:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Admins or Constant.Doc:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Restricted:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
else:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
del message
def Photo(message: Message):
if message.chat.type in Constant.Type_Groups:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Admins or Constant.Photo:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Restricted:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
else:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
del message
def Sticker(message: Message):
if message.chat.type in Constant.Type_Groups:
if bot.get_chat_member(message.chat.id,
message.from_user.id).status in Constant.Type_Admins or Constant.Sticker:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Restricted:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
else:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
del message
def LC(message: Message):
if message.chat.type in Constant.Type_Groups:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Admins or Constant.LC:
if bot.get_chat_member(message.chat.id, message.from_user.id).status in Constant.Type_Restricted:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
else:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
del message
def NCM(message: Message):
if message.chat.type in Constant.Type_Groups and Constant.NCM:
# Response.Deleter(message.chat.id, message.message_id)
# It Need?
Response.NCM(message.chat.title, message.from_user.language_code, message.chat.id)
else:
pass
del message
def LCM(message: Message):
if message.chat.type in Constant.Type_Groups and not Constant.LCM:
Response.Deleter(message.chat.id, message.message_id)
else:
pass
del message
def NCT(message: Message):
if message.chat.type in Constant.Type_Groups and Constant.NCT:
Response.NCT(message.chat.title, message.from_user.language_code, message.chat.id)
# Response.Deleter(message.chat.id, message.message_id)
# Is need?
else:
pass
del message
| 22.710526 | 119 | 0.653071 | 570 | 4,315 | 4.810526 | 0.087719 | 0.160467 | 0.137491 | 0.196937 | 0.900073 | 0.900073 | 0.892414 | 0.892414 | 0.860321 | 0.860321 | 0 | 0.000312 | 0.258169 | 4,315 | 189 | 120 | 22.830688 | 0.856295 | 0.034067 | 0 | 0.721649 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.092784 | false | 0.154639 | 0.051546 | 0 | 0.14433 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
0ef2e2a0573affa0bfc97f27dc32d395103eeefb | 10,483 | py | Python | weighting_discussion/on_morph2/generate_prob_list_1.py | Xiejiu/second_age_estimation | 89e9ef371a07aba0bbba496697176381e4e9432c | [
"MIT"
] | 1 | 2021-09-27T06:34:03.000Z | 2021-09-27T06:34:03.000Z | weighting_discussion/on_morph2/generate_prob_list_1.py | Xiejiu/second_age_estimation | 89e9ef371a07aba0bbba496697176381e4e9432c | [
"MIT"
] | 1 | 2021-07-29T01:14:11.000Z | 2021-07-29T01:54:33.000Z | weighting_discussion/on_morph2/generate_prob_list_1.py | Xiejiu/second_age_estimation | 89e9ef371a07aba0bbba496697176381e4e9432c | [
"MIT"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 29 13:23:52 2019

Generate per-age probability label files for the S1 split.

For every record ("<image_name> <age>") in the train/validation/test lists,
one output line is written of the form:

    <image_name> [p_g1 p_g2 p_g3 class]*99 <age>

where for each age threshold i in 1..99 the probability triple and class
encode whether the image's age is above (class 2), equal to (class 1) or
below (class 0) the threshold.  Three smoothing settings (case1/case3/case4)
are produced per split.

@author: xjc
"""
import os    # kept from the original file (was used for remove-before-append)
import math  # kept from the original file (unused there as well)


# Input split lists: one "<image_name> <age>" pair per line.
ORIG_FILES = {
    'train': './S1_train_processed.txt',
    'valid': './S1_validation_processed.txt',
    'test': './S1_test_processed.txt',
}

# Probability triples (prob_g1, prob_g2, prob_g3) per smoothing case, in the
# order (below, equal, above) relative to the image age.  The literals are
# taken verbatim from the original script so str() output is identical.
CASES = {
    'case1': ((0.0, 0.1, 0.9), (0.05, 0.9, 0.05), (0.9, 0.1, 0.0)),
    'case3': ((0.0, 0.3, 0.7), (0.15, 0.7, 0.15), (0.7, 0.3, 0.0)),
    'case4': ((0.0, 0.4, 0.6), (0.2, 0.6, 0.2), (0.6, 0.4, 0.0)),
}


def generate_prob_list(input_file, output_file, below, equal, above):
    """Write one probability-label line per record of *input_file*.

    below/equal/above are (prob_g1, prob_g2, prob_g3) triples used when the
    age threshold i is below / equal to / above the image's age.  The output
    file is overwritten in one pass ('w' mode), which replaces the original
    remove-then-append-per-record pattern with identical resulting content.
    """
    with open(input_file) as f:
        lines = f.readlines()
    with open(output_file, 'w') as out:
        for line in lines:
            parts = line.split()
            img_name = parts[0]
            img_age = int(parts[1])
            fields = [img_name]
            for i in range(1, 100):
                if i < img_age:
                    probs, tmp_class = below, 2
                elif i == img_age:
                    probs, tmp_class = equal, 1
                else:
                    probs, tmp_class = above, 0
                fields.extend([str(probs[0]), str(probs[1]),
                               str(probs[2]), str(tmp_class)])
            fields.append(str(img_age))
            out.write(' '.join(fields) + '\n')


def main():
    """Generate all nine output files (3 splits x 3 smoothing cases)."""
    for split, input_file in ORIG_FILES.items():
        for case, (below, equal, above) in CASES.items():
            output_file = './S1_{}_probs_{}.txt'.format(split, case)
            generate_prob_list(input_file, output_file, below, equal, above)


if __name__ == '__main__':
    main()
0efa71dc407845ef171f149357f3985ce4bccecc | 255 | py | Python | sunpy/visualization/animator/__init__.py | johan12345/sunpy | 56e1ab0c2c992f99e0fe3e6bff468b731a51228c | [
"BSD-2-Clause"
] | 628 | 2015-01-14T17:34:10.000Z | 2022-03-29T06:07:50.000Z | sunpy/visualization/animator/__init__.py | johan12345/sunpy | 56e1ab0c2c992f99e0fe3e6bff468b731a51228c | [
"BSD-2-Clause"
] | 3,983 | 2015-01-03T11:16:21.000Z | 2022-03-31T16:55:38.000Z | sunpy/visualization/animator/__init__.py | johan12345/sunpy | 56e1ab0c2c992f99e0fe3e6bff468b731a51228c | [
"BSD-2-Clause"
] | 582 | 2015-01-14T10:09:24.000Z | 2022-03-29T06:07:12.000Z | from sunpy.visualization.animator.base import *
from sunpy.visualization.animator.image import *
from sunpy.visualization.animator.line import *
from sunpy.visualization.animator.mapsequenceanimator import *
from sunpy.visualization.animator.wcs import *
| 42.5 | 62 | 0.843137 | 30 | 255 | 7.166667 | 0.333333 | 0.209302 | 0.511628 | 0.697674 | 0.669767 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078431 | 255 | 5 | 63 | 51 | 0.914894 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
161ae0aaa9234466942e64acf5106b850a1be1b8 | 10,434 | py | Python | tests/unit/db/postgres/test_loader.py | ellyteitsworth/records-mover | 21cd56efc2d23cfff04ec1fdf582e5229546c418 | [
"Apache-2.0"
] | null | null | null | tests/unit/db/postgres/test_loader.py | ellyteitsworth/records-mover | 21cd56efc2d23cfff04ec1fdf582e5229546c418 | [
"Apache-2.0"
] | null | null | null | tests/unit/db/postgres/test_loader.py | ellyteitsworth/records-mover | 21cd56efc2d23cfff04ec1fdf582e5229546c418 | [
"Apache-2.0"
] | null | null | null | import unittest
import sqlalchemy
from records_mover.db.postgres.loader import PostgresLoader
from records_mover.records import DelimitedRecordsFormat
from mock import MagicMock, Mock, patch
class TestPostgresLoader(unittest.TestCase):
    """Unit tests for PostgresLoader.

    All collaborators (URL resolver, SQLAlchemy metadata/engine and the
    postgres copy helpers) are mocked; the tests verify only how the
    loader wires them together.  The previously copy-pasted stub/verify
    sequences are factored into private helpers.
    """

    def setUp(self):
        self.mock_url_resolver = Mock(name='url_resolver')
        self.mock_meta = Mock(name='meta')
        # MagicMock: the loader drives the engine's context-manager protocol.
        self.mock_db = MagicMock(name='db')
        self.loader = PostgresLoader(self.mock_url_resolver,
                                     self.mock_meta,
                                     self.mock_db)

    def _delimited_records_format(self):
        """Return a DelimitedRecordsFormat mock with no hints set."""
        records_format = Mock(name='records_format',
                              spec=DelimitedRecordsFormat)
        records_format.hints = {}
        return records_format

    def _stub_copy_options(self, mock_postgres_copy_from_options,
                           mock_quote_value, date_order_style):
        """Stub option derivation and SQL value quoting for a load call."""
        mock_postgres_copy_from_options.return_value = (
            date_order_style,
            {'abc': 123},
        )
        mock_quote_value.return_value = "ABC"

    def _verify_load(self, mock_Table, mock_complain_on_unhandled_hints,
                     mock_copy_from, mock_quote_value, mock_load_plan,
                     mock_records_format, mock_schema, mock_table,
                     expected_datestyle, expected_source):
        """Assert the common post-conditions of a successful load."""
        mock_processing_instructions = mock_load_plan.processing_instructions
        mock_unhandled_hints = set(mock_records_format.hints.keys())
        mock_complain_on_unhandled_hints.assert_called_with(
            mock_processing_instructions.fail_if_dont_understand,
            mock_unhandled_hints,
            mock_records_format.hints)
        # The target table must be reflected from the live database.
        mock_Table.assert_called_with(mock_table,
                                      self.mock_meta,
                                      schema=mock_schema,
                                      autoload=True,
                                      autoload_with=self.mock_db)
        mock_conn = self.mock_db.engine.begin.return_value.__enter__.return_value
        mock_quote_value.assert_called_with(mock_conn, expected_datestyle)
        mock_conn.execute.assert_called_with('SET LOCAL DateStyle = ABC')
        mock_copy_from.assert_called_with(expected_source,
                                          mock_Table.return_value,
                                          mock_conn,
                                          abc=123)

    def _check_load_from_fileobj(self, mock_postgres_copy_from_options,
                                 mock_Table, mock_complain_on_unhandled_hints,
                                 mock_copy_from, mock_quote_value,
                                 date_order_style, expected_datestyle):
        """Drive load_from_fileobj() and verify the full copy pipeline."""
        mock_schema = Mock(name='schema')
        mock_table = Mock(name='table')
        mock_load_plan = Mock(name='load_plan')
        mock_fileobj = Mock(name='fileobj')
        mock_records_format = self._delimited_records_format()
        mock_load_plan.records_format = mock_records_format
        self._stub_copy_options(mock_postgres_copy_from_options,
                                mock_quote_value, date_order_style)
        self.loader.load_from_fileobj(mock_schema,
                                      mock_table,
                                      mock_load_plan,
                                      mock_fileobj)
        self._verify_load(mock_Table, mock_complain_on_unhandled_hints,
                          mock_copy_from, mock_quote_value, mock_load_plan,
                          mock_records_format, mock_schema, mock_table,
                          expected_datestyle, mock_fileobj)

    @mock.patch('records_mover.db.postgres.loader.quote_value')
    @mock.patch('records_mover.db.postgres.loader.copy_from')
    @mock.patch('records_mover.db.postgres.loader.complain_on_unhandled_hints')
    @mock.patch('records_mover.db.postgres.loader.Table')
    @mock.patch('records_mover.db.postgres.loader.postgres_copy_from_options')
    def test_load_from_fileobj(self,
                               mock_postgres_copy_from_options,
                               mock_Table,
                               mock_complain_on_unhandled_hints,
                               mock_copy_from,
                               mock_quote_value):
        # An explicit date order style is appended to the DateStyle setting.
        self._check_load_from_fileobj(mock_postgres_copy_from_options,
                                      mock_Table,
                                      mock_complain_on_unhandled_hints,
                                      mock_copy_from,
                                      mock_quote_value,
                                      date_order_style="DATE_ORDER_STYLE",
                                      expected_datestyle='ISO, DATE_ORDER_STYLE')

    @mock.patch('records_mover.db.postgres.loader.quote_value')
    @mock.patch('records_mover.db.postgres.loader.copy_from')
    @mock.patch('records_mover.db.postgres.loader.complain_on_unhandled_hints')
    @mock.patch('records_mover.db.postgres.loader.Table')
    @mock.patch('records_mover.db.postgres.loader.postgres_copy_from_options')
    def test_load_from_fileobj_default_date_order_style(self,
                                                        mock_postgres_copy_from_options,
                                                        mock_Table,
                                                        mock_complain_on_unhandled_hints,
                                                        mock_copy_from,
                                                        mock_quote_value):
        # A None date order style falls back to the MDY default.
        self._check_load_from_fileobj(mock_postgres_copy_from_options,
                                      mock_Table,
                                      mock_complain_on_unhandled_hints,
                                      mock_copy_from,
                                      mock_quote_value,
                                      date_order_style=None,
                                      expected_datestyle='ISO, MDY')

    @mock.patch('records_mover.db.loader.ConcatFiles')
    @mock.patch('records_mover.db.postgres.loader.quote_value')
    @mock.patch('records_mover.db.postgres.loader.copy_from')
    @mock.patch('records_mover.db.postgres.loader.complain_on_unhandled_hints')
    @mock.patch('records_mover.db.postgres.loader.Table')
    @mock.patch('records_mover.db.postgres.loader.postgres_copy_from_options')
    def test_load(self,
                  mock_postgres_copy_from_options,
                  mock_Table,
                  mock_complain_on_unhandled_hints,
                  mock_copy_from,
                  mock_quote_value,
                  mock_ConcatFiles):
        # Loading from a records directory resolves each manifest URL and
        # streams the concatenated files through copy_from.
        mock_directory = Mock(name='directory')
        mock_url = Mock(name='url')
        mock_directory.manifest_entry_urls.return_value = [mock_url]
        mock_loc = MagicMock(name='loc')
        self.mock_url_resolver.file_url.return_value = mock_loc
        mock_schema = Mock(name='schema')
        mock_table = Mock(name='table')
        mock_load_plan = Mock(name='load_plan')
        mock_records_format = self._delimited_records_format()
        mock_load_plan.records_format = mock_records_format
        self._stub_copy_options(mock_postgres_copy_from_options,
                                mock_quote_value, "DATE_ORDER_STYLE")
        self.loader.load(mock_schema,
                         mock_table,
                         mock_load_plan,
                         mock_directory)
        self.mock_url_resolver.file_url.assert_called_with(mock_url)
        self._verify_load(mock_Table, mock_complain_on_unhandled_hints,
                          mock_copy_from, mock_quote_value, mock_load_plan,
                          mock_records_format, mock_schema, mock_table,
                          'ISO, DATE_ORDER_STYLE',
                          mock_ConcatFiles.return_value)

    @mock.patch('records_mover.db.postgres.loader.complain_on_unhandled_hints')
    @mock.patch('records_mover.db.postgres.loader.postgres_copy_from_options')
    def test_can_load_this_format_true(self,
                                       mock_postgres_copy_from_options,
                                       mock_complain_on_unhandled_hints):
        # A delimited format whose options can be derived is loadable.
        source_records_format = self._delimited_records_format()
        out = self.loader.can_load_this_format(source_records_format)
        self.assertTrue(out)

    def test_load_failure_exception(self):
        # Callers are told to catch SQLAlchemy's InternalError on failure.
        self.assertEqual(sqlalchemy.exc.InternalError,
                         self.loader.load_failure_exception())

    def test_best_scheme_to_load_from(self):
        # Local files are the preferred source scheme for COPY FROM.
        self.assertEqual('file',
                         self.loader.best_scheme_to_load_from())

    @mock.patch('records_mover.db.loader.TemporaryDirectory')
    @mock.patch('records_mover.db.loader.FilesystemDirectoryUrl')
    def test_temporary_loadable_directory_loc(self,
                                              mock_FilesystemDirectoryUrl,
                                              mock_TemporaryDirectory):
        # The context manager yields a filesystem directory URL wrapper.
        with self.loader.temporary_loadable_directory_loc() as loc:
            self.assertEqual(loc, mock_FilesystemDirectoryUrl.return_value)
| 48.985915 | 89 | 0.585394 | 1,061 | 10,434 | 5.295947 | 0.090481 | 0.057839 | 0.052322 | 0.067628 | 0.805837 | 0.771312 | 0.762057 | 0.755828 | 0.7496 | 0.7496 | 0 | 0.002642 | 0.347039 | 10,434 | 212 | 90 | 49.216981 | 0.822105 | 0 | 0 | 0.72449 | 0 | 0 | 0.126509 | 0.095074 | 0 | 0 | 0 | 0 | 0.102041 | 1 | 0.040816 | false | 0 | 0.02551 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
16530eb4002c9fa24248ae5932dd5bfcf6cb188c | 2,228 | py | Python | src/tests/env_utils_test.py | tomgilbertson/script-server-v1 | bbdf289d3d993a0c81f20c36bce5f3eb064b0261 | [
"Apache-2.0",
"CC0-1.0"
] | 833 | 2016-09-08T13:27:36.000Z | 2022-03-27T07:10:48.000Z | src/tests/env_utils_test.py | tomgilbertson/script-server-v1 | bbdf289d3d993a0c81f20c36bce5f3eb064b0261 | [
"Apache-2.0",
"CC0-1.0"
] | 528 | 2016-05-23T09:17:04.000Z | 2022-03-30T12:45:50.000Z | src/tests/env_utils_test.py | tomgilbertson/script-server-v1 | bbdf289d3d993a0c81f20c36bce5f3eb064b0261 | [
"Apache-2.0",
"CC0-1.0"
] | 214 | 2016-09-08T14:46:41.000Z | 2022-03-25T01:04:14.000Z | import unittest
from utils import env_utils
class TestIsMinVersion(unittest.TestCase):
    """Tests for env_utils.is_min_version with string and float versions.

    Each test checks whether an actual interpreter version (a [major, minor]
    list) satisfies a required minimum version given as either a string
    ('2.7') or a float (2.7).
    """

    def _assert_min(self, required, actual):
        """Assert that *actual* satisfies the *required* minimum version."""
        self.assertTrue(env_utils.is_min_version(required, actual))

    def _assert_not_min(self, required, actual):
        """Assert that *actual* does NOT satisfy the *required* version."""
        self.assertFalse(env_utils.is_min_version(required, actual))

    def test_2_7_suitable_for_2_1_str(self):
        self._assert_min('2.1', [2, 7])

    def test_2_7_suitable_for_2_1_float(self):
        self._assert_min(2.1, [2, 7])

    def test_2_7_suitable_for_2_7_str(self):
        self._assert_min('2.7', [2, 7])

    def test_2_7_suitable_for_2_7_float(self):
        self._assert_min(2.7, [2, 7])

    def test_2_6_not_suitable_for_2_7_str(self):
        self._assert_not_min('2.7', [2, 6])

    def test_2_6_not_suitable_for_2_7_float(self):
        self._assert_not_min(2.7, [2, 6])

    def test_3_5_suitable_for_3_4_str(self):
        self._assert_min('3.4', [3, 5])

    def test_3_5_suitable_for_3_4_float(self):
        self._assert_min(3.4, [3, 5])

    def test_3_5_suitable_for_3_5_str(self):
        self._assert_min('3.5', [3, 5])

    def test_3_5_suitable_for_3_5_float(self):
        self._assert_min(3.5, [3, 5])

    def test_3_4_not_suitable_for_3_5_str(self):
        self._assert_not_min('3.5', [3, 4])

    def test_3_4_not_suitable_for_3_5_float(self):
        self._assert_not_min(3.5, [3, 4])

    def test_2_7_not_suitable_for_3_5_str(self):
        self._assert_not_min('3.5', [2, 7])

    def test_2_7_not_suitable_for_3_5_float(self):
        self._assert_not_min(3.5, [2, 7])

    def test_3_5_not_suitable_for_2_7_str(self):
        self._assert_not_min('2.7', [3, 5])

    def test_3_5_not_suitable_for_2_7_float(self):
        self._assert_not_min(2.7, [3, 5])

    def test_invalid_version(self):
        # A completely non-numeric version string is rejected.
        self._assert_not_min('abc', [3, 5])

    def test_invalid_major_version(self):
        # A malformed major component ('3a') is rejected.
        self._assert_not_min('3a.5', [3, 5])

    def test_invalid_minor_version(self):
        # A malformed minor component ('5b') is rejected.
        self._assert_not_min('3.5b', [3, 5])
| 35.365079 | 66 | 0.7114 | 404 | 2,228 | 3.462871 | 0.084158 | 0.038599 | 0.135811 | 0.176555 | 0.933524 | 0.904932 | 0.904932 | 0.904932 | 0.877055 | 0.767691 | 0 | 0.073561 | 0.157989 | 2,228 | 62 | 67 | 35.935484 | 0.672175 | 0 | 0 | 0 | 0 | 0 | 0.015709 | 0 | 0 | 0 | 0 | 0 | 0.463415 | 1 | 0.463415 | false | 0 | 0.04878 | 0 | 0.536585 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 10 |
16b9bbce2d50763591b1e2fd47b330393358803b | 5,638 | py | Python | backend/readLabels.py | kevinniland97/Gesture-IO | 821d71dfccbd594dd1a667ecc155069755678a9c | [
"MIT"
] | 1 | 2020-03-25T16:01:02.000Z | 2020-03-25T16:01:02.000Z | backend/readLabels.py | kevinniland97/Gesture-IO | 821d71dfccbd594dd1a667ecc155069755678a9c | [
"MIT"
] | 8 | 2020-03-24T11:59:54.000Z | 2020-03-25T15:39:49.000Z | backend/readLabels.py | kevinniland97/Gesture-IO | 821d71dfccbd594dd1a667ecc155069755678a9c | [
"MIT"
import numpy as np

# Gesture class labels for the dataset, in dataset order:
#   1 -> blank
#   2 -> open hand
#   3 -> two fingers
#   4 -> fourth gesture class (not named in the original comments — confirm)
# The original file spelled these out as an 1800-element literal
# (400 ones, 400 twos, 400 threes, 600 fours); built programmatically here.


def build_labels():
    """Return the label vector, shape (1800,), dtype uint8."""
    return np.asarray([1] * 400 + [2] * 400 + [3] * 400 + [4] * 600,
                      dtype='uint8')


def main():
    """Build the labels, save them to disk and re-read them as a check."""
    labels = build_labels()
    print(labels.shape)

    # Save labels to file: text format, one label per line, same content as
    # the original open/savetxt/close sequence.
    np.savetxt('dataset/data/labels.npy', labels, fmt='%s')

    # Sanity re-read, mirroring the original script (result was only used
    # by now-commented-out debug prints).
    checkLabels = list(np.loadtxt('dataset/data/labels.npy'))


if __name__ == '__main__':
    main()
bc3f43ccf0ba692a7310d52d3dd67431b8f4ed97 | 247 | py | Python | app/views/main.py | redperiabras/FILIPINEU | 833fd8d44c9d4de94d3433ca810a4a17831343ff | [
"MIT"
] | null | null | null | app/views/main.py | redperiabras/FILIPINEU | 833fd8d44c9d4de94d3433ca810a4a17831343ff | [
"MIT"
] | 1 | 2017-10-30T12:02:44.000Z | 2017-10-30T12:02:44.000Z | app/views/main.py | redperiabras/FILIPINEU | 833fd8d44c9d4de94d3433ca810a4a17831343ff | [
"MIT"
] | 1 | 2020-11-16T07:56:58.000Z | 2020-11-16T07:56:58.000Z | from flask import render_template, jsonify
from app import app
import random
@app.route('/')
def index():
    """Serve the landing page."""
    context = {'title': 'Home'}
    return render_template('index.html', **context)
@app.route('/about')
def about():
    """Serve the about page."""
    context = {'title': 'About'}
    return render_template('about.html', **context)
| 20.583333 | 52 | 0.732794 | 35 | 247 | 5.085714 | 0.457143 | 0.235955 | 0.224719 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.109312 | 247 | 11 | 53 | 22.454545 | 0.809091 | 0 | 0 | 0 | 0 | 0 | 0.145749 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | true | 0 | 0.333333 | 0.222222 | 0.777778 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
bc7746f64e24d141bdc2e232e51bff73c9499c2b | 5,598 | py | Python | authok/v3/test/authentication/test_get_token.py | authok/authok-python | 9853544387c78744e0f376d9a0221280770fc78c | [
"MIT"
] | null | null | null | authok/v3/test/authentication/test_get_token.py | authok/authok-python | 9853544387c78744e0f376d9a0221280770fc78c | [
"MIT"
] | null | null | null | authok/v3/test/authentication/test_get_token.py | authok/authok-python | 9853544387c78744e0f376d9a0221280770fc78c | [
"MIT"
] | null | null | null | import unittest
import mock
from ...authentication.get_token import GetToken
class TestGetToken(unittest.TestCase):
    """Tests for the GetToken OAuth token endpoints.

    Each test calls one grant helper with literal arguments and verifies
    that a single POST was issued to the token endpoint with exactly the
    expected payload.  The previously duplicated endpoint/payload
    assertions are factored into _assert_post().
    """

    # Token endpoint derived from the 'my.domain.com' test domain.
    _TOKEN_URL = 'https://my.domain.com/oauth/token'

    def _assert_post(self, mock_post, expected_data):
        """Assert the last POST hit the token endpoint with expected_data."""
        args, kwargs = mock_post.call_args
        self.assertEqual(args[0], self._TOKEN_URL)
        self.assertEqual(kwargs['data'], expected_data)

    @mock.patch('authok.v3.authentication.get_token.GetToken.post')
    def test_authorization_code(self, mock_post):
        g = GetToken('my.domain.com')

        g.authorization_code(client_id='cid',
                             client_secret='clsec',
                             code='cd',
                             grant_type='gt',
                             redirect_uri='idt')

        self._assert_post(mock_post, {
            'client_id': 'cid',
            'client_secret': 'clsec',
            'code': 'cd',
            'grant_type': 'gt',
            'redirect_uri': 'idt'
        })

    @mock.patch('authok.v3.authentication.get_token.GetToken.post')
    def test_authorization_code_pkce(self, mock_post):
        g = GetToken('my.domain.com')

        g.authorization_code_pkce(client_id='cid',
                                  code_verifier='cdver',
                                  code='cd',
                                  grant_type='gt',
                                  redirect_uri='idt')

        self._assert_post(mock_post, {
            'client_id': 'cid',
            'code_verifier': 'cdver',
            'code': 'cd',
            'grant_type': 'gt',
            'redirect_uri': 'idt'
        })

    @mock.patch('authok.v3.authentication.get_token.GetToken.post')
    def test_client_credentials(self, mock_post):
        g = GetToken('my.domain.com')

        g.client_credentials(client_id='cid',
                             client_secret='clsec',
                             audience='aud',
                             grant_type='gt')

        self._assert_post(mock_post, {
            'client_id': 'cid',
            'client_secret': 'clsec',
            'audience': 'aud',
            'grant_type': 'gt'
        })

    @mock.patch('authok.v3.authentication.get_token.GetToken.post')
    def test_login(self, mock_post):
        g = GetToken('my.domain.com')

        g.login(client_id='cid',
                client_secret='clsec',
                username='usrnm',
                password='pswd',
                scope='http://test.com/api',
                realm='rlm',
                audience='aud',
                grant_type='gt')

        self._assert_post(mock_post, {
            'client_id': 'cid',
            'client_secret': 'clsec',
            'username': 'usrnm',
            'password': 'pswd',
            'scope': 'http://test.com/api',
            'realm': 'rlm',
            'audience': 'aud',
            'grant_type': 'gt'
        })

    @mock.patch('authok.v3.authentication.get_token.GetToken.post')
    def test_refresh_token(self, mock_post):
        g = GetToken('my.domain.com')

        g.refresh_token(client_id='cid',
                        client_secret='clsec',
                        refresh_token='rt',
                        grant_type='gt',
                        scope='s')

        self._assert_post(mock_post, {
            'client_id': 'cid',
            'client_secret': 'clsec',
            'refresh_token': 'rt',
            'grant_type': 'gt',
            'scope': 's'
        })

    @mock.patch('authok.v3.authentication.get_token.GetToken.post')
    def test_passwordless_login_with_sms(self, mock_post):
        g = GetToken('my.domain.com')

        g.passwordless_login(
            client_id='cid',
            client_secret='csec',
            username='123456',
            otp='abcd',
            realm='sms',
            audience='aud',
            scope='openid')

        self._assert_post(mock_post, {
            'client_id': 'cid',
            'client_secret': 'csec',
            'realm': 'sms',
            'grant_type': 'http://authok.cn/oauth/grant-type/passwordless/otp',
            'username': '123456',
            'otp': 'abcd',
            'audience': 'aud',
            'scope': 'openid',
        })

    @mock.patch('authok.v3.authentication.get_token.GetToken.post')
    def test_passwordless_login_with_email(self, mock_post):
        g = GetToken('my.domain.com')

        g.passwordless_login(
            client_id='cid',
            client_secret='csec',
            username='a@b.c',
            otp='abcd',
            realm='email',
            audience='aud',
            scope='openid')

        self._assert_post(mock_post, {
            'client_id': 'cid',
            'client_secret': 'csec',
            'realm': 'email',
            'grant_type': 'http://authok.cn/oauth/grant-type/passwordless/otp',
            'username': 'a@b.c',
            'otp': 'abcd',
            'audience': 'aud',
            'scope': 'openid',
        })
| 31.449438 | 79 | 0.501786 | 564 | 5,598 | 4.803191 | 0.134752 | 0.041344 | 0.056848 | 0.075305 | 0.91842 | 0.91842 | 0.890365 | 0.890365 | 0.890365 | 0.853821 | 0 | 0.007163 | 0.351554 | 5,598 | 177 | 80 | 31.627119 | 0.739118 | 0 | 0 | 0.726619 | 0 | 0 | 0.264023 | 0.060021 | 0 | 0 | 0 | 0 | 0.100719 | 1 | 0.05036 | false | 0.057554 | 0.021583 | 0 | 0.079137 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
bc8893c1fa334309255751bfd9901605d7c086cc | 1,705 | py | Python | tests/data.py | pauperpythonistas/python-geomark | edddafde990bdf42441e6adde77ea3d67a0790ee | [
"BSD-3-Clause"
] | 1 | 2017-12-16T00:39:20.000Z | 2017-12-16T00:39:20.000Z | tests/data.py | pauperpythonistas/python-geomark | edddafde990bdf42441e6adde77ea3d67a0790ee | [
"BSD-3-Clause"
] | 34 | 2017-12-19T20:20:28.000Z | 2018-11-04T05:10:17.000Z | tests/data.py | greg-and-adam/python-geomark | edddafde990bdf42441e6adde77ea3d67a0790ee | [
"BSD-3-Clause"
] | 2 | 2017-12-19T19:39:59.000Z | 2018-11-01T02:53:15.000Z | import pytest
# pytest-dependency mark names of the six "create" tests; the follow-up
# tests below declare a dependency on all of them.
depends_create = [
"create_point_kml",
"create_line_kml",
"create_polygon_kml",
"create_point_geojson",
"create_line_geojson",
"create_polygon_geojson"
]
# Parametrization data for the creation tests: one {'format', 'file',
# 'geom_type'} dict per format/geometry combination, each wrapped in a
# *named* pytest-dependency mark so other tests can depend on it.
# NOTE(review): applying a mark directly to a parameter value
# (pytest.mark.X(value)) is the pre-pytest-4 style; newer pytest requires
# pytest.param(value, marks=pytest.mark.dependency(...)) — confirm the
# project's pinned pytest version still supports this.
dependency_geo_files = [
pytest.mark.dependency(name="create_point_kml")({'format': 'kml', 'file': 'point.kml', 'geom_type': 'point'}),
pytest.mark.dependency(name="create_line_kml")({'format': 'kml', 'file': 'line.kml', 'geom_type': 'linestring'}),
pytest.mark.dependency(name="create_polygon_kml")({'format': 'kml', 'file': 'polygon.kml', 'geom_type': 'polygon'}),
pytest.mark.dependency(name="create_point_geojson")({'format': 'geojson', 'file': 'point.geojson', 'geom_type': 'point'}),
pytest.mark.dependency(name="create_line_geojson")({'format': 'geojson', 'file': 'line.geojson', 'geom_type': 'linestring'}),
pytest.mark.dependency(name="create_polygon_geojson")({'format': 'geojson', 'file': 'polygon.geojson', 'geom_type': 'polygon'})
]
# The same six fixtures for the conversion tests, each one depending on
# ALL of the creation tests having passed first.
geo_files = [
pytest.mark.dependency(depends=depends_create)({'format': 'kml', 'file': 'point.kml', 'geom_type': 'point'}),
pytest.mark.dependency(depends=depends_create)({'format': 'kml', 'file': 'line.kml', 'geom_type': 'linestring'}),
pytest.mark.dependency(depends=depends_create)({'format': 'kml', 'file': 'polygon.kml', 'geom_type': 'polygon'}),
pytest.mark.dependency(depends=depends_create)({'format': 'geojson', 'file': 'point.geojson', 'geom_type': 'point'}),
pytest.mark.dependency(depends=depends_create)({'format': 'geojson', 'file': 'line.geojson', 'geom_type': 'linestring'}),
pytest.mark.dependency(depends=depends_create)({'format': 'geojson', 'file': 'polygon.geojson', 'geom_type': 'polygon'})
]
| 56.833333 | 131 | 0.683284 | 201 | 1,705 | 5.567164 | 0.094527 | 0.107239 | 0.214477 | 0.128686 | 0.854334 | 0.840036 | 0.795353 | 0.795353 | 0.728329 | 0.609473 | 0 | 0 | 0.098534 | 1,705 | 29 | 132 | 58.793103 | 0.728042 | 0 | 0 | 0 | 0 | 0 | 0.429326 | 0.025806 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.04 | 0 | 0.04 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bcd8df430ced586d8c49e1796d2d02e679121e32 | 93 | py | Python | tests/test_all.py | UlfR/jupyterlab_commands | ca32860501fd9ce7f9a850c728a85a3f7944d90a | [
"Apache-2.0"
] | 1 | 2020-01-25T00:36:08.000Z | 2020-01-25T00:36:08.000Z | tests/test_all.py | UlfR/jupyterlab_commands | ca32860501fd9ce7f9a850c728a85a3f7944d90a | [
"Apache-2.0"
] | null | null | null | tests/test_all.py | UlfR/jupyterlab_commands | ca32860501fd9ce7f9a850c728a85a3f7944d90a | [
"Apache-2.0"
] | 1 | 2020-01-25T00:24:25.000Z | 2020-01-25T00:24:25.000Z | # for Coverage
from jupyterlab_commands import *
from jupyterlab_commands.extension import *
| 23.25 | 43 | 0.83871 | 11 | 93 | 6.909091 | 0.636364 | 0.368421 | 0.578947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11828 | 93 | 3 | 44 | 31 | 0.926829 | 0.129032 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bce32265a0b82807a5e8c149d55421e694c82799 | 2,116 | py | Python | epytope/Data/pssms/bimas/mat/B_3701_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 7 | 2021-02-01T18:11:28.000Z | 2022-01-31T19:14:07.000Z | epytope/Data/pssms/bimas/mat/B_3701_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 22 | 2021-01-02T15:25:23.000Z | 2022-03-14T11:32:53.000Z | epytope/Data/pssms/bimas/mat/B_3701_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 4 | 2021-05-28T08:50:38.000Z | 2022-03-14T11:45:32.000Z | B_3701_9 = {0: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': -2.30258509299, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 1: {'A': 0.0, 'C': 0.0, 'E': 2.30258509299, 'D': 3.68887945411, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 2: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 3: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 4: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.405465108108, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.405465108108, 'Y': 0.0}, 5: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 6: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 7: {'A': 0.0, 'C': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 1.60943791243, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 1.60943791243, 'L': 1.60943791243, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 8: {'A': 0.0, 'C': 0.0, 'E': -2.30258509299, 'D': -2.30258509299, 'G': -2.30258509299, 'F': 0.69314718056, 'I': 2.30258509299, 'H': -2.30258509299, 'K': -2.30258509299, 'M': 0.69314718056, 'L': 
2.30258509299, 'N': 0.0, 'Q': -2.30258509299, 'P': -2.30258509299, 'S': 0.0, 'R': -2.30258509299, 'T': 0.0, 'W': 0.0, 'V': 0.69314718056, 'Y': 0.69314718056}, -1: {'con': -2.30258509299}} | 2,116 | 2,116 | 0.375709 | 556 | 2,116 | 1.426259 | 0.068345 | 0.398487 | 0.034048 | 0.045397 | 0.669609 | 0.669609 | 0.669609 | 0.633039 | 0.622951 | 0.622951 | 0 | 0.349398 | 0.176276 | 2,116 | 1 | 2,116 | 2,116 | 0.105565 | 0 | 0 | 0 | 0 | 0 | 0.086443 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
bcea3a70720e9cf27815b10e85c07eb6867e7947 | 26,696 | py | Python | models.py | engharat/SBADAGAN | c2ce620fbfb5e4612bc58fed7b52ea6cd591dd38 | [
"Apache-2.0"
] | 16 | 2018-04-12T10:56:00.000Z | 2021-11-26T05:09:18.000Z | models.py | engharat/SBADAGAN | c2ce620fbfb5e4612bc58fed7b52ea6cd591dd38 | [
"Apache-2.0"
] | null | null | null | models.py | engharat/SBADAGAN | c2ce620fbfb5e4612bc58fed7b52ea6cd591dd38 | [
"Apache-2.0"
] | 2 | 2019-07-25T10:46:11.000Z | 2020-04-28T07:50:31.000Z | import sys
import os
import numpy as np
import keras.backend as K
from keras.models import Model
from keras.layers import Input,merge
from keras.layers.merge import _Merge
from keras import initializers
from keras.initializers import RandomNormal
from keras.utils import vis_utils
from keras.layers.advanced_activations import LeakyReLU, ELU
from keras.activations import linear
from keras.layers.normalization import BatchNormalization
from keras.layers.core import Flatten, Dense, Activation, Reshape, Lambda, Dropout
from keras.layers.convolutional import Conv2D, Convolution2D, UpSampling2D, MaxPooling2D, Conv2DTranspose
from keras.layers.pooling import AveragePooling2D, GlobalAveragePooling2D
from keras.layers.noise import GaussianNoise
from keras.regularizers import *
from keras.applications.vgg16 import VGG16
from keras.constraints import unitnorm
from functools import partial
import tensorflow as tf
from normalization import *
import resnet50
def make_trainable(net, value):
    """Freeze or unfreeze a model: set `trainable` on it and on every layer."""
    net.trainable = value
    for layer in net.layers:
        layer.trainable = value
def wasserstein(y_true, y_pred):
    """Wasserstein (earth-mover) critic loss: mean of the elementwise product."""
    product = y_true * y_pred
    return K.mean(product)
def gradient_penalty_loss(y_true, y_pred, averaged_samples, gradient_penalty_weight):
    """WGAN-GP penalty: push ||grad critic(x_hat)||_2 towards 1.

    Args:
        y_true: unused (required by the Keras loss signature)
        y_pred: critic output on the interpolated samples
        averaged_samples: the interpolated real/fake batch x_hat
        gradient_penalty_weight: lambda coefficient of the penalty term
    """
    gradients = K.gradients(y_pred, averaged_samples)
    gradients = K.concatenate([K.flatten(tensor) for tensor in gradients])
    # NOTE(review): a single L2 norm over the whole flattened batch, not a
    # per-sample norm as in the WGAN-GP paper — confirm this is intended.
    gradient_l2_norm = K.sqrt(K.sum(K.square(gradients)))
    gradient_penalty = gradient_penalty_weight * K.square(1 - gradient_l2_norm)
    # NOTE(review): K.mean(y_pred) - K.mean(y_pred) is arithmetically zero;
    # presumably kept so y_pred stays wired into the returned graph —
    # confirm it can simply be dropped.
    return K.mean(y_pred) - K.mean(y_pred) + gradient_penalty
def visualize_model(model):
    """Print a textual summary of `model` and save its graph plot under ./figures/."""
    model.summary()
    figure_path = './figures/%s.png' % model.name
    vis_utils.plot_model(model,
                         to_file=figure_path,
                         show_shapes=True,
                         show_layer_names=True)
def generator_google_mnistM(noise_dim, img_source_dim,img_dest_dim,deterministic,pureGAN,wd,suffix=None):
    """ResNet-style pixel-level generator mapping a source image to a
    destination-domain image of the same spatial size.

    Args:
        noise_dim: shape tuple of the noise input
        img_source_dim: shape of the source-domain image
        img_dest_dim: shape of the destination-domain image (channels read here)
        deterministic: if True, ignore the noise input entirely
        pureGAN: if True, feed only the noise projection (ignore the image)
        wd: L2 weight-decay coefficient applied to all layers
        suffix: None -> model named "generator_google1", else "generator_google2"
            (used to build two generators in one graph)

    Returns:
        keras Model mapping [noise, source image] -> generated image in [-1, 1]
    """
    s = img_source_dim[1]
    # shp = np.expand_dims(img_dim[1:],1) # to make shp= (None, 1, 28, 28) but is not working
    start_dim = int(s / 4)  # NOTE(review): unused below — candidate for removal
    if K.image_dim_ordering() == "th":
        input_channels = img_source_dim[0]
        output_channels = img_dest_dim[0]
        reshape_shape = (input_channels, s, s)
        shp=reshape_shape
    else:
        input_channels = img_source_dim[-1]
        output_channels = img_dest_dim[-1]
        reshape_shape = (s, s, input_channels)
        shp=reshape_shape
    gen_noise_input = Input(shape=noise_dim, name="generator_input")
    gen_image_input = Input(shape=shp, name="generator_image_input")
    # Project the noise vector to a 5-channel feature map of the image size.
    x = Dense(5*s*s, input_dim=noise_dim,W_regularizer=l2(wd))(gen_noise_input)
    x = Reshape((5,s,s))(x)
    x = Activation("relu")(x)
    if deterministic: #here I link or not link the noise vector to the whole network
        g = gen_image_input
    elif pureGAN:
        g = x
    else:
        g = merge([gen_image_input, x], mode='concat',concat_axis=1) # because of concat_axis=1, will it work on tensorflow NHWC too?
    x1 = Conv2D(64, (3, 3), border_mode='same', kernel_initializer="he_normal",W_regularizer=l2(wd))(g) #convolved by 3x3 filter to get 64x55x35
    x1 = Activation('relu')(x1)
    # Four residual blocks: conv-BN-relu-conv-BN plus identity shortcut.
    for i in range(4):
        x = Conv2D(64, (3, 3), border_mode='same', kernel_initializer="he_normal",W_regularizer=l2(wd))(x1)
        x=BatchNormGAN(axis=1)(x)
        x = Activation('relu')(x)
        x = Conv2D(64, (3, 3), border_mode='same', kernel_initializer="he_normal",W_regularizer=l2(wd))(x)
        x=BatchNormGAN(axis=1)(x)
        x1 = merge([x, x1], mode='sum')
        x1 = Activation('relu')(x1)
    # Last Conv to get the output image
    x1 = Conv2D(output_channels, (1, 1),name="gen_conv2d_final", border_mode='same', kernel_initializer="he_normal",W_regularizer=l2(wd))(x1)
    x1 = Activation('tanh')(x1)  # output pixels in [-1, 1]
    if suffix is None:
        generator_model = Model(input=[gen_noise_input,gen_image_input], output=[x1], name="generator_google1")
    else:
        generator_model = Model(input=[gen_noise_input,gen_image_input], output=[x1], name="generator_google2")
    visualize_model(generator_model)
    return generator_model
def generator_2048x7x7(noise_dim, img_source_dim,img_dest_dim,deterministic,pureGAN,wd,suffix=None):
    """Feature-level generator producing a 2048-channel map.

    Same input plumbing as generator_google_mnistM, but the output is a raw
    (linear) 2048 x s x s feature map instead of a tanh image.

    Args:
        noise_dim: shape tuple of the noise input
        img_source_dim: shape of the source feature map / image
        img_dest_dim: shape of the destination output
        deterministic: if True, ignore the noise input entirely
        pureGAN: if True, use only the noise projection (ignore the image)
        wd: L2 weight-decay coefficient
        suffix: None -> model named "generator_google1", else "generator_google2"

    Returns:
        keras Model mapping [noise, image] -> 2048-channel feature map
    """
    s = img_source_dim[1]
    # shp = np.expand_dims(img_dim[1:],1) # to make shp= (None, 1, 28, 28) but is not working
    start_dim = int(s / 4)  # NOTE(review): unused below, as is output_channels
    if K.image_dim_ordering() == "th":
        input_channels = img_source_dim[0]
        output_channels = img_dest_dim[0]
        reshape_shape = (input_channels, s, s)
        shp=reshape_shape
    else:
        input_channels = img_source_dim[-1]
        output_channels = img_dest_dim[-1]
        reshape_shape = (s, s, input_channels)
        shp=reshape_shape
    gen_noise_input = Input(shape=noise_dim, name="generator_input")
    gen_image_input = Input(shape=shp, name="generator_image_input")
    # Project the noise vector to a 256-channel map of the input size.
    x = Dense(256*s*s, input_dim=noise_dim,W_regularizer=l2(wd))(gen_noise_input)
    x = Reshape((256,s,s))(x)
    x = Activation("relu")(x)
    if deterministic: #here I link or not link the noise vector to the whole network
        g = gen_image_input
    elif pureGAN:
        g = x
    else:
        g = merge([gen_image_input, x], mode='concat',concat_axis=1) # because of concat_axis=1, will it work on tensorflow NHWC too?
    x1 = Conv2D(128, (7, 7),strides=(1,1), border_mode='same', kernel_initializer="he_normal",W_regularizer=l2(wd))(g)
    x1 = Activation('relu')(x1)
    x1 = BatchNormGAN(axis=1)(x1)
    # Final conv is left linear: the output is a feature map, not an image.
    x1 = Conv2D(2048, (7, 7),strides=(1,1), border_mode='same', kernel_initializer="he_normal",W_regularizer=l2(wd))(x1)
    if suffix is None:
        generator_model = Model(input=[gen_noise_input,gen_image_input], output=[x1], name="generator_google1")
    else:
        generator_model = Model(input=[gen_noise_input,gen_image_input], output=[x1], name="generator_google2")
    visualize_model(generator_model)
    return generator_model
def discriminator_google_mnistM(img_dim,wd):
    """Binary (sigmoid) discriminator for small images.

    Four conv stages (64-128-256-512 filters) with dropout and Gaussian
    noise on every stage, ending in a single sigmoid real/fake unit.

    Args:
        img_dim: input image shape (BatchNorm axis=1 -> channels first)
        wd: L2 weight-decay coefficient

    Returns:
        keras Model named "discriminator_google" outputting a value in [0, 1]
    """
    disc_input = Input(shape=img_dim, name="discriminator_input")
    x = Conv2D(64, (3, 3), strides=(1, 1), name="conv1",border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(disc_input)
    x=BatchNormGAN(axis=1)(x)
    x = Dropout(0.1)(x)
    x = LeakyReLU(0.2)(x)
    x = GaussianNoise( sigma=0.2 )(x)
    x = Conv2D(128, (3, 3), strides=(2, 2), name="conv2",border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
    x = Dropout(0.2)(x)
    # x = LeakyReLU(0.2)(x)  -- NOTE(review): activation disabled on this stage; confirm intentional
    x = GaussianNoise( sigma=0.2 )(x)
    x = Conv2D(256, (3, 3), strides=(2, 2), name="conv3",border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
    x=BatchNormGAN(axis=1)(x)
    x = Dropout(0.2)(x)
    x = LeakyReLU(0.2)(x)
    x = GaussianNoise( sigma=0.2 )(x)
    x = Conv2D(512, (3, 3), strides=(2, 2), name="conv4",border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
    x = Dropout(0.2)(x)
    # x = LeakyReLU(0.2)(x)  -- NOTE(review): activation disabled on this stage; confirm intentional
    x = GaussianNoise( sigma=0.2 )(x)
    x = Flatten()(x)
    x = Dense(1, init=RandomNormal(stddev=0.02),activation='sigmoid', name='fc',W_regularizer=l2(wd))(x)
    discriminator_model = Model(input=[disc_input], output=x, name="discriminator_google")
    visualize_model(discriminator_model)
    return discriminator_model
def discriminator_2048x7x7(img_dim,wd,n_classes,disc_type):
    """Discriminator pair operating on 2048x7x7 feature maps.

    Builds one conv trunk and returns two Models sharing it: a real/fake
    (domain) head and an n-way softmax (class) head.

    Args:
        img_dim: input feature-map shape (BatchNorm axis=1 -> channels first)
        wd: L2 weight-decay coefficient
        n_classes: classes for the softmax head
        disc_type: unused here, kept for signature parity with
            discriminator_dcgan — NOTE(review): confirm it can be dropped

    Returns:
        (discriminator_model_domain, discriminator_model_class) tuple
    """
    disc_input = Input(shape=img_dim, name="discriminator_input")
    x = Conv2D(128, (7, 7), strides=(1, 1), name="conv1",border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(disc_input)
    x=BatchNormGAN(axis=1)(x)
    x = Dropout(0.1)(x)
    x = LeakyReLU(0.2)(x)
    x = GaussianNoise( sigma=0.2 )(x)
    aux = x  # class head branches off before the final real/fake conv
    x = Conv2D(1, (3, 3), strides=(1, 1), name="finale_conv",
               border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
    aux = Flatten()(aux)
    aux = Dense(n_classes, activation='softmax', name='auxiliary',W_regularizer=l2(wd))(aux)
    x = GlobalAveragePooling2D()(x)  # pool the 1-channel map to one score
    discriminator_model_domain = Model(input=[disc_input], output=[x], name="discriminator_domain")
    discriminator_model_class = Model(input=[disc_input], output=[aux], name="discriminator_class")
    visualize_model(discriminator_model_domain)
    visualize_model(discriminator_model_class)
    return discriminator_model_domain, discriminator_model_class
def discriminator_dcgan(img_dim,wd,n_classes,disc_type):
    """DCGAN-style discriminator with doubling conv filters.

    Args:
        img_dim: input image shape (BatchNorm axis=1 -> channels first)
        wd: L2 weight-decay coefficient
        n_classes: classes for the auxiliary softmax head
        disc_type: "nclass_disc" for a (real/fake, class) two-output model,
            "simple_disc" for a real/fake-only model

    Returns:
        keras Model named "discriminator"

    Raises:
        ValueError: if disc_type is not one of the two supported values
    """
    min_s = img_dim[1]
    disc_input = Input(shape=img_dim, name="discriminator_input")
    # Get the list of number of conv filters
    # (first layer starts with 64), filters are subsequently doubled
    nb_conv =int(np.floor(np.log(min_s // 4) / np.log(2)))
    list_f = [64 * min(8, (2 ** i)) for i in range(nb_conv)]
    x = Conv2D(list_f[0], (3, 3), strides=(2, 2), name="disc_conv2d_1",
               border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(disc_input)
    x=BatchNormalization(axis=1)(x)
    x = LeakyReLU(0.2)(x)
    for i, f in enumerate(list_f[1:]):
        name = "disc_conv2d_%s" % (i + 2)
        x = Conv2D(f, (3, 3), strides=(2, 2), name=name,
                   border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
        x=BatchNormalization(axis=1)(x)
        x = LeakyReLU(0.2)(x)
    # 1-channel score map; pooled to a single real/fake value below.
    x = Conv2D(1, (3, 3), strides=(1, 1), name="finale_conv",
               border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
    if disc_type == "nclass_disc":
        aux = Flatten()(x)
        aux = Dense(n_classes, activation='softmax', name='auxiliary',W_regularizer=l2(wd))(aux)
        x = GlobalAveragePooling2D()(x)
        discriminator_model = Model(input=[disc_input], output=[x,aux], name="discriminator")
    elif disc_type == "simple_disc":
        x = GlobalAveragePooling2D()(x)
        discriminator_model = Model(input=[disc_input], output=[x], name="discriminator")
    else:
        # Fail fast: the old code used a Python-2 `print` statement here and
        # then fell through to an undefined `discriminator_model` (NameError).
        raise ValueError("Unknown discriminator type: %r" % (disc_type,))
    visualize_model(discriminator_model)
    return discriminator_model
def discriminator_dcgan_doubled(img_dim,wd,n_classes,disc_type):
    """DCGAN-style trunk returning two heads as separate Models.

    Same conv trunk as discriminator_dcgan, but always builds both a
    real/fake (domain) head and an n-way softmax (class) head.

    Args:
        img_dim: input image shape (BatchNorm axis=1 -> channels first)
        wd: L2 weight-decay coefficient
        n_classes: classes for the softmax head
        disc_type: unused here — NOTE(review): confirm it can be dropped

    Returns:
        (discriminator_model_domain, discriminator_model_class) tuple
    """
    min_s = img_dim[1]
    disc_input = Input(shape=img_dim, name="discriminator_input")
    # Get the list of number of conv filters
    # (first layer starts with 64), filters are subsequently doubled
    nb_conv =int(np.floor(np.log(min_s // 4) / np.log(2)))
    list_f = [64 * min(8, (2 ** i)) for i in range(nb_conv)]
    x = Conv2D(list_f[0], (3, 3), strides=(2, 2), name="disc_conv2d_1",
               border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(disc_input)
    x=BatchNormalization(axis=1)(x)
    x = LeakyReLU(0.2)(x)
    for i, f in enumerate(list_f[1:]):
        name = "disc_conv2d_%s" % (i + 2)
        x = Conv2D(f, (3, 3), strides=(2, 2), name=name,
                   border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
        x=BatchNormalization(axis=1)(x)
        x = LeakyReLU(0.2)(x)
    aux = x  # class head branches off before the final real/fake conv
    x = Conv2D(1, (3, 3), strides=(1, 1), name="finale_conv",
               border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
    aux = Flatten()(aux)
    aux = Dense(n_classes, activation='softmax', name='auxiliary',W_regularizer=l2(wd))(aux)
    x = GlobalAveragePooling2D()(x)
    discriminator_model_domain = Model(input=[disc_input], output=[x], name="discriminator_domain")
    discriminator_model_class = Model(input=[disc_input], output=[aux], name="discriminator_class")
    visualize_model(discriminator_model_domain)
    visualize_model(discriminator_model_class)
    return discriminator_model_domain, discriminator_model_class
def discriminator_custom(img_dim,wd):
    """DCGAN-style critic without a sigmoid (linear Dense(1) output).

    Args:
        img_dim: input image shape (BatchNorm axis=1 -> channels first)
        wd: L2 weight-decay coefficient

    Returns:
        keras Model named "discriminator".
        NOTE(review): the final Dense is applied to the un-flattened 4-D
        conv output (Keras maps Dense over the last axis), so the output is
        spatial rather than one scalar per image — confirm this is intended
        (the flatten/GAP alternative is commented out below).
    """
    min_s = img_dim[1]
    disc_input = Input(shape=img_dim, name="discriminator_input")
    # Get the list of number of conv filters
    # (first layer starts with 64), filters are subsequently doubled
    nb_conv =int(np.floor(np.log(min_s // 4) / np.log(2)))
    list_f = [64 * min(8, (2 ** i)) for i in range(nb_conv+1)]  # one extra stage vs discriminator_dcgan
    x = Conv2D(list_f[0], (3, 3), strides=(2, 2), name="disc_conv2d_1",
               border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(disc_input)
    x=BatchNormalization(axis=1)(x)
    x = LeakyReLU(0.2)(x)
    for i, f in enumerate(list_f[1:]):
        name = "disc_conv2d_%s" % (i + 2)
        x = Conv2D(f, (3, 3), strides=(2, 2), name=name,
                   border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
        x=BatchNormalization(axis=1)(x)
        x = LeakyReLU(0.2)(x)
    #x = Conv2D(1, (3, 3), strides=(1, 1), name="finale_conv",
    #           border_mode="same", kernel_initializer=RandomNormal(stddev=0.02),kernel_regularizer=l2(wd))(x)
    #x = GlobalAveragePooling2D()(x)
    x = Dense(1, init=RandomNormal(stddev=0.02), name='fc',W_regularizer=l2(wd))(x)
    discriminator_model = Model(input=[disc_input], output=x, name="discriminator")
    visualize_model(discriminator_model)
    return discriminator_model
def classificator_google_mnistM(img_dim,n_classes,wd):
    """LeNet-style image classifier:
    conv32(5x5) -> pool -> conv48(5x5) -> pool -> fc100 -> fc100 -> softmax.

    Args:
        img_dim: input image shape
        n_classes: number of output classes
        wd: L2 weight-decay coefficient applied to all layers

    Returns:
        keras Model named "classifier"
    """
    input = Input(shape=img_dim, name="classifier_input")
    x = Conv2D(32, (5, 5), strides=(1, 1), name="conv1",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(input)
    x = Activation('relu')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    x = Conv2D(48, (5, 5), strides=(1, 1), name="conv2",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    x = Flatten()(x)
    x = Dense(100, init="he_normal",activation="relu", name='fc1',W_regularizer=l2(wd))(x)
    x = Dense(100, init="he_normal",activation="relu", name='fc2',W_regularizer=l2(wd))(x)
    x = Dense(n_classes, init="he_normal",activation="softmax", name='fc_softmax',W_regularizer=l2(wd))(x)
    classifier_model = Model(input=input,output=x,name="classifier")
    visualize_model(classifier_model)
    return classifier_model
def classificator_signs_relu(img_dim,n_classes,wd):
    """VGG-style classifier with ReLU activations (ReLU twin of
    classificator_signs).

    Three conv stages (3x conv32, 3x conv64, 3x conv128), each followed by
    2x2 max-pooling, then fc128 and an n-way softmax.

    Args:
        img_dim: input image shape
        n_classes: number of output classes
        wd: L2 weight-decay coefficient applied to all layers

    Returns:
        keras Model named "classifier"
    """
    input = Input(shape=img_dim, name="classifier_input")
    # Stage 1: three 3x3 convs, 32 filters.
    x = Conv2D(32, (3, 3), strides=(1, 1), name="conv1_1",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(input)
    x = Activation('relu')(x)
    x = Conv2D(32, (3, 3), strides=(1, 1), name="conv1_2",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = Conv2D(32, (3, 3), strides=(1, 1), name="conv1_3",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    # Stage 2: three 3x3 convs, 64 filters.
    x = Conv2D(64, (3, 3), strides=(1, 1), name="conv2_1",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = Conv2D(64, (3, 3), strides=(1, 1), name="conv2_2",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = Conv2D(64, (3, 3), strides=(1, 1), name="conv2_3",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    # Stage 3: three 3x3 convs, 128 filters.
    x = Conv2D(128, (3, 3), strides=(1, 1), name="conv3_1",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), name="conv3_2",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), name="conv3_3",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = Activation('relu')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    x = Flatten()(x)
    x = Dense(128, init="he_normal",activation="relu", name='fc1',W_regularizer=l2(wd))(x)
    x = Dense(n_classes, init="he_normal",activation="softmax", name='fc_softmax',W_regularizer=l2(wd))(x)
    classifier_model = Model(input=input,output=x,name="classifier")
    visualize_model(classifier_model)
    return classifier_model
def classificator_signs(img_dim,n_classes,wd):
    """VGG-style classifier with ELU activations in the conv stages
    (ELU twin of classificator_signs_relu; the fc layers stay ReLU).

    Three conv stages (3x conv32, 3x conv64, 3x conv128), each followed by
    2x2 max-pooling, then fc128 and an n-way softmax.

    Args:
        img_dim: input image shape
        n_classes: number of output classes
        wd: L2 weight-decay coefficient applied to all layers

    Returns:
        keras Model named "classifier"
    """
    input = Input(shape=img_dim, name="classifier_input")
    # Stage 1: three 3x3 convs, 32 filters.
    x = Conv2D(32, (3, 3), strides=(1, 1), name="conv1_1",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(input)
    x = ELU()(x)
    x = Conv2D(32, (3, 3), strides=(1, 1), name="conv1_2",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = Conv2D(32, (3, 3), strides=(1, 1), name="conv1_3",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    # Stage 2: three 3x3 convs, 64 filters.
    x = Conv2D(64, (3, 3), strides=(1, 1), name="conv2_1",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = Conv2D(64, (3, 3), strides=(1, 1), name="conv2_2",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = Conv2D(64, (3, 3), strides=(1, 1), name="conv2_3",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    # Stage 3: three 3x3 convs, 128 filters.
    x = Conv2D(128, (3, 3), strides=(1, 1), name="conv3_1",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), name="conv3_2",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), name="conv3_3",border_mode="same", kernel_initializer="he_normal",kernel_regularizer=l2(wd))(x)
    x = ELU()(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2,2))(x)
    x = Flatten()(x)
    x = Dense(128, init="he_normal",activation="relu", name='fc1',W_regularizer=l2(wd))(x)
    x = Dense(n_classes, init="he_normal",activation="softmax", name='fc_softmax',W_regularizer=l2(wd))(x)
    classifier_model = Model(input=input,output=x,name="classifier")
    visualize_model(classifier_model)
    return classifier_model
def resnet50classifier(img_dim,n_classes,wd):
    """Frozen ImageNet-pretrained ResNet50 backbone + dropout + softmax head.

    Args:
        img_dim: input image shape
        n_classes: number of output classes
        wd: L2 weight decay on the new fc layer

    Returns:
        keras Model named "resnet"; only the final Dense layer is trainable.
    """
    drop=0.5
    model_name="resnet"
    _input = Input(shape=img_dim, name="classificator_input")
    ResNet = resnet50.ResNet50(_input,Shape=img_dim,weights='imagenet')
    make_trainable(ResNet, False)  # freeze the pretrained backbone
    x = Dropout(drop)(ResNet.output)
    out = Dense(n_classes, activation='softmax',init="he_normal", name='fc',W_regularizer=l2(wd))(x)
    resnet_model = Model(input=_input, output=out, name=model_name)
    visualize_model(resnet_model)
    return resnet_model
def classificator_2048x7x7(img_dim,n_classes,wd):
    """Flatten a pre-extracted feature map and classify it with one softmax layer."""
    feature_input = Input(shape=img_dim, name="classificator_input")
    flat_features = Flatten()(feature_input)
    predictions = Dense(n_classes, activation='softmax',init="he_normal", name='fc',W_regularizer=l2(wd))(flat_features)
    classifier = Model(input=feature_input, output=predictions, name="fc_classifier")
    visualize_model(classifier)
    return classifier
def GenToClassifierModel(generator, classifier, noise_dim, img_source_dim):
    """Chain generator -> classifier into one keras model.

    Args:
        generator: keras generator model taking [noise, image]
        classifier: keras classifier applied to the generated image
        noise_dim: shape of the generator noise input
        img_source_dim: shape of the source image input

    Returns:
        keras model mapping [noise, image] -> class predictions
    """
    noise_input = Input(shape=noise_dim, name="noise_input")
    image_input = Input(shape=img_source_dim, name="image_input")
    fake_image = generator([noise_input, image_input])
    y_pred = classifier(fake_image)
    chained = Model(input=[noise_input, image_input],
                    output=y_pred,
                    name="GenToClassifierModel")
    visualize_model(chained)
    return chained
def DCGAN(generator, discriminator, noise_dim, img_source_dim, img_dest_dim,monsterClass):
    """Stack generator + discriminator for adversarial generator training.

    Args:
        generator: keras generator model taking [noise, image]
        discriminator: keras discriminator applied to the generated image;
            expected to return a single output when monsterClass is True and
            a (real/fake, class) pair otherwise
        noise_dim: generator input noise dimension
        img_source_dim: source image shape
        img_dest_dim: unused here — NOTE(review): confirm it can be dropped
        monsterClass: selects the single-output vs two-output wiring

    Returns:
        keras model named "DCGAN"
    """
    noise_input = Input(shape=noise_dim, name="noise_input")
    image_input = Input(shape=img_source_dim, name="image_input")
    generated_image = generator([noise_input,image_input])
    if monsterClass:
        y_aux = discriminator(generated_image)
        DCGAN = Model(input=[noise_input,image_input],
                      output=y_aux,
                      name="DCGAN")
    else:
        DCGAN_output,y_aux = discriminator(generated_image)
        DCGAN = Model(input=[noise_input,image_input],
                      output=[DCGAN_output,y_aux],
                      name="DCGAN")
    visualize_model(DCGAN)
    return DCGAN
def DCGAN_naive(generator, discriminator, noise_dim, img_source_dim):
    """Chain generator -> discriminator into one (unnamed) keras model.

    Args:
        generator: keras generator model taking [noise, image]
        discriminator: keras discriminator applied to the generated image
        noise_dim: generator input noise dimension
        img_source_dim: source image shape

    Returns:
        keras model mapping [noise, image] -> discriminator output
    """
    noise_input = Input(shape=noise_dim, name="noise_input")
    image_input = Input(shape=img_source_dim, name="image_input")
    fake_image = generator([noise_input, image_input])
    validity = discriminator(fake_image)
    gan = Model(input=[noise_input, image_input], output=validity)
    visualize_model(gan)
    return gan
def DCGAN_naive2(generator, discriminator, noise_dim, img_source_dim):
    """Second generator->discriminator stack, kept as a separate entry point.

    The original body was a byte-for-byte copy of DCGAN_naive; delegate to
    it so the graph-building code is maintained in one place.

    Args:
        generator: keras generator model taking [noise, image]
        discriminator: keras discriminator applied to the generated image
        noise_dim: generator input noise dimension
        img_source_dim: source image shape

    Returns:
        keras model mapping [noise, image] -> discriminator output
    """
    return DCGAN_naive(generator, discriminator, noise_dim, img_source_dim)
def reconstructor(generator1, generator2, noise_dim, img_source_dim):
    """Cycle model: generator1 maps the image forward, generator2 maps it back.

    Args:
        generator1: forward generator taking [noise, image]
        generator2: backward generator taking [noise2, generated image]
        noise_dim: noise input shape (shared by both noise inputs)
        img_source_dim: source image shape

    Returns:
        keras model mapping [noise, image, noise2] -> reconstructed image
    """
    noise_input = Input(shape=noise_dim, name="noise_input")
    noise_input2 = Input(shape=noise_dim, name="noise_input2")
    image_input = Input(shape=img_source_dim, name="image_input")
    generated_image = generator1([noise_input,image_input])
    reconstructor_output = generator2([noise_input2,generated_image])
    reconstructor = Model(input=[noise_input,image_input,noise_input2],
                          output=reconstructor_output)
    visualize_model(reconstructor)
    return reconstructor
def reconstructorClass(generator1, generator2, classificator, noise_dim, img_source_dim):
    """Cycle-plus-classifier model: classify the reconstructed image.

    generator1 maps the source image forward, generator2 maps it back, and
    classificator predicts a class from the reconstruction.

    Args:
        generator1: forward generator taking [noise, image]
        generator2: backward generator taking [noise2, generated image]
        classificator: classifier applied to the reconstruction
        noise_dim: noise input shape (shared by both noise inputs)
        img_source_dim: source image shape

    Returns:
        keras model mapping [noise, image, noise2] -> class predictions
    """
    noise_input = Input(shape=noise_dim, name="noise_input")
    noise_input2 = Input(shape=noise_dim, name="noise_input2")
    image_input = Input(shape=img_source_dim, name="image_input")
    generated_image = generator1([noise_input,image_input])
    reconstructor_output = generator2([noise_input2,generated_image])
    recClass_output = classificator(reconstructor_output)
    reconstructor = Model(input=[noise_input,image_input,noise_input2],
                          output=recClass_output)
    visualize_model(reconstructor)
    return reconstructor
class RandomWeightedAverage(_Merge):
    """Takes a randomly-weighted average of two tensors. In geometric terms, this outputs a random point on the line
    between each pair of input points.
    Inheriting from _Merge is a little messy but it was the quickest solution I could think of.
    Improvements appreciated."""
    def _merge_function(self, inputs):
        # One uniform weight per sample, broadcast over (C, H, W).
        # NOTE(review): the batch size 32 is hard-coded; the layer only works
        # when the training batch size is exactly 32 — confirm it matches.
        weights = K.random_uniform((32, 1, 1, 1))
        return (weights * inputs[0]) + ((1 - weights) * inputs[1])
def disc_penalty(discriminator_model, noise_dim, img_source_dim, opt, model_name="disc_penalty_model"):
image_input_real = Input(shape=img_source_dim, name="image_input_real")
image_input_gen = Input(shape=img_source_dim, name="image_input_gen")
averaged_samples = RandomWeightedAverage()([image_input_real, image_input_gen])
averaged_samples_output = discriminator_model(averaged_samples)
disc_penalty_model = Model(input=[image_input_real,image_input_gen],
output=averaged_samples_output)
partial_gp_loss = partial(gradient_penalty_loss,
averaged_samples=averaged_samples,
gradient_penalty_weight=10)
partial_gp_loss.__name__ = 'gradient_penalty' # Functions need names or Keras will throw an error
disc_penalty_model.compile(loss=partial_gp_loss, optimizer=opt)
return disc_penalty_model
| 45.869416 | 161 | 0.685871 | 3,749 | 26,696 | 4.672713 | 0.084022 | 0.010618 | 0.048807 | 0.046809 | 0.821212 | 0.810595 | 0.794897 | 0.786049 | 0.768752 | 0.750314 | 0 | 0.034258 | 0.179915 | 26,696 | 581 | 162 | 45.948365 | 0.765907 | 0.046299 | 0 | 0.672414 | 0 | 0 | 0.080854 | 0.001841 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.059113 | null | null | 0.002463 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4c26788bcda7d0dce38d3b4cf4eac5719a1956f6 | 718 | py | Python | api/tests/unittests/__init__.py | frach/python-aws-rest-api | f3e1288f0d6516cfa40c799d00792692fbed69fb | [
"MIT"
] | 1 | 2021-09-09T12:17:07.000Z | 2021-09-09T12:17:07.000Z | api/tests/unittests/__init__.py | frach/python-aws-rest-api | f3e1288f0d6516cfa40c799d00792692fbed69fb | [
"MIT"
] | 1 | 2021-10-19T13:51:40.000Z | 2021-10-20T19:50:00.000Z | api/tests/unittests/__init__.py | frach/python-aws-rest-api | f3e1288f0d6516cfa40c799d00792692fbed69fb | [
"MIT"
] | null | null | null | from functools import wraps
from botocore.stub import Stubber
# def stub_boto3_client(client):
# def test_function_wrapper(test_function):
# @wraps(test_function)
# def wrapper(*args, **kwargs):
# with Stubber(client) as stubber:
# return test_function(*args, stubber, **kwargs)
# return wrapper
# return test_function_wrapper
def stub_boto3_resource(resource):
def test_function_wrapper(test_function):
@wraps(test_function)
def wrapper(*args, **kwargs):
with Stubber(resource.meta.client) as stubber:
return test_function(*args, stubber, **kwargs)
return wrapper
return test_function_wrapper
| 31.217391 | 64 | 0.66156 | 81 | 718 | 5.641975 | 0.246914 | 0.262582 | 0.166302 | 0.09628 | 0.743982 | 0.743982 | 0.743982 | 0.743982 | 0.743982 | 0.743982 | 0 | 0.003717 | 0.250696 | 718 | 22 | 65 | 32.636364 | 0.845725 | 0.428969 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.3 | false | 0 | 0.2 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
4c351c23b54680ed9ff1506fa5033ab1c3fd1f47 | 105 | py | Python | Oefeningen/standalone/functions_1.py | Seviran/Python_3 | e30ead250129d25bbc0a7ee2f6298775b2f4529a | [
"MIT"
] | null | null | null | Oefeningen/standalone/functions_1.py | Seviran/Python_3 | e30ead250129d25bbc0a7ee2f6298775b2f4529a | [
"MIT"
] | null | null | null | Oefeningen/standalone/functions_1.py | Seviran/Python_3 | e30ead250129d25bbc0a7ee2f6298775b2f4529a | [
"MIT"
] | null | null | null |
def generate_evens():
return [num for num in range(1, 50) if num % 2 == 0]
print(generate_evens())
| 17.5 | 56 | 0.647619 | 18 | 105 | 3.666667 | 0.777778 | 0.393939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060241 | 0.209524 | 105 | 5 | 57 | 21 | 0.73494 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0 | 0.333333 | 0.666667 | 0.333333 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
d5b976da93ec873d901cab21329c1b51a3255210 | 3,532 | py | Python | 3/3b.py | atnguyen1/AdventOfCode2021 | 3a7070d97bd7357b5458131a283178fded4b7a20 | [
"MIT"
] | null | null | null | 3/3b.py | atnguyen1/AdventOfCode2021 | 3a7070d97bd7357b5458131a283178fded4b7a20 | [
"MIT"
] | null | null | null | 3/3b.py | atnguyen1/AdventOfCode2021 | 3a7070d97bd7357b5458131a283178fded4b7a20 | [
"MIT"
] | null | null | null | import sys
import copy
from collections import Counter
data = list()
filtered_data = list()
position_map_initial = list()
# Initialize Data structures
with open('3.input.txt', 'r') as fh:
data = fh.read().split('\n')
data = [list(x) for x in data]
for pos in data[0]:
position_map_initial.append(Counter())
filtered_data = copy.deepcopy(data)
current_inspection_index = 0
oxygen = None
# Find Oxygen
while len(filtered_data) > 1:
position_map = copy.deepcopy(position_map_initial)
# Count
for d in filtered_data:
for index, char in enumerate(d):
position_map[index][char] += 1
gamma = list()
epsilon = list()
# print('Index', current_inspection_index)
# print(position_map)
for i, c in enumerate(position_map):
sums = c.most_common(2)
if len(sums) == 1:
gamma.append(sums[0][0])
epsilon.append(sums[0][0])
continue
if sums[0][1] == sums[1][1]: # Equal counts
if sums[0][0] == '0':
gamma.append(sums[1][0]) # Keep the 1
epsilon.append(sums[0][0]) # Keep the 0
else:
gamma.append(sums[0][0])
epsilon.append(sums[1][0])
else:
gamma.append(sums[0][0])
epsilon.append(sums[1][0])
# Filter List based on gamma
# print('Gamma', gamma)
g_filter = gamma[current_inspection_index]
updated_list = list()
for f in filtered_data:
if f[current_inspection_index] == g_filter:
updated_list.append(f)
#for u in updated_list:
# print(u)
filtered_data = updated_list
current_inspection_index += 1
oxygen = ''.join(filtered_data[0])
print('Oxygen', oxygen, int(oxygen, 2))
# Reset Variables
filtered_data = copy.deepcopy(data)
current_inspection_index = 0
co2 = None
# Find CO2
while len(filtered_data) > 1:
position_map = copy.deepcopy(position_map_initial)
# Count
for d in filtered_data:
for index, char in enumerate(d):
position_map[index][char] += 1
gamma = list()
epsilon = list()
# print('Index', current_inspection_index)
# print(position_map)
for i, c in enumerate(position_map):
sums = c.most_common(2)
if len(sums) == 1:
gamma.append(sums[0][0])
epsilon.append(sums[0][0])
continue
if sums[0][1] == sums[1][1]: # Equal counts
if sums[0][0] == '0':
gamma.append(sums[1][0]) # Keep the 1
epsilon.append(sums[0][0]) # Keep the 0
else:
gamma.append(sums[0][0])
epsilon.append(sums[1][0])
else:
gamma.append(sums[0][0])
epsilon.append(sums[1][0])
# Filter List based on gamma
#print('Epsilon', epsilon)
e_filter = epsilon[current_inspection_index]
updated_list = list()
for f in filtered_data:
if f[current_inspection_index] == e_filter:
updated_list.append(f)
#for u in updated_list:
# print(u)
filtered_data = updated_list
current_inspection_index += 1
co2 = ''.join(filtered_data[0])
print('Co2', co2, int(co2, 2))
print('Final Val', int(oxygen, 2) * int(co2, 2))
'''
filtered_data = data.copy()
current_inspection_index = 0
oxygen = None
co2 = None
while len(filtered_data) > 1:
position_map = list()
# Count
for d in filtered_data:
for index, char in enumerate(d):
position_map[index][char] += 1
gamma = list()
epsilon = list()
for i, c in enumerate(position_map):
sums = c.most_common(2)
if sums[0][1] == sums[1][1]: # Equal counts
if sums[0] == '0':
gamma.append(sums[1][0]) # Keep the 1
epsilon.append(sums[0][0]) # Keep the 0
else:
gamma.append(sums[0][0])
epsilon.append(sums[1][0])
else:
gamma.append(sums[0][0])
epsilon.append(sums[1][0])
# Filter List based on gamma and epsilon
''' | 21.802469 | 51 | 0.667044 | 556 | 3,532 | 4.107914 | 0.138489 | 0.096322 | 0.042032 | 0.068301 | 0.814799 | 0.795534 | 0.776708 | 0.762697 | 0.762697 | 0.718039 | 0 | 0.035294 | 0.181767 | 3,532 | 162 | 52 | 21.802469 | 0.755017 | 0.121744 | 0 | 0.731707 | 0 | 0 | 0.014232 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.036585 | 0 | 0.036585 | 0.036585 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d5c05e56860c8215b76c7c091463a382b4053a2a | 5,380 | py | Python | modules/database/database.py | camargo2019/conecta | 3bf67a7aa5280d1d263e5ee40cbc48f0045fcb99 | [
"MIT"
] | 1 | 2021-08-01T05:40:31.000Z | 2021-08-01T05:40:31.000Z | modules/database/database.py | camargo2019/conecta | 3bf67a7aa5280d1d263e5ee40cbc48f0045fcb99 | [
"MIT"
] | null | null | null | modules/database/database.py | camargo2019/conecta | 3bf67a7aa5280d1d263e5ee40cbc48f0045fcb99 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import os
import sys
import json
import requests
import time
from tkinter import *
from PIL import ImageTk, Image
from ..Notifications import Notification
class DataBase:
def __init__(self):
self.url = "http://34.95.239.34:8080"
def login_valida(self, cpf):
dados = json.dumps({'cpf': cpf})
try:
try:
x = requests.post(self.url + "/api/activate/employee", data=dados)
except:
while True:
try:
x = requests.post(self.url+"/api/activate/employee", data=dados)
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
dedocejson = json.loads(x.text)
x.close()
if (dedocejson["status"]["result"] == "error"):
return dedocejson["status"]["message"]
else:
return "Sucesso"
def dados_usuario(self, cpf):
try:
dados = json.dumps({'cpf': cpf})
try:
x = requests.post(self.url + "/api/activate/employee", data=dados)
except:
while True:
try:
x = requests.post(self.url+"/api/activate/employee", data=dados)
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
dedocejson = json.loads(x.text)
x.close()
if (dedocejson["status"]["result"] == "error"):
return False
else:
return dedocejson
def ativo_user(self, id):
try:
try:
x = requests.get(self.url + "/api/has/activity/"+str(id))
except:
while True:
try:
x = requests.get(self.url+"/api/has/activity/"+str(id))
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
dedocejson = json.loads(x.text)
x.close()
if (dedocejson["activity"]["status"] == False):
return False
else:
return True
def ver_dados(self, cpf):
try:
dados = json.dumps({'cpf': cpf})
try:
x = requests.post(self.url + "/api/activate/employee", data=dados)
except:
while True:
try:
x = requests.post(self.url+"/api/activate/employee", data=dados)
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
decodejson = json.loads(x.text)
x.close()
return decodejson
def update_status(self, IdUser):
try:
dados = json.dumps({'employee_id': str(IdUser)})
try:
x = requests.put(self.url + "/api/register/activity", data=dados)
except:
while True:
try:
x = requests.put(self.url+"/api/register/activity", data=dados)
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
decodejson = json.loads(x.text)
x.close()
return decodejson
def update_status_end(self, IdUser):
try:
dados = json.dumps({'employee_id': str(IdUser)})
try:
x = requests.put(self.url + "/api/end/workday", data=dados)
except:
while True:
try:
x = requests.put(self.url+"/api/end/workday", data=dados)
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
decodejson = json.loads(x.text)
x.close()
return decodejson
def programas_company(self, idEmpresa):
try:
try:
x = requests.get(self.url + "/api/programs/"+ str(idEmpresa))
except:
while True:
try:
x = requests.get(self.url+"/api/programs/"+ str(idEmpresa))
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
decodejson = json.loads(x.text)
x.close()
return decodejson
def init_workday_chat(self, cpf):
dados = json.dumps({'cpf': str(cpf)})
try:
try:
x = requests.post(self.url + "/api/init/workday", data=dados)
except:
while True:
try:
x = requests.post(self.url + "/api/init/workday", data=dados)
break
except:
pass
time.sleep(5)
except:
Notification(title="ConectaIT", subtitle="Ops..!", descrition="Error[03] - Por favor, Contate o Administrador do sistema!",
icone="alliance")
return "Contate o Adminstrador do Sistema"
decodejson = json.loads(x.text)
x.close()
return decodejson
| 25.619048 | 127 | 0.617844 | 659 | 5,380 | 5.019727 | 0.145675 | 0.035973 | 0.058041 | 0.038694 | 0.877267 | 0.877267 | 0.862757 | 0.862757 | 0.860943 | 0.857316 | 0 | 0.009337 | 0.243494 | 5,380 | 209 | 128 | 25.741627 | 0.80344 | 0.00316 | 0 | 0.859551 | 0 | 0 | 0.260819 | 0.034155 | 0 | 0 | 0 | 0 | 0 | 1 | 0.050562 | false | 0.044944 | 0.044944 | 0 | 0.207865 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
91182ed3807e5c65fc300e911fc241e61daf2c70 | 229 | py | Python | harness/determined/layers/__init__.py | wahello/determined | 1876c04a3741a55bccf1c01bb624708f20c1faa6 | [
"Apache-2.0"
] | null | null | null | harness/determined/layers/__init__.py | wahello/determined | 1876c04a3741a55bccf1c01bb624708f20c1faa6 | [
"Apache-2.0"
] | null | null | null | harness/determined/layers/__init__.py | wahello/determined | 1876c04a3741a55bccf1c01bb624708f20c1faa6 | [
"Apache-2.0"
] | null | null | null | from determined.layers._harness_profiler import HarnessProfiler
from determined.layers._worker_process import WorkerProcessContext
from determined.layers._workload_sequencer import WorkloadSequencer, make_compatibility_workloads
| 57.25 | 97 | 0.912664 | 24 | 229 | 8.375 | 0.666667 | 0.208955 | 0.298507 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056769 | 229 | 3 | 98 | 76.333333 | 0.930556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9136ee8a7b2ed98e28d665380e9bdf55b89c2349 | 24,577 | py | Python | stylegan2/NN_getRepThreshPairImg_testCode_forStylegan2.py | chenqiguo/GAN_replication | 18e71914164f0d735354afb0134ce00570080ecd | [
"OLDAP-2.3"
] | 2 | 2021-11-11T00:18:28.000Z | 2021-12-28T01:10:25.000Z | stylegan2/NN_getRepThreshPairImg_testCode_forStylegan2.py | chenqiguo/GAN_replication | 18e71914164f0d735354afb0134ce00570080ecd | [
"OLDAP-2.3"
] | null | null | null | stylegan2/NN_getRepThreshPairImg_testCode_forStylegan2.py | chenqiguo/GAN_replication | 18e71914164f0d735354afb0134ce00570080ecd | [
"OLDAP-2.3"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 26 21:48:12 2020
@author: guo.1648
"""
# referenced from NN_getRepThreshPairImg_testCode_forBiggan.py.
# Copy the NN-matching pair images whose L2-norm distance is smaller than the
# threshold (e.g. 10000) into the dst folder.
import re
import numpy as np
from shutil import copyfile
#5000 #6000 #7000 #8000 #9000 #10000 # <-- for pixel-wise matching
#0.35 #0.4 #0.45 #0.5 # <-- for simCLR matching
#10 #11 #12 #13 #14 #15 # <-- for inception v3 matching
# Distance cutoff for keeping a generated/real NN pair; pick from the value
# tables above according to the feature space used for matching
# (pixel-wise, simCLR, or inception v3).
NNmatchDist_threshold_value: int = 10000
# presumably the number of generated query images per checkpoint — TODO confirm
# against the matching code that consumes this constant (not visible here).
total_sample_num: int = 1024
"""
#### tmp: inception v3:
# for biggan FLOWER_128_sub1000: 1000 images dataset
# for Itr38950:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/biggan/NNquery_inception_v3/FLOWER_128_sub1000/Itr38950/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/biggan/NNquery_inception_v3/FLOWER_128_sub1000/Itr38950/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/biggan/NNquery_inception_v3/FLOWER_128_sub1000/Itr38950/NNmatchResult_threshold15/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/biggan/NNquery_inception_v3/FLOWER_128_sub1000/Itr38950/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### tmp: inception v3:
# for stylegan2 FLOWER_128_sub1000: 1000 images dataset
# for fakes003248:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_inception_v3/FLOWER_128_sub1000_resume/fakes003248/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_inception_v3/FLOWER_128_sub1000_resume/fakes003248/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_inception_v3/FLOWER_128_sub1000_resume/fakes003248/NNmatchResult_threshold15/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_inception_v3/FLOWER_128_sub1000_resume/fakes003248/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for FLOWER_128: 8189 images dataset (the original FLOWER dataset)
# for fakes002526:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes002526/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes002526/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128/fakes002526/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes002526/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for simCLR FLOWER_128: 8189 images dataset (the original FLOWER dataset)
# for fakes002526:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NNquery_simCLR_v2/FLOWER_128/fakes002526/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NNquery_simCLR_v2/FLOWER_128/fakes002526/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_simCLR_v2/FLOWER_128/fakes002526/NNmatchResult_threshold0.4/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_simCLR_v2/FLOWER_128/fakes002526/NNmatchDist_smallerThanThresh.txt'
"""
"""
# for rebuttal:
#### for FLOWER_128: 8189 images dataset
# for fakes001925:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes001925/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes001925/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128/fakes001925/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128/fakes001925/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for FLOWER_128_sub1000: 1000 images dataset (resume)
# for fakes003248:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000_resume/fakes003248/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000_resume/fakes003248/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128_sub1000_resume/fakes003248/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000_resume/fakes003248/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for simCLR FLOWER_128_sub1000: 1000 images dataset (resume)
# for fakes003248:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NNquery_simCLR_v2/FLOWER_128_sub1000_resume/fakes003248/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NNquery_simCLR_v2/FLOWER_128_sub1000_resume/fakes003248/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_simCLR_v2/FLOWER_128_sub1000_resume/fakes003248/NNmatchResult_threshold0.4/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_simCLR_v2/FLOWER_128_sub1000_resume/fakes003248/NNmatchDist_smallerThanThresh.txt'
"""
"""
# for rebuttal:
#### for FLOWER_128_sub1000: 1000 images dataset
# for fakes001684:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000/fakes001684/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000/fakes001684/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128_sub1000/fakes001684/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128_sub1000/fakes001684/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for FLOWER_128_sub4000: 4000 images dataset (resume)
# for fakes003248:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000_resume/fakes003248/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000_resume/fakes003248/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128_sub4000_resume/fakes003248/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000_resume/fakes003248/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for simCLR FLOWER_128_sub4000: 4000 images dataset (resume)
# for fakes003248:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NNquery_simCLR_v2/FLOWER_128_sub4000_resume/fakes003248/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NNquery_simCLR_v2/FLOWER_128_sub4000_resume/fakes003248/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_simCLR_v2/FLOWER_128_sub4000_resume/fakes003248/NNmatchResult_threshold0.35/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NNquery_simCLR_v2/FLOWER_128_sub4000_resume/fakes003248/NNmatchDist_smallerThanThresh.txt'
"""
"""
# for rebuttal:
#### for FLOWER_128_sub4000: 4000 images dataset
# for fakes001925:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000/fakes001925/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000/fakes001925/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128_sub4000/fakes001925/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_128_sub4000/fakes001925/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CelebA_128_sub200: 200 images dataset
# for fakes007700:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub200/fakes007700/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub200/fakes007700/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub200/fakes007700/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub200/fakes007700/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CelebA_128_sub600: 600 images dataset
# for fakes005414:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub600/fakes005414/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub600/fakes005414/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub600/fakes005414/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub600/fakes005414/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CelebA_128_sub1000: 1000 images dataset
# for fakes004933:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub1000/fakes004933/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub1000/fakes004933/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub1000/fakes004933/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub1000/fakes004933/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CelebA_128_sub4000: 4000 images dataset
# for fakes003369:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub4000/fakes003369/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub4000/fakes003369/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub4000/fakes003369/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub4000/fakes003369/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CelebA_128_sub8000: 8000 images dataset
# for fakes001684:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub8000/fakes001684/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub8000/fakes001684/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub8000/fakes001684/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CelebA_128_sub8000/fakes001684/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for MNIST_128_sub10000: 10000 images dataset
# for fakes005173:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000/fakes005173/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000/fakes005173/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub10000/fakes005173/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub10000/fakes005173/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for MNIST_128_sub10000: 10000 images dataset, 3ch:
# for fakes005173:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000_3ch/fakes005173/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000_3ch/fakes005173/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub10000_3ch/fakes005173/NNmatchResult_threshold9000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub10000_3ch/fakes005173/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for MNIST_128_sub30000: 30000 images dataset
# for fakes005053:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000/fakes005053/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000/fakes005053/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub30000/fakes005053/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub30000/fakes005053/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for MNIST_128_sub30000: 30000 images dataset, bi:
# for fakes005053:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000_bi/fakes005053/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000_bi/fakes005053/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub30000_bi/fakes005053/NNmatchResult_threshold8000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_sub30000_bi/fakes005053/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for MNIST_128_train: 60000 images dataset
# for fakes003609:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_train/fakes003609/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_train/fakes003609/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_train/fakes003609/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/MNIST_128_train/fakes003609/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for LSUN_128_sub10000: 10000 images dataset, bi:
# for fakes004812:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub10000/fakes004812/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub10000/fakes004812/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub10000/fakes004812/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub10000/fakes004812/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for LSUN_128_sub30000: 30000 images dataset, bi:
# for fakes004692:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub30000/fakes004692/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub30000/fakes004692/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub30000/fakes004692/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub30000/fakes004692/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for LSUN_128_sub60000: 60000 images dataset, bi:
# for fakes006497:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub60000/fakes006497/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub60000/fakes006497/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub60000/fakes006497/NNmatchResult_threshold10000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub60000/fakes006497/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for LSUN_128_sub1000_resume: 1000 images dataset, bi:
# for fakes002165:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub1000_resume/fakes002165/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub1000_resume/fakes002165/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub1000_resume/fakes002165/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub1000_resume/fakes002165/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for LSUN_128_sub5000_resume: 5000 images dataset, bi:
# for fakes000000:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub5000_resume/fakes000000/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub5000_resume/fakes000000/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub5000_resume/fakes000000/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub5000_resume/fakes000000/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for LSUN_128_sub200: 200 images dataset, bi:
# for fakes006497:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub200/fakes006497/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub200/fakes006497/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub200/fakes006497/NNmatchResult_threshold7000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/LSUN_128_sub200/fakes006497/NNmatchDist_smallerThanThresh.txt'
"""
### for rebuttal: CIFAR10:
"""
#### for CIFAR10_32_sub1000: 1000 images dataset
# for fakes002813:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub1000/fakes002813/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub1000/fakes002813/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub1000/fakes002813/NNmatchResult_threshold1800/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub1000/fakes002813/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CIFAR10_32_sub4000: 4000 images dataset
# for fakes003014:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub4000/fakes003014/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub4000/fakes003014/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub4000/fakes003014/NNmatchResult_threshold1800/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub4000/fakes003014/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CIFAR10_32_sub8000: 8000 images dataset
# for fakes003014:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub8000/fakes003014/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub8000/fakes003014/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub8000/fakes003014/NNmatchResult_threshold1800/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub8000/fakes003014/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for CIFAR10_32_sub10000: 10000 images dataset
# for fakes002009:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub10000/fakes002009/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub10000/fakes002009/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub10000/fakes002009/NNmatchResult_threshold1800/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/CIFAR10_32_sub10000/fakes002009/NNmatchDist_smallerThanThresh.txt'
"""
# L2-distance cutoff: an NN match at or below this value is kept (copied and logged).
NNmatchDist_threshold_value = 18000 #19000 # 20000 21000 23000 25000
# Number of generated samples that were queried; denominator for match_percent.
total_sample_num = 480
"""
#### for FLOWER_256_sub1000: 1000 images dataset
# for fakes002009:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub1000/fakes004435/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub1000/fakes004435/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256_sub1000/00002/fakes004435/NNmatchResult_threshold18000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256_sub1000/00002/fakes004435/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for FLOWER_256_sub4000: 4000 images dataset
# for fakes000000:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub4000/fakes006128/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub4000/fakes006128/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256_sub4000/00002/fakes006128/NNmatchResult_threshold18000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256_sub4000/00002/fakes006128/NNmatchDist_smallerThanThresh.txt'
"""
"""
#### for FLOWER_256_sub6000: 6000 images dataset
# for fakes000403:
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub6000/fakes006290/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub6000/fakes006290/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256_sub6000/00002/fakes006290/NNmatchResult_threshold18000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256_sub6000/00002/fakes006290/NNmatchDist_smallerThanThresh.txt'
"""
#"""
#### for FLOWER_256: 8189 images dataset
# for fakes000161:
# Active configuration: where to read NN-match images/distances from, and
# where to copy/record the matches that pass the distance threshold.
srcRootDir_img = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256/fakes006209/NNmatchResult/'
srcTxtFile = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256/fakes006209/NNmatchDist.txt'
dstRootDir_img = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256/00002/fakes006209/NNmatchResult_threshold18000/'
dstTxtName_matchDistThresh = '/eecf/cbcsl/data100b/Chenqi/gan_results_for_presentation/stylegan2/NN_query/FLOWER_256/00002/fakes006209/NNmatchDist_smallerThanThresh.txt'
#"""
if __name__ == '__main__':
    # Filter NN-match results by L2 distance: copy each qualifying image to
    # the presentation folder and record its line, plus summary stats, in a
    # text file.  Each line of srcTxtFile looks like
    # "<pair_image_name>: match_distance = <float>".
    SEP = ': match_distance = '
    matched_lines = []  # raw lines whose distance passed the threshold
    match_num = 0
    # Use context managers so file handles are closed even on error,
    # and collect lines in a list (O(n)) instead of quadratic str +=.
    with open(srcTxtFile) as dist_file:
        for line in dist_file:
            pairImgName = line.split(SEP)[0]
            l2Dist = float(line.split(SEP)[-1])
            if l2Dist <= NNmatchDist_threshold_value:
                # Copy the qualifying NN-match image into the presentation dir.
                copyfile(srcRootDir_img + pairImgName, dstRootDir_img + pairImgName)
                match_num += 1
                matched_lines.append(line)
    match_percent = match_num / total_sample_num * 100
    # Summary header followed by the matching lines (format unchanged).
    header = 'match_num = ' + str(match_num) + '\n' \
             + 'total sample_num = ' + str(total_sample_num) + '\n' \
             + 'match_percent = ' + str(match_percent) + '%\n'
    with open(dstTxtName_matchDistThresh, 'w') as out_file:
        out_file.write(header + ''.join(matched_lines))
| 63.342784 | 190 | 0.836514 | 3,044 | 24,577 | 6.434954 | 0.067346 | 0.064325 | 0.121503 | 0.164386 | 0.922401 | 0.912395 | 0.87768 | 0.856136 | 0.849653 | 0.844854 | 0 | 0.140185 | 0.055825 | 24,577 | 387 | 191 | 63.50646 | 0.703943 | 0.024942 | 0 | 0 | 0 | 0.129032 | 0.305817 | 0.249862 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.096774 | 0 | 0.096774 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e6b56e8dc1be291833a88999843115478c7a964e | 102,527 | py | Python | tests/dcerpc/test_samr.py | StanHardy/impacket | 769c3196124af64d7bc08d51ae4b651e61a87037 | [
"Apache-1.1"
] | 8 | 2022-03-23T13:02:37.000Z | 2022-03-27T04:30:16.000Z | tests/dcerpc/test_samr.py | anno5750/impacket | ed7082cd0bc0d951f6eefb0a98c4c1360fe1a8a2 | [
"Apache-1.1"
] | null | null | null | tests/dcerpc/test_samr.py | anno5750/impacket | ed7082cd0bc0d951f6eefb0a98c4c1360fe1a8a2 | [
"Apache-1.1"
] | null | null | null | # Impacket - Collection of Python classes for working with network protocols.
#
# SECUREAUTH LABS. Copyright (C) 2021 SecureAuth Corporation. All rights reserved.
#
# This software is provided under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# Tested so far:
# (h)SamrCloseHandle
# (h)SamrConnect5
# (h)SamrConnect4
# (h)SamrConnect2
# (h)SamrConnect
# (h)SamrOpenDomain
# (h)SamrOpenGroup
# (h)SamrOpenAlias
# (h)SamrOpenUser
# (h)SamrEnumerateDomainsInSamServer
# (h)SamrLookupNamesInDomain
# (h)SamrLookupIdsInDomain
# (h)SamrEnumerateGroupsInDomain
# (h)SamrEnumerateAliasesInDomain
# (h)SamrEnumerateUsersInDomain
# (h)SamrGetGroupsForUser
# (h)SamrQueryDisplayInformation3
# (h)SamrQueryDisplayInformation2
# (h)SamrQueryDisplayInformation
# (h)SamrGetDisplayEnumerationIndex2
# (h)SamrGetDisplayEnumerationIndex
# (h)SamrCreateGroupInDomain
# (h)SamrDeleteGroup
# (h)SamrCreateAliasInDomain
# (h)SamrDeleteAlias
# (h)SamrCreateUser2InDomain
# (h)SamrDeleteUser
# (h)SamrQueryInformationDomain2
# hSamrQueryInformationDomain
# hSamrSetInformationDomain
# (h)SamrQueryInformationGroup
# (h)SamrSetInformationGroup
# hSamrQueryInformationAlias
# hSamrSetInformationAlias
# SamrQueryInformationAlias
# SamrSetInformationAlias
# (h)SamrQueryInformationUser2
# (h)SamrSetInformationUser2
# SamrQueryInformationUser
# SamrSetInformationUser
# (h)SamrAddMemberToGroup
# (h)SamrRemoveMemberFromGroup
# (h)SamrGetMembersInGroup
# (h)SamrGetMembersInAlias
# (h)SamrAddMemberToAlias
# (h)SamrRemoveMemberFromAlias
# (h)SamrAddMultipleMembersToAlias
# (h)SamrRemoveMultipleMembersFromAlias
# (h)SamrRemoveMemberFromForeignDomain
# (h)SamrGetAliasMembership
# (h)SamrSetMemberAttributesOfGroup
# (h)SamrGetUserDomainPasswordInformation
# (h)SamrGetDomainPasswordInformation
# (h)SamrRidToSid
# SamrSetDSRMPassword
# (h)SamrValidatePassword
# (h)SamrQuerySecurityObject
# (h)SamrSetSecurityObject
# (h)SamrChangePasswordUser
# SamrOemChangePasswordUser2
# (h)SamrUnicodeChangePasswordUser2
# (h)SamrLookupDomainInSamServer
# Not yet
# SamrCreateUserInDomain
#
import pytest
import unittest
from tests.dcerpc import DCERPCTests
import string
import random
from six import b
from six import assertRaisesRegex
from impacket import crypto
from impacket.dcerpc.v5 import samr
from impacket.dcerpc.v5 import dtypes
from impacket import nt_errors, ntlm
from impacket.dcerpc.v5.ndr import NULL
class SAMRTests(DCERPCTests):
    """Live-endpoint tests for the MS-SAMR interface (impacket.dcerpc.v5.samr)."""
    iface_uuid = samr.MSRPC_UUID_SAMR
    authn = True
    authn_level = ntlm.NTLM_AUTH_PKT_INTEGRITY
    # Server name placed in Samr* requests; NUL-terminated as the wire format expects.
    server_name_string = "BETO\x00"
    full_name_string = "BETO"
    test_string = "BETUS"
    # Throw-away account/group names created and deleted by the tests below.
    test_account = "testAccount"
    test_group = "testGroup"
def get_domain_handle(self, dce):
    """Open the first domain on the server and return its handle.

    Sequence: SamrConnect with a broad server access mask ->
    SamrEnumerateDomainsInSamServer -> SamrLookupDomainInSamServer on the
    first enumerated name -> SamrOpenDomain with a broad domain access mask.
    """
    request = samr.SamrConnect()
    request['ServerName'] = self.server_name_string
    # Request every server-level right the tests may need later.
    request['DesiredAccess'] = samr.DELETE | samr.READ_CONTROL | samr.WRITE_DAC | samr.WRITE_OWNER | samr.ACCESS_SYSTEM_SECURITY | samr.GENERIC_READ | samr.GENERIC_WRITE | samr.GENERIC_EXECUTE | samr.SAM_SERVER_CONNECT | samr.SAM_SERVER_SHUTDOWN | samr.SAM_SERVER_INITIALIZE | samr.SAM_SERVER_CREATE_DOMAIN | samr.SAM_SERVER_ENUMERATE_DOMAINS | samr.SAM_SERVER_LOOKUP_DOMAIN | samr.SAM_SERVER_READ | samr.SAM_SERVER_WRITE | samr.SAM_SERVER_EXECUTE
    resp = dce.request(request)
    request = samr.SamrEnumerateDomainsInSamServer()
    request['ServerHandle'] = resp['ServerHandle']
    request['EnumerationContext'] = 0
    request['PreferedMaximumLength'] = 500
    resp2 = dce.request(request)
    request = samr.SamrLookupDomainInSamServer()
    request['ServerHandle'] = resp['ServerHandle']
    # Resolve the first enumerated domain name to its SID.
    request['Name'] = resp2['Buffer']['Buffer'][0]['Name']
    resp3 = dce.request(request)
    request = samr.SamrOpenDomain()
    request['ServerHandle'] = resp['ServerHandle']
    request['DesiredAccess'] = samr.DOMAIN_READ_PASSWORD_PARAMETERS | samr.DOMAIN_READ_OTHER_PARAMETERS | samr.DOMAIN_CREATE_USER | samr.DOMAIN_CREATE_ALIAS | samr.DOMAIN_LOOKUP | samr.DOMAIN_LIST_ACCOUNTS | samr.DOMAIN_ADMINISTER_SERVER | samr.DELETE | samr.READ_CONTROL | samr.ACCESS_SYSTEM_SECURITY | samr.DOMAIN_WRITE_OTHER_PARAMETERS | samr.DOMAIN_WRITE_PASSWORD_PARAMS
    request['DomainId'] = resp3['DomainId']
    resp4 = dce.request(request)
    return resp4['DomainHandle']
def test_SamrCloseHandle(self):
    """Close an open domain handle with a raw SamrCloseHandle request."""
    dce, _ = self.connect()
    close_req = samr.SamrCloseHandle()
    close_req['SamHandle'] = self.get_domain_handle(dce)
    response = dce.request(close_req)
    response.dump()
def test_hSamrCloseHandle(self):
    """Close an open domain handle through the hSamrCloseHandle helper."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    samr.hSamrCloseHandle(dce, domain_handle).dump()
def test_SamrConnect5(self):
    """Issue a raw SamrConnect5 request and dump the reply."""
    dce, _ = self.connect()
    connect_req = samr.SamrConnect5()
    connect_req['ServerName'] = self.server_name_string
    connect_req['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    connect_req['InVersion'] = 1
    connect_req['InRevisionInfo']['tag'] = 1
    dce.request(connect_req).dump()
def test_hSamrConnect5(self):
    """Connect through the hSamrConnect5 helper and dump the reply."""
    dce, _ = self.connect()
    samr.hSamrConnect5(dce).dump()
def test_SamrConnect4(self):
    """Issue a raw SamrConnect4 request and dump the reply."""
    dce, _ = self.connect()
    connect_req = samr.SamrConnect4()
    connect_req['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    connect_req['ServerName'] = self.server_name_string
    connect_req['ClientRevision'] = 2
    dce.request(connect_req).dump()
def test_hSamrConnect4(self):
    """Connect through the hSamrConnect4 helper and dump the reply."""
    dce, _ = self.connect()
    samr.hSamrConnect4(dce).dump()
def test_SamrConnect2(self):
    """Issue a raw SamrConnect2 request and dump the reply."""
    dce, _ = self.connect()
    connect_req = samr.SamrConnect2()
    connect_req['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    connect_req['ServerName'] = self.server_name_string
    dce.request(connect_req).dump()
def test_hSamrConnect2(self):
    """Connect through the hSamrConnect2 helper and dump the reply."""
    dce, _ = self.connect()
    samr.hSamrConnect2(dce).dump()
def test_SamrConnect(self):
    """Issue a raw SamrConnect request (no server name set) and dump the reply."""
    dce, _ = self.connect()
    connect_req = samr.SamrConnect()
    connect_req['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    dce.request(connect_req).dump()
def test_hSamrConnect(self):
    """Connect through the hSamrConnect helper and dump the reply."""
    dce, _ = self.connect()
    samr.hSamrConnect(dce).dump()
def test_SamrOpenDomain(self):
    """Opening a bogus domain SID must fail with STATUS_NO_SUCH_DOMAIN."""
    dce, _ = self.connect()
    connect_req = samr.SamrConnect()
    connect_req['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    connect_req['ServerName'] = self.server_name_string
    connect_resp = dce.request(connect_req)

    open_req = samr.SamrOpenDomain()
    open_req['ServerHandle'] = connect_resp['ServerHandle']
    open_req['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    # SID that should not exist on the target server.
    open_req['DomainId'].fromCanonical('S-1-5-352321536-2562177771-1589929855-2033349547')
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_NO_SUCH_DOMAIN"):
        dce.request(open_req)
def test_hSamrOpenDomain(self):
    """hSamrOpenDomain with a bogus SID must fail with STATUS_NO_SUCH_DOMAIN."""
    dce, _ = self.connect()
    server_handle = samr.hSamrConnect(dce)['ServerHandle']
    bogus_sid = dtypes.RPC_SID()
    bogus_sid.fromCanonical('S-1-5-352321536-2562177771-1589929855-2033349547')
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_NO_SUCH_DOMAIN"):
        samr.hSamrOpenDomain(dce, serverHandle=server_handle, domainId=bogus_sid)
def test_SamrOpenGroup(self):
    """Open the Domain Users group with a raw SamrOpenGroup request.

    STATUS_NO_SUCH_DOMAIN is tolerated (non-DC targets); anything else re-raises.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrConnect()
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['ServerName'] = self.server_name_string
    dce.request(request)
    request = samr.SamrOpenGroup()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_NO_SUCH_DOMAIN') < 0:
            raise
def test_hSamrOpenGroup(self):
    """Open the Domain Users group via hSamrOpenGroup; tolerate STATUS_NO_SUCH_DOMAIN."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    try:
        samr.hSamrOpenGroup(dce, domain_handle, groupId=samr.DOMAIN_GROUP_RID_USERS).dump()
    except samr.DCERPCSessionError as e:
        if 'STATUS_NO_SUCH_DOMAIN' not in str(e):
            raise
def test_SamrOpenAlias(self):
    """Opening a non-existent alias RID must fail with STATUS_NO_SUCH_ALIAS."""
    dce, _ = self.connect()
    open_req = samr.SamrOpenAlias()
    open_req['DomainHandle'] = self.get_domain_handle(dce)
    open_req['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    open_req['AliasId'] = 25
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_NO_SUCH_ALIAS"):
        dce.request(open_req)
def test_hSamrOpenAlias(self):
    """hSamrOpenAlias on a non-existent alias RID must fail with STATUS_NO_SUCH_ALIAS."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_NO_SUCH_ALIAS"):
        samr.hSamrOpenAlias(dce, domain_handle, aliasId=25)
def test_SamrOpenUser(self):
    """Open the built-in Administrator user with a raw SamrOpenUser request."""
    dce, _ = self.connect()
    open_req = samr.SamrOpenUser()
    open_req['DomainHandle'] = self.get_domain_handle(dce)
    open_req['DesiredAccess'] = (samr.USER_READ_GENERAL
                                 | samr.USER_READ_PREFERENCES
                                 | samr.USER_READ_ACCOUNT)
    open_req['UserId'] = samr.DOMAIN_USER_RID_ADMIN
    dce.request(open_req).dump()
def test_hSamrOpenUser(self):
    """Open the built-in Administrator user through the hSamrOpenUser helper."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    read_access = samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_READ_ACCOUNT
    samr.hSamrOpenUser(dce, domain_handle, read_access, samr.DOMAIN_USER_RID_ADMIN).dump()
def test_SamrEnumerateDomainsInSamServer(self):
    """Enumerate domains with raw requests, then look up and open the first one."""
    dce, rpc_transport = self.connect()
    request = samr.SamrConnect()
    request['ServerName'] = self.server_name_string
    request['DesiredAccess'] = samr.SAM_SERVER_ENUMERATE_DOMAINS | samr.SAM_SERVER_LOOKUP_DOMAIN
    resp = dce.request(request)
    request = samr.SamrEnumerateDomainsInSamServer()
    request['ServerHandle'] = resp['ServerHandle']
    request['EnumerationContext'] = 0
    request['PreferedMaximumLength'] = 500
    resp2 = dce.request(request)
    resp2.dump()
    request = samr.SamrLookupDomainInSamServer()
    request['ServerHandle'] = resp['ServerHandle']
    # Resolve the first enumerated domain's name to its SID.
    request['Name'] = resp2['Buffer']['Buffer'][0]['Name']
    resp3 = dce.request(request)
    resp3.dump()
    request = samr.SamrOpenDomain()
    request['ServerHandle'] = resp['ServerHandle']
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['DomainId'] = resp3['DomainId']
    resp4 = dce.request(request)
    resp4.dump()
def test_hSamrEnumerateDomainsInSamServer(self):
    """Enumerate/look up domains via helpers, then open the first with a raw request."""
    dce, rpc_transport = self.connect()
    resp = samr.hSamrConnect(dce, desiredAccess=samr.SAM_SERVER_ENUMERATE_DOMAINS | samr.SAM_SERVER_LOOKUP_DOMAIN)
    resp2 = samr.hSamrEnumerateDomainsInSamServer(dce, resp['ServerHandle'])
    resp2.dump()
    resp3 = samr.hSamrLookupDomainInSamServer(dce, resp['ServerHandle'], resp2['Buffer']['Buffer'][0]['Name'])
    resp3.dump()
    request = samr.SamrOpenDomain()
    request['ServerHandle'] = resp['ServerHandle']
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['DomainId'] = resp3['DomainId']
    resp4 = dce.request(request)
    resp4.dump()
def test_SamrLookupNamesInDomain(self):
    """Resolve 'Administrator' to a RID with a raw SamrLookupNamesInDomain request."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrLookupNamesInDomain()
    request['DomainHandle'] = domainHandle
    request['Count'] = 1
    entry = dtypes.RPC_UNICODE_STRING()
    entry['Data'] = 'Administrator'
    #entry.fields['MaximumLength'] = len('Administrator\x00')*2
    #entry.fields['Data'].fields['Data'].fields['MaximumCount'] = len('Administrator\x00')
    request['Names'].append(entry)
    # Force the conformant array's MaximumCount so the NDR encoding is accepted.
    request.fields['Names'].fields['MaximumCount'] = 1000
    resp5 = dce.request(request)
    resp5.dump()
def test_hSamrLookupNamesInDomain(self):
    """Resolve well-known account names through the hSamrLookupNamesInDomain helper.

    STATUS_MORE_ENTRIES (partial resolution) is acceptable; any other
    DCERPCSessionError is re-raised.  The previous version silently swallowed
    every session error because its except body never re-raised.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    try:
        resp = samr.hSamrLookupNamesInDomain(dce, domainHandle, ('Administrator', 'Guest'))
        resp.dump()
    except samr.DCERPCSessionError as e:
        # Only tolerate the expected partial-success status.
        if str(e).find('STATUS_MORE_ENTRIES') < 0:
            raise
def test_SamrLookupIdsInDomain(self):
    """Resolve RIDs 500 (Administrator) and 501 (Guest) with a raw request."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrLookupIdsInDomain()
    request.dump()
    request['DomainHandle'] = domainHandle
    request['Count'] = 2
    entry = dtypes.ULONG()
    entry['Data'] = 500
    request['RelativeIds'].append(entry)
    entry = dtypes.ULONG()
    entry['Data'] = 501
    request['RelativeIds'].append(entry)
    # Force the conformant array's MaximumCount so the NDR encoding is accepted.
    request.fields['RelativeIds'].fields['MaximumCount'] = 1000
    resp5 = dce.request(request)
    resp5.dump()
def test_hSamrLookupIdsInDomain(self):
    """Resolve RIDs 500/501 to names through the hSamrLookupIdsInDomain helper."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    samr.hSamrLookupIdsInDomain(dce, domain_handle, (500, 501)).dump()
def test_SamrEnumerateGroupsInDomain(self):
    """Page through all domain groups; STATUS_MORE_ENTRIES signals another page."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrEnumerateGroupsInDomain()
    request['DomainHandle'] = domainHandle
    request['EnumerationContext'] = 0
    request['PreferedMaximumLength'] = 500
    status = nt_errors.STATUS_MORE_ENTRIES
    while status == nt_errors.STATUS_MORE_ENTRIES:
        try:
            resp4 = dce.request(request)
        except samr.DCERPCSessionError as e:
            if str(e).find('STATUS_MORE_ENTRIES') < 0:
                raise
            # Partial page: the payload still rides inside the error packet.
            resp4 = e.get_packet()
        resp4['Buffer'].dump()
        # Resume enumeration where the previous page stopped.
        request['EnumerationContext'] = resp4['EnumerationContext']
        status = resp4['ErrorCode']
def test_hSamrEnumerateGroupsInDomain(self):
    """Enumerate domain groups through the helper and dump the listing."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    samr.hSamrEnumerateGroupsInDomain(dce, domain_handle).dump()
def test_SamrEnumerateAliasesInDomain(self):
    """Page through all domain aliases; STATUS_MORE_ENTRIES signals another page."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrEnumerateAliasesInDomain()
    request['DomainHandle'] = domainHandle
    request['EnumerationContext'] = 0
    request['PreferedMaximumLength'] = 500
    status = nt_errors.STATUS_MORE_ENTRIES
    while status == nt_errors.STATUS_MORE_ENTRIES:
        try:
            resp4 = dce.request(request)
        except samr.DCERPCSessionError as e:
            if str(e).find('STATUS_MORE_ENTRIES') < 0:
                raise
            # Partial page: the payload still rides inside the error packet.
            resp4 = e.get_packet()
        resp4['Buffer'].dump()
        # Resume enumeration where the previous page stopped.
        request['EnumerationContext'] = resp4['EnumerationContext']
        status = resp4['ErrorCode']
def test_hSamrEnumerateAliasesInDomain(self):
    """Enumerate domain aliases through the helper and dump the listing."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    samr.hSamrEnumerateAliasesInDomain(dce, domain_handle).dump()
def test_SamrEnumerateUsersInDomain(self):
    """Page through normal user accounts; STATUS_MORE_ENTRIES signals another page."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrEnumerateUsersInDomain()
    request['DomainHandle'] = domainHandle
    request['UserAccountControl'] = samr.USER_NORMAL_ACCOUNT
    request['EnumerationContext'] = 0
    request['PreferedMaximumLength'] = 8192
    status = nt_errors.STATUS_MORE_ENTRIES
    while status == nt_errors.STATUS_MORE_ENTRIES:
        try:
            resp4 = dce.request(request)
        except samr.DCERPCSessionError as e:
            if str(e).find('STATUS_MORE_ENTRIES') < 0:
                raise
            # Partial page: the payload still rides inside the error packet.
            resp4 = e.get_packet()
        resp4['Buffer'].dump()
        # Resume enumeration where the previous page stopped.
        request['EnumerationContext'] = resp4['EnumerationContext']
        status = resp4['ErrorCode']
def test_hSamrEnumerateUsersInDomain(self):
    """Enumerate domain users via the helper; only STATUS_MORE_ENTRIES is tolerated.

    Fix: the previous except body tested ``>= 0: pass`` and then dumped the
    error packet unconditionally, so unrelated session errors were never
    re-raised.  Now anything other than STATUS_MORE_ENTRIES propagates.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    try:
        resp = samr.hSamrEnumerateUsersInDomain(dce, domainHandle)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_MORE_ENTRIES') < 0:
            raise
        # Partial listing is expected; the payload rides in the error packet.
        e.get_packet().dump()
def test_SamrGetGroupsForUser(self):
    """Open Administrator (with USER_LIST_GROUPS) and list group memberships, raw."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrOpenUser()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_READ_ACCOUNT | samr.USER_LIST_GROUPS
    request['UserId'] = samr.DOMAIN_USER_RID_ADMIN
    resp = dce.request(request)
    resp.dump()
    request = samr.SamrGetGroupsForUser()
    request['UserHandle'] = resp['UserHandle']
    resp = dce.request(request)
    resp.dump()
def test_hSamrGetGroupsForUser(self):
    """Open Administrator with a raw request, then list groups via the helper."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrOpenUser()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_READ_ACCOUNT | samr.USER_LIST_GROUPS
    request['UserId'] = samr.DOMAIN_USER_RID_ADMIN
    resp = dce.request(request)
    resp.dump()
    resp = samr.hSamrGetGroupsForUser(dce, resp['UserHandle'])
    resp.dump()
def test_SamrQueryDisplayInformation3(self):
    """Query display info (level 3) for every display class with raw requests.

    For DomainDisplayUser a STATUS_MORE_ENTRIES partial result is tolerated.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrQueryDisplayInformation3()
    request['DomainHandle'] = domainHandle
    request['DisplayInformationClass'] = samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser
    request['Index'] = 0
    request['EntryCount'] = 100
    request['PreferredMaximumLength'] = 8192
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_MORE_ENTRIES') >=0:
            # Partial page: the payload still rides inside the error packet.
            e.get_packet().dump()
        else:
            raise
    for display_info_class in [samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayMachine,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayGroup,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayOemGroup]:
        request = samr.SamrQueryDisplayInformation3()
        request['DomainHandle'] = domainHandle
        request['DisplayInformationClass'] = display_info_class
        request['Index'] = 0
        request['EntryCount'] = 100
        request['PreferredMaximumLength'] = 8192
        resp = dce.request(request)
        resp.dump()
def test_hSamrQueryDisplayInformation3(self):
    """Query display info (level 3) for every display class via the helper.

    For DomainDisplayUser a STATUS_MORE_ENTRIES partial result is tolerated.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    try:
        resp = samr.hSamrQueryDisplayInformation3(dce, domainHandle, samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_MORE_ENTRIES') >=0:
            # Partial page: the payload still rides inside the error packet.
            e.get_packet().dump()
        else:
            raise
    for display_info_class in [samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayMachine,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayGroup,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayOemGroup]:
        resp = samr.hSamrQueryDisplayInformation3(dce, domainHandle, display_info_class)
        resp.dump()
def test_SamrQueryDisplayInformation2(self):
    """Query display info (level 2) for every display class.

    NOTE(review): despite the raw-request naming convention used elsewhere in
    this file, this test goes through the hSamrQueryDisplayInformation2
    helper — confirm whether a raw SamrQueryDisplayInformation2 request was
    intended here.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    try:
        resp = samr.hSamrQueryDisplayInformation2(dce, domainHandle, samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_MORE_ENTRIES') >= 0:
            # Partial page: the payload still rides inside the error packet.
            e.get_packet().dump()
        else:
            raise
    for display_info_class in [samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayMachine,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayGroup,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayOemGroup]:
        resp = samr.hSamrQueryDisplayInformation2(dce, domainHandle, display_info_class)
        resp.dump()
def test_SamrQueryDisplayInformation(self):
    """Query display info (level 1) for every display class with raw requests.

    For DomainDisplayUser a STATUS_MORE_ENTRIES partial result is tolerated.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrQueryDisplayInformation()
    request['DomainHandle'] = domainHandle
    request['DisplayInformationClass'] = samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser
    request['Index'] = 0
    request['EntryCount'] = 100
    request['PreferredMaximumLength'] = 8192
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_MORE_ENTRIES') >= 0:
            # Partial page: the payload still rides inside the error packet.
            e.get_packet().dump()
        else:
            raise
    for display_info_class in [samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayMachine,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayGroup,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayOemGroup]:
        request = samr.SamrQueryDisplayInformation()
        request['DomainHandle'] = domainHandle
        request['DisplayInformationClass'] = display_info_class
        request['Index'] = 0
        request['EntryCount'] = 100
        request['PreferredMaximumLength'] = 8192
        resp = dce.request(request)
        resp.dump()
def test_hSamrQueryDisplayInformation(self):
    """Query display info (level 1) for every display class via the helper.

    For DomainDisplayUser a STATUS_MORE_ENTRIES partial result is tolerated.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    try:
        resp = samr.hSamrQueryDisplayInformation(dce, domainHandle, samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_MORE_ENTRIES') >= 0:
            # Partial page: the payload still rides inside the error packet.
            e.get_packet().dump()
        else:
            raise
    for display_info_class in [samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayMachine,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayGroup,
                               samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayOemGroup]:
        resp = samr.hSamrQueryDisplayInformation(dce, domainHandle, display_info_class)
        resp.dump()
def test_SamrGetDisplayEnumerationIndex2(self):
    """Look up display-enumeration indexes for user/group prefixes, raw requests."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    for display_info_class, prefix in [(samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser, 'Gu'),
                                       (samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayGroup, 'Non')]:
        request = samr.SamrGetDisplayEnumerationIndex2()
        request['DomainHandle'] = domainHandle
        request['DisplayInformationClass'] = display_info_class
        request['Prefix'] = prefix
        resp = dce.request(request)
        resp.dump()
def test_hSamrGetDisplayEnumerationIndex2(self):
    """Look up display-enumeration indexes for user/group prefixes via the helper."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    cases = [(samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser, 'Gu'),
             (samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayGroup, 'Non')]
    for info_class, prefix in cases:
        samr.hSamrGetDisplayEnumerationIndex2(dce, domain_handle, info_class, prefix).dump()
def test_SamrGetDisplayEnumerationIndex(self):
    """Issue a raw SamrGetDisplayEnumerationIndex request for prefix 'Gu'.

    Fix: this method previously called the hSamr* helper, contrary to the
    file-wide convention that non-h-prefixed tests exercise raw requests.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrGetDisplayEnumerationIndex()
    request['DomainHandle'] = domainHandle
    request['DisplayInformationClass'] = samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser
    request['Prefix'] = 'Gu'
    resp = dce.request(request)
    resp.dump()
def test_hSamrGetDisplayEnumerationIndex(self):
    """Look up the display-enumeration index for prefix 'Gu' via the helper.

    Fix: this method previously built the raw request, contrary to the
    file-wide convention that h-prefixed tests exercise the hSamr* helpers.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    resp = samr.hSamrGetDisplayEnumerationIndex(dce, domainHandle,
                                                samr.DOMAIN_DISPLAY_INFORMATION.DomainDisplayUser, 'Gu')
    resp.dump()
def test_SamrCreateGroupInDomain_SamrDeleteGroup(self):
    """Raw group create/delete: creation is expected to be denied, and deleting
    through a domain handle must fail with a type mismatch."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateGroupInDomain()
    request['DomainHandle'] = domainHandle
    request['Name'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_ACCESS_DENIED"):
        dce.request(request)
    request = samr.SamrDeleteGroup()
    # Deliberately pass a domain handle where a group handle is required.
    request['GroupHandle'] = domainHandle
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_OBJECT_TYPE_MISMATCH"):
        dce.request(request)
def test_hSamrCreateGroupInDomain_hSamrDeleteGroup(self):
    """Helper group create/delete: creation is expected to be denied, and
    deleting through a domain handle must fail with a type mismatch."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_ACCESS_DENIED"):
        samr.hSamrCreateGroupInDomain(dce, domain_handle, self.test_group,
                                      samr.GROUP_ALL_ACCESS | samr.DELETE)
    with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_OBJECT_TYPE_MISMATCH"):
        samr.hSamrDeleteGroup(dce, domain_handle)
def test_SamrCreateAliasInDomain_SamrDeleteAlias(self):
    """Create a throw-away alias with a raw request, then delete it."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateAliasInDomain()
    request['DomainHandle'] = domainHandle
    request['AccountName'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    resp = dce.request(request)
    resp.dump()
    request = samr.SamrDeleteAlias()
    request['AliasHandle'] = resp['AliasHandle']
    resp = dce.request(request)
    resp.dump()
def test_hSamrCreateAliasInDomain_hSamrDeleteAlias(self):
    """Create a throw-away alias through the helper, then delete it."""
    dce, _ = self.connect()
    domain_handle = self.get_domain_handle(dce)
    create_resp = samr.hSamrCreateAliasInDomain(dce, domain_handle, self.test_group,
                                                samr.GROUP_ALL_ACCESS | samr.DELETE)
    create_resp.dump()
    samr.hSamrDeleteAlias(dce, create_resp['AliasHandle']).dump()
def test_SamrCreateUser2InDomain_SamrDeleteUser(self):
    """Create a normal user account via the raw requests, then delete it."""
    dce, _transport = self.connect()
    domain_handle = self.get_domain_handle(dce)
    create_req = samr.SamrCreateUser2InDomain()
    create_req['DomainHandle'] = domain_handle
    create_req['Name'] = self.test_account
    create_req['AccountType'] = samr.USER_NORMAL_ACCOUNT
    create_req['DesiredAccess'] = samr.USER_READ_GENERAL | samr.DELETE
    created = dce.request(create_req)
    created.dump()
    # Clean up: delete the freshly created account.
    delete_req = samr.SamrDeleteUser()
    delete_req['UserHandle'] = created['UserHandle']
    deleted = dce.request(delete_req)
    deleted.dump()
def test_hSamrCreateUser2InDomain_hSamrDeleteUser(self):
    """Create a normal user account via the helper functions, then delete it."""
    dce, _transport = self.connect()
    domain_handle = self.get_domain_handle(dce)
    created = samr.hSamrCreateUser2InDomain(
        dce, domain_handle, self.test_account, samr.USER_NORMAL_ACCOUNT,
        samr.USER_READ_GENERAL | samr.DELETE)
    created.dump()
    deleted = samr.hSamrDeleteUser(dce, created['UserHandle'])
    deleted.dump()
def test_SamrQueryInformationDomain2(self):
    """Query every readable DOMAIN_INFORMATION_CLASS via the raw
    SamrQueryInformationDomain2 request."""
    dce, _transport = self.connect()
    domain_handle = self.get_domain_handle(dce)
    info_classes = (
        samr.DOMAIN_INFORMATION_CLASS.DomainPasswordInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainGeneralInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainLogoffInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainOemInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainNameInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainServerRoleInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainReplicationInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainModifiedInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainStateInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainGeneralInformation2,
        samr.DOMAIN_INFORMATION_CLASS.DomainLockoutInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainModifiedInformation2,
    )
    for info_class in info_classes:
        query = samr.SamrQueryInformationDomain2()
        query['DomainHandle'] = domain_handle
        query['DomainInformationClass'] = info_class
        reply = dce.request(query)
        reply.dump()
def test_hSamrQueryInformationDomain2(self):
    """Query every readable DOMAIN_INFORMATION_CLASS via the
    hSamrQueryInformationDomain2 helper."""
    dce, _transport = self.connect()
    domain_handle = self.get_domain_handle(dce)
    info_classes = (
        samr.DOMAIN_INFORMATION_CLASS.DomainPasswordInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainGeneralInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainLogoffInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainOemInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainNameInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainServerRoleInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainReplicationInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainModifiedInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainStateInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainGeneralInformation2,
        samr.DOMAIN_INFORMATION_CLASS.DomainLockoutInformation,
        samr.DOMAIN_INFORMATION_CLASS.DomainModifiedInformation2,
    )
    for info_class in info_classes:
        reply = samr.hSamrQueryInformationDomain2(dce, domain_handle, info_class)
        reply.dump()
def test_hSamrQueryInformationDomain_hSamrSetInformationDomain(self):
"""Round-trip several DOMAIN_INFORMATION_CLASS values through
hSamrQueryInformationDomain / hSamrSetInformationDomain: query a field,
change it, re-query to verify, then restore the previous value."""
dce, rpc_transport = self.connect()
domainHandle = self.get_domain_handle(dce)
# DomainPasswordInformation: set MaxPasswordAge.LowPart to 11, verify,
# then write 0 back (a literal 0 rather than the originally read value).
resp = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainPasswordInformation)
resp.dump()
resp['Buffer']['Password']['MaxPasswordAge']['LowPart'] = 11
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp['Buffer'])
resp.dump()
resp2 = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainPasswordInformation)
resp2.dump()
self.assertEqual(11, resp2['Buffer']['Password']['MaxPasswordAge']['LowPart'])
resp2['Buffer']['Password']['MaxPasswordAge']['LowPart'] = 0
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp2['Buffer'])
resp.dump()
################################################################################
# DomainGeneralInformation is read-only for Set: the server is expected to
# reject the write with STATUS_INVALID_INFO_CLASS.
resp = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainGeneralInformation)
resp.dump()
resp['Buffer']['General']['ReplicaSourceNodeName'] = self.test_string
with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_INVALID_INFO_CLASS"):
samr.hSamrSetInformationDomain(dce, domainHandle, resp['Buffer'])
################################################################################
# DomainLogoffInformation: change ForceLogoff.LowPart, verify, restore.
resp = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainLogoffInformation)
resp.dump()
oldData = resp['Buffer']['Logoff']['ForceLogoff']['LowPart']
resp['Buffer']['Logoff']['ForceLogoff']['LowPart'] = 11
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp['Buffer'])
resp.dump()
resp2 = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainLogoffInformation)
resp2.dump()
self.assertEqual(11, resp2['Buffer']['Logoff']['ForceLogoff']['LowPart'])
resp2['Buffer']['Logoff']['ForceLogoff']['LowPart'] = oldData
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp2['Buffer'])
resp.dump()
################################################################################
# DomainOemInformation: change the OEM string, verify, restore.
resp = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainOemInformation)
resp.dump()
oldData = resp['Buffer']['Oem']['OemInformation']
resp['Buffer']['Oem']['OemInformation'] = self.test_string
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp['Buffer'])
resp.dump()
resp2 = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainOemInformation)
resp2.dump()
self.assertEqual(self.test_string, resp2['Buffer']['Oem']['OemInformation'])
resp2['Buffer']['Oem']['OemInformation'] = oldData
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp2['Buffer'])
resp.dump()
# These classes are only queried (no Set attempted).
for domain_info_class in [samr.DOMAIN_INFORMATION_CLASS.DomainNameInformation,
samr.DOMAIN_INFORMATION_CLASS.DomainServerRoleInformation,
samr.DOMAIN_INFORMATION_CLASS.DomainModifiedInformation,
samr.DOMAIN_INFORMATION_CLASS.DomainStateInformation,
samr.DOMAIN_INFORMATION_CLASS.DomainGeneralInformation2,
samr.DOMAIN_INFORMATION_CLASS.DomainLockoutInformation,
samr.DOMAIN_INFORMATION_CLASS.DomainModifiedInformation2,
]:
resp = samr.hSamrQueryInformationDomain(dce, domainHandle, domain_info_class)
resp.dump()
# DomainReplicationInformation: change ReplicaSourceNodeName, verify, restore.
resp = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainReplicationInformation)
resp.dump()
oldData = resp['Buffer']['Replication']['ReplicaSourceNodeName']
resp['Buffer']['Replication']['ReplicaSourceNodeName'] = self.test_string
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp['Buffer'])
resp.dump()
resp2 = samr.hSamrQueryInformationDomain(dce, domainHandle, samr.DOMAIN_INFORMATION_CLASS.DomainReplicationInformation)
resp2.dump()
self.assertEqual(self.test_string, resp2['Buffer']['Replication']['ReplicaSourceNodeName'])
resp2['Buffer']['Replication']['ReplicaSourceNodeName'] = oldData
resp = samr.hSamrSetInformationDomain(dce, domainHandle, resp2['Buffer'])
resp.dump()
def test_SamrQueryInformationGroup_SamrSetInformationGroup(self):
    """Query and set group information through the raw SamrOpenGroup /
    SamrQueryInformationGroup / SamrSetInformationGroup requests.

    Opens the DOMAIN_GROUP_RID_USERS group, then for the Name, Attribute
    and AdminComment classes: query, set a new value, re-query, assert the
    change, and restore the previous value.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrOpenGroup()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS
    request['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    try:
        resp0 = dce.request(request)
        resp0.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_NO_SUCH_DOMAIN') < 0:
            raise
        # Fix: the original fell through here with resp0 unbound, so the
        # next statement raised NameError instead of skipping cleanly when
        # the group's domain does not exist on the target.
        return
    request = samr.SamrQueryInformationGroup()
    request['GroupHandle'] = resp0['GroupHandle']
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupGeneralInformation
    resp = dce.request(request)
    resp.dump()
    ################################################################################
    # GroupNameInformation: rename the group, verify, then restore.
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupNameInformation
    resp = dce.request(request)
    resp.dump()
    oldData = resp['Buffer']['Name']['Name']
    req = samr.SamrSetInformationGroup()
    req['GroupHandle'] = resp0['GroupHandle']
    req['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupNameInformation
    req['Buffer']['tag'] = samr.GROUP_INFORMATION_CLASS.GroupNameInformation
    req['Buffer']['Name']['Name'] = self.test_string
    resp = dce.request(req)
    resp.dump()
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupNameInformation
    resp = dce.request(request)
    resp.dump()
    self.assertEqual(self.test_string, resp['Buffer']['Name']['Name'])
    req['Buffer']['Name']['Name'] = oldData
    resp = dce.request(req)
    resp.dump()
    ################################################################################
    # GroupAttributeInformation: set attributes to 2, then restore.
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupAttributeInformation
    resp = dce.request(request)
    resp.dump()
    oldData = resp['Buffer']['Attribute']['Attributes']
    req = samr.SamrSetInformationGroup()
    req['GroupHandle'] = resp0['GroupHandle']
    req['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupAttributeInformation
    req['Buffer']['tag'] = samr.GROUP_INFORMATION_CLASS.GroupAttributeInformation
    req['Buffer']['Attribute']['Attributes'] = 2
    resp = dce.request(req)
    resp.dump()
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupAttributeInformation
    resp = dce.request(request)
    resp.dump()
    # The attribute round-trip is deliberately not asserted here.
    #self.assertEqual(2, resp['Buffer']['Attribute']['Attributes'])
    req['Buffer']['Attribute']['Attributes'] = oldData
    resp = dce.request(req)
    resp.dump()
    ################################################################################
    # GroupAdminCommentInformation: change the comment, verify, restore.
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupAdminCommentInformation
    resp = dce.request(request)
    resp.dump()
    oldData = resp['Buffer']['AdminComment']['AdminComment']
    req = samr.SamrSetInformationGroup()
    req['GroupHandle'] = resp0['GroupHandle']
    req['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupAdminCommentInformation
    req['Buffer']['tag'] = samr.GROUP_INFORMATION_CLASS.GroupAdminCommentInformation
    req['Buffer']['AdminComment']['AdminComment'] = self.test_string
    resp = dce.request(req)
    resp.dump()
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupAdminCommentInformation
    resp = dce.request(request)
    resp.dump()
    self.assertEqual(self.test_string, resp['Buffer']['AdminComment']['AdminComment'])
    req['Buffer']['AdminComment']['AdminComment'] = oldData
    resp = dce.request(req)
    resp.dump()
    ################################################################################
    # GroupReplicationInformation: query only.
    request['GroupInformationClass'] = samr.GROUP_INFORMATION_CLASS.GroupReplicationInformation
    resp = dce.request(request)
    resp.dump()
def test_hSamrQueryInformationGroup_hSamrSetInformationGroup(self):
    """Query and set group information via the hSamr* helpers: open
    DOMAIN_GROUP_RID_USERS, query its general info, then rename it,
    verify, and restore the original name.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    try:
        resp0 = samr.hSamrOpenGroup(dce, domainHandle, samr.GROUP_ALL_ACCESS, samr.DOMAIN_GROUP_RID_USERS)
        resp0.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_NO_SUCH_DOMAIN') < 0:
            raise
        # Fix: the original fell through here with resp0 unbound and the
        # next statement raised NameError; skip cleanly instead.
        return
    resp = samr.hSamrQueryInformationGroup(dce, resp0['GroupHandle'], samr.GROUP_INFORMATION_CLASS.GroupGeneralInformation)
    resp.dump()
    ################################################################################
    # GroupNameInformation: rename, verify, restore.
    resp = samr.hSamrQueryInformationGroup(dce, resp0['GroupHandle'], samr.GROUP_INFORMATION_CLASS.GroupNameInformation)
    resp.dump()
    oldData = resp['Buffer']['Name']['Name']
    req = samr.SAMPR_GROUP_INFO_BUFFER()
    req['tag'] = samr.GROUP_INFORMATION_CLASS.GroupNameInformation
    req['Name']['Name'] = self.test_string
    resp = samr.hSamrSetInformationGroup(dce, resp0['GroupHandle'], req)
    resp.dump()
    resp = samr.hSamrQueryInformationGroup(dce, resp0['GroupHandle'], samr.GROUP_INFORMATION_CLASS.GroupNameInformation)
    resp.dump()
    self.assertEqual(self.test_string, resp['Buffer']['Name']['Name'])
    req['Name']['Name'] = oldData
    resp = samr.hSamrSetInformationGroup(dce, resp0['GroupHandle'], req)
    resp.dump()
def test_hSamrQueryInformationAlias_hSamrSetInformationAlias(self):
    """Open the first enumerated alias, query its info, rename it,
    verify the rename, and restore the original name."""
    dce, _transport = self.connect()
    domain_handle = self.get_domain_handle(dce)
    enum_reply = samr.hSamrEnumerateAliasesInDomain(dce, domain_handle)
    enum_reply.dump()
    opened = samr.hSamrOpenAlias(dce, domain_handle,
                                 aliasId=enum_reply['Buffer']['Buffer'][0]['RelativeId'])
    opened.dump()
    alias_handle = opened['AliasHandle']
    reply = samr.hSamrQueryInformationAlias(
        dce, alias_handle, samr.ALIAS_INFORMATION_CLASS.AliasGeneralInformation)
    reply.dump()
    ################################################################################
    # AliasNameInformation: rename, verify, restore.
    reply = samr.hSamrQueryInformationAlias(
        dce, alias_handle, samr.ALIAS_INFORMATION_CLASS.AliasNameInformation)
    reply.dump()
    saved_name = reply['Buffer']['Name']['Name']
    info = samr.SAMPR_ALIAS_INFO_BUFFER()
    info['tag'] = samr.ALIAS_INFORMATION_CLASS.AliasNameInformation
    info['Name']['Name'] = self.test_string
    reply = samr.hSamrSetInformationAlias(dce, alias_handle, info)
    reply.dump()
    reply = samr.hSamrQueryInformationAlias(
        dce, alias_handle, samr.ALIAS_INFORMATION_CLASS.AliasNameInformation)
    reply.dump()
    self.assertEqual(self.test_string, reply['Buffer']['Name']['Name'])
    info['Name']['Name'] = saved_name
    reply = samr.hSamrSetInformationAlias(dce, alias_handle, info)
    reply.dump()
def test_SamrQueryInformationAlias_SamrSetInformationAlias(self):
"""Enumerate aliases (paging on STATUS_MORE_ENTRIES), open the first one,
then round-trip its Name and AdminComment info classes: query, set, verify,
and restore the original value."""
dce, rpc_transport = self.connect()
domainHandle = self.get_domain_handle(dce)
request = samr.SamrEnumerateAliasesInDomain()
request['DomainHandle'] = domainHandle
request['EnumerationContext'] = 0
request['PreferedMaximumLength'] = 500
# Page through the enumeration; STATUS_MORE_ENTRIES arrives as a session
# error whose packet still carries a partial buffer.
status = nt_errors.STATUS_MORE_ENTRIES
while status == nt_errors.STATUS_MORE_ENTRIES:
try:
resp4 = dce.request(request)
except samr.DCERPCSessionError as e:
if str(e).find('STATUS_MORE_ENTRIES') < 0:
raise
resp4 = e.get_packet()
resp4['Buffer'].dump()
request['EnumerationContext'] = resp4['EnumerationContext']
status = resp4['ErrorCode']
resp4.dump()
# Open the first alias from the last enumeration page.
request = samr.SamrOpenAlias()
request['DomainHandle'] = domainHandle
request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
request['AliasId'] = resp4['Buffer']['Buffer'][0]['RelativeId']
resp0 = dce.request(request)
resp0.dump()
request = samr.SamrQueryInformationAlias()
request['AliasHandle'] = resp0['AliasHandle']
request['AliasInformationClass'] = samr.ALIAS_INFORMATION_CLASS.AliasGeneralInformation
resp = dce.request(request)
resp.dump()
################################################################################
# AliasNameInformation: rename the alias, verify, restore.
request['AliasInformationClass'] = samr.ALIAS_INFORMATION_CLASS.AliasNameInformation
resp = dce.request(request)
resp.dump()
oldData = resp['Buffer']['Name']['Name']
req = samr.SamrSetInformationAlias()
req['AliasHandle'] = resp0['AliasHandle']
req['AliasInformationClass'] = samr.ALIAS_INFORMATION_CLASS.AliasNameInformation
req['Buffer']['tag'] = samr.ALIAS_INFORMATION_CLASS.AliasNameInformation
req['Buffer']['Name']['Name'] = self.test_string
resp = dce.request(req)
resp.dump()
request['AliasInformationClass'] = samr.ALIAS_INFORMATION_CLASS.AliasNameInformation
resp = dce.request(request)
resp.dump()
self.assertEqual(self.test_string, resp['Buffer']['Name']['Name'])
req['Buffer']['Name']['Name'] = oldData
resp = dce.request(req)
resp.dump()
################################################################################
# AliasAdminCommentInformation: change the comment, verify, restore.
request['AliasInformationClass'] = samr.ALIAS_INFORMATION_CLASS.AliasAdminCommentInformation
resp = dce.request(request)
resp.dump()
oldData = resp['Buffer']['AdminComment']['AdminComment']
req = samr.SamrSetInformationAlias()
req['AliasHandle'] = resp0['AliasHandle']
req['AliasInformationClass'] = samr.ALIAS_INFORMATION_CLASS.AliasAdminCommentInformation
req['Buffer']['tag'] = samr.ALIAS_INFORMATION_CLASS.AliasAdminCommentInformation
req['Buffer']['AdminComment']['AdminComment'] = self.test_string
resp = dce.request(req)
resp.dump()
request['AliasInformationClass'] = samr.ALIAS_INFORMATION_CLASS.AliasAdminCommentInformation
resp = dce.request(request)
resp.dump()
self.assertEqual(self.test_string, resp['Buffer']['AdminComment']['AdminComment'])
req['Buffer']['AdminComment']['AdminComment'] = oldData
resp = dce.request(req)
resp.dump()
def test_SamrQueryInformationUser2_SamrSetInformationUser2(self):
"""Open the domain admin user and round-trip several USER_INFORMATION_CLASS
values through SamrQueryInformationUser2 / SamrSetInformationUser2: query,
set, verify, restore. Internal info classes must be rejected with
STATUS_INVALID_INFO_CLASS."""
dce, rpc_transport = self.connect()
domainHandle = self.get_domain_handle(dce)
request = samr.SamrOpenUser()
request['DomainHandle'] = domainHandle
#request['DesiredAccess'] = samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_READ_ACCOUNT | samr.USER_ALL_ACCESS | samr.USER_READ | samr.USER_READ_LOGON
# Request a wide access mask so both queries and sets below are allowed.
request['DesiredAccess'] = \
samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_WRITE_PREFERENCES | samr.USER_READ_LOGON \
| samr.USER_READ_ACCOUNT | samr.USER_WRITE_ACCOUNT | samr.USER_CHANGE_PASSWORD | samr.USER_FORCE_PASSWORD_CHANGE \
| samr.USER_LIST_GROUPS | samr.USER_READ_GROUP_INFORMATION | samr.USER_WRITE_GROUP_INFORMATION | samr.USER_ALL_ACCESS \
| samr.USER_READ | samr.USER_WRITE | samr.USER_EXECUTE
# Get the user handle for the domain admin user
request['UserId'] = samr.DOMAIN_USER_RID_ADMIN
resp = dce.request(request)
resp.dump()
request = samr.SamrQueryInformationUser2()
request['UserHandle'] = resp['UserHandle']
userHandle = resp['UserHandle']
request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserGeneralInformation
resp = dce.request(request)
resp.dump()
# Set a new user comment and revert it back
request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserPreferencesInformation
resp = dce.request(request)
resp.dump()
oldData = resp['Buffer']['Preferences']['UserComment']
set_request = samr.SamrSetInformationUser2()
set_request['UserHandle'] = userHandle
set_request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserPreferencesInformation
set_request['Buffer'] = resp['Buffer']
set_request['Buffer']['Preferences']['UserComment'] = self.test_string
resp = dce.request(set_request)
resp.dump()
resp = dce.request(request)
resp.dump()
self.assertEqual(self.test_string, resp['Buffer']['Preferences']['UserComment'])
set_request['Buffer']['Preferences']['UserComment'] = oldData
resp = dce.request(set_request)
resp.dump()
# Get different user info classes
for user_info_class in [samr.USER_INFORMATION_CLASS.UserLogonInformation,
samr.USER_INFORMATION_CLASS.UserLogonHoursInformation,
samr.USER_INFORMATION_CLASS.UserAccountInformation,
]:
request['UserInformationClass'] = user_info_class
resp = dce.request(request)
resp.dump()
# Set a new full name and revert it back
request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserNameInformation
resp = dce.request(request)
resp.dump()
oldData = resp['Buffer']['Name']['FullName']
set_request = samr.SamrSetInformationUser2()
set_request['UserHandle'] = userHandle
set_request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserNameInformation
set_request['Buffer'] = resp['Buffer']
set_request['Buffer']['Name']['FullName'] = self.full_name_string
resp = dce.request(set_request)
resp.dump()
resp = dce.request(request)
resp.dump()
self.assertEqual(self.full_name_string, resp['Buffer']['Name']['FullName'])
set_request['Buffer']['Name']['FullName'] = oldData
resp = dce.request(set_request)
resp.dump()
# Set a new username and revert it back
request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserAccountNameInformation
resp = dce.request(request)
resp.dump()
oldData = resp['Buffer']['AccountName']['UserName']
req = samr.SamrSetInformationUser2()
req['UserHandle'] = userHandle
req['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserAccountNameInformation
req['Buffer'] = resp['Buffer']
req['Buffer']['AccountName']['UserName'] = self.test_string
resp = dce.request(req)
resp.dump()
resp = dce.request(request)
resp.dump()
self.assertEqual(self.test_string, resp['Buffer']['AccountName']['UserName'])
req['Buffer']['AccountName']['UserName'] = oldData
resp = dce.request(req)
resp.dump()
# Get different user info classes
for user_info_class in [samr.USER_INFORMATION_CLASS.UserFullNameInformation,
samr.USER_INFORMATION_CLASS.UserPrimaryGroupInformation,
samr.USER_INFORMATION_CLASS.UserHomeInformation,
samr.USER_INFORMATION_CLASS.UserScriptInformation,
samr.USER_INFORMATION_CLASS.UserProfileInformation,
samr.USER_INFORMATION_CLASS.UserAdminCommentInformation,
samr.USER_INFORMATION_CLASS.UserWorkStationsInformation,
samr.USER_INFORMATION_CLASS.UserControlInformation,
samr.USER_INFORMATION_CLASS.UserExpiresInformation,
samr.USER_INFORMATION_CLASS.UserParametersInformation,
samr.USER_INFORMATION_CLASS.UserAllInformation,
]:
request['UserInformationClass'] = user_info_class
resp = dce.request(request)
resp.dump()
# Get different user info classes that are internal
for internal_user_info_class in [samr.USER_INFORMATION_CLASS.UserInternal1Information,
samr.USER_INFORMATION_CLASS.UserInternal4Information,
samr.USER_INFORMATION_CLASS.UserInternal5Information,
samr.USER_INFORMATION_CLASS.UserInternal4InformationNew,
samr.USER_INFORMATION_CLASS.UserInternal5InformationNew
]:
request['UserInformationClass'] = internal_user_info_class
with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_INVALID_INFO_CLASS"):
dce.request(request)
def test_hSamrQueryInformationUser2_hSamrSetInformationUser2(self):
"""Helper-function variant of the User2 query/set round-trips: open the
domain admin user, change and restore the user comment, full name and
account name, query the remaining readable classes, and confirm the
internal classes are rejected with STATUS_INVALID_INFO_CLASS."""
dce, rpc_transport = self.connect()
domainHandle = self.get_domain_handle(dce)
# Get the user handle for the domain admin user
# Wide access mask so both the queries and the sets below succeed.
desiredAccess = \
samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_WRITE_PREFERENCES | samr.USER_READ_LOGON \
| samr.USER_READ_ACCOUNT | samr.USER_WRITE_ACCOUNT | samr.USER_CHANGE_PASSWORD | samr.USER_FORCE_PASSWORD_CHANGE \
| samr.USER_LIST_GROUPS | samr.USER_READ_GROUP_INFORMATION | samr.USER_WRITE_GROUP_INFORMATION | samr.USER_ALL_ACCESS \
| samr.USER_READ | samr.USER_WRITE | samr.USER_EXECUTE
resp = samr.hSamrOpenUser(dce, domainHandle, desiredAccess, samr.DOMAIN_USER_RID_ADMIN )
resp.dump()
userHandle = resp['UserHandle']
resp = samr.hSamrQueryInformationUser2(dce, userHandle, samr.USER_INFORMATION_CLASS.UserGeneralInformation)
resp.dump()
# Set a new user comment and revert it back
resp = samr.hSamrQueryInformationUser2(dce, userHandle, samr.USER_INFORMATION_CLASS.UserPreferencesInformation)
resp.dump()
oldData = resp['Buffer']['Preferences']['UserComment']
resp['Buffer']['Preferences']['UserComment'] = self.test_string
resp = samr.hSamrSetInformationUser2(dce, userHandle, resp['Buffer'])
resp.dump()
resp = samr.hSamrQueryInformationUser2(dce, userHandle, samr.USER_INFORMATION_CLASS.UserPreferencesInformation)
resp.dump()
self.assertEqual(self.test_string, resp['Buffer']['Preferences']['UserComment'])
resp['Buffer']['Preferences']['UserComment'] = oldData
resp = samr.hSamrSetInformationUser2(dce, userHandle, resp['Buffer'])
resp.dump()
# Get different user info classes
for user_info_class in [samr.USER_INFORMATION_CLASS.UserLogonInformation,
samr.USER_INFORMATION_CLASS.UserLogonHoursInformation,
samr.USER_INFORMATION_CLASS.UserAccountInformation,
]:
samr.hSamrQueryInformationUser2(dce, userHandle, user_info_class)
# Set a new full name and revert it back
resp = samr.hSamrQueryInformationUser2(dce, userHandle, samr.USER_INFORMATION_CLASS.UserNameInformation)
resp.dump()
oldData = resp['Buffer']['Name']['FullName']
resp['Buffer']['Name']['FullName'] = self.full_name_string
resp = samr.hSamrSetInformationUser2(dce, userHandle, resp['Buffer'])
resp.dump()
resp = samr.hSamrQueryInformationUser2(dce, userHandle,samr.USER_INFORMATION_CLASS.UserNameInformation)
resp.dump()
self.assertEqual(self.full_name_string, resp['Buffer']['Name']['FullName'])
resp['Buffer']['Name']['FullName'] = oldData
resp = samr.hSamrSetInformationUser2(dce, userHandle, resp['Buffer'])
resp.dump()
# Set a new username and revert it back
resp = samr.hSamrQueryInformationUser2(dce, userHandle, samr.USER_INFORMATION_CLASS.UserAccountNameInformation)
resp.dump()
oldData = resp['Buffer']['AccountName']['UserName']
resp['Buffer']['AccountName']['UserName'] = self.test_string
resp = samr.hSamrSetInformationUser2(dce, userHandle, resp['Buffer'])
resp.dump()
resp = samr.hSamrQueryInformationUser2(dce, userHandle, samr.USER_INFORMATION_CLASS.UserAccountNameInformation)
resp.dump()
self.assertEqual(self.test_string, resp['Buffer']['AccountName']['UserName'])
resp['Buffer']['AccountName']['UserName'] = oldData
resp = samr.hSamrSetInformationUser2(dce, userHandle, resp['Buffer'])
resp.dump()
# Get different user info classes
for user_info_class in [samr.USER_INFORMATION_CLASS.UserFullNameInformation,
samr.USER_INFORMATION_CLASS.UserPrimaryGroupInformation,
samr.USER_INFORMATION_CLASS.UserHomeInformation,
samr.USER_INFORMATION_CLASS.UserScriptInformation,
samr.USER_INFORMATION_CLASS.UserProfileInformation,
samr.USER_INFORMATION_CLASS.UserAdminCommentInformation,
samr.USER_INFORMATION_CLASS.UserWorkStationsInformation,
samr.USER_INFORMATION_CLASS.UserControlInformation,
samr.USER_INFORMATION_CLASS.UserExpiresInformation,
samr.USER_INFORMATION_CLASS.UserParametersInformation,
samr.USER_INFORMATION_CLASS.UserAllInformation,
]:
samr.hSamrQueryInformationUser2(dce, userHandle, user_info_class)
# Get different user info classes that are internal
for internal_user_info_class in [samr.USER_INFORMATION_CLASS.UserInternal1Information,
samr.USER_INFORMATION_CLASS.UserInternal4Information,
samr.USER_INFORMATION_CLASS.UserInternal5Information,
samr.USER_INFORMATION_CLASS.UserInternal4InformationNew,
samr.USER_INFORMATION_CLASS.UserInternal5InformationNew
]:
with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_INVALID_INFO_CLASS"):
samr.hSamrQueryInformationUser2(dce, userHandle, internal_user_info_class)
def test_SamrQueryInformationUser_SamrSetInformationUser(self):
"""Open the domain admin user and exercise SamrQueryInformationUser /
SamrSetInformationUser: change and restore the user comment, query every
readable class, and confirm internal classes are rejected with
STATUS_INVALID_INFO_CLASS."""
dce, rpc_transport = self.connect()
domainHandle = self.get_domain_handle(dce)
# Get the user handle for the domain admin user
request = samr.SamrOpenUser()
request['DomainHandle'] = domainHandle
request['DesiredAccess'] = samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_READ_ACCOUNT | samr.USER_ALL_ACCESS | samr.USER_READ
request['UserId'] = samr.DOMAIN_USER_RID_ADMIN
resp = dce.request(request)
resp.dump()
request = samr.SamrQueryInformationUser()
request['UserHandle'] = resp['UserHandle']
userHandle = resp['UserHandle']
request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserGeneralInformation
resp = dce.request(request)
resp.dump()
# Set a new user comment and revert it back
request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserPreferencesInformation
resp = dce.request(request)
resp.dump()
oldData = resp['Buffer']['Preferences']['UserComment']
req = samr.SamrSetInformationUser()
req['UserHandle'] = userHandle
req['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserPreferencesInformation
req['Buffer'] = resp['Buffer']
req['Buffer']['Preferences']['UserComment'] = self.test_string
resp = dce.request(req)
resp.dump()
resp = dce.request(request)
resp.dump()
self.assertEqual(self.test_string, resp['Buffer']['Preferences']['UserComment'])
req['Buffer']['Preferences']['UserComment'] = oldData
resp = dce.request(req)
resp.dump()
# Get different user info classes
for user_info_class in [samr.USER_INFORMATION_CLASS.UserLogonInformation,
samr.USER_INFORMATION_CLASS.UserLogonHoursInformation,
samr.USER_INFORMATION_CLASS.UserAccountInformation,
samr.USER_INFORMATION_CLASS.UserNameInformation,
samr.USER_INFORMATION_CLASS.UserAccountNameInformation,
samr.USER_INFORMATION_CLASS.UserFullNameInformation,
samr.USER_INFORMATION_CLASS.UserPrimaryGroupInformation,
samr.USER_INFORMATION_CLASS.UserHomeInformation,
samr.USER_INFORMATION_CLASS.UserScriptInformation,
samr.USER_INFORMATION_CLASS.UserProfileInformation,
samr.USER_INFORMATION_CLASS.UserAdminCommentInformation,
samr.USER_INFORMATION_CLASS.UserWorkStationsInformation,
samr.USER_INFORMATION_CLASS.UserControlInformation,
samr.USER_INFORMATION_CLASS.UserExpiresInformation,
samr.USER_INFORMATION_CLASS.UserParametersInformation,
samr.USER_INFORMATION_CLASS.UserAllInformation,
]:
request['UserInformationClass'] = user_info_class
dce.request(request)
# Get different user info classes that are internal
for internal_user_info_class in [samr.USER_INFORMATION_CLASS.UserInternal1Information,
samr.USER_INFORMATION_CLASS.UserInternal4Information,
samr.USER_INFORMATION_CLASS.UserInternal5Information,
samr.USER_INFORMATION_CLASS.UserInternal4InformationNew,
samr.USER_INFORMATION_CLASS.UserInternal5InformationNew
]:
request['UserInformationClass'] = internal_user_info_class
with assertRaisesRegex(self, samr.DCERPCSessionError, "STATUS_INVALID_INFO_CLASS"):
dce.request(request)
def test_SamrAddMemberToGroup_SamrRemoveMemberFromGroup(self):
    """Open DOMAIN_GROUP_RID_USERS and try removing then re-adding the
    admin RID; both calls tolerate the errors that mean 'already in the
    expected state' (primary-group / already-a-member)."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrConnect()
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['ServerName'] = self.server_name_string
    resp = dce.request(request)
    request = samr.SamrOpenGroup()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_NO_SUCH_DOMAIN') < 0:
            raise
        # Fix: the original fell through here while `resp` still held the
        # SamrConnect reply, so resp['GroupHandle'] below failed
        # confusingly. Skip cleanly when the domain is missing.
        return
    request = samr.SamrRemoveMemberFromGroup()
    request['GroupHandle'] = resp['GroupHandle']
    request['MemberId'] = samr.DOMAIN_USER_RID_ADMIN
    try:
        resp2 = dce.request(request)
        resp2.dump()
    except samr.DCERPCSessionError as e:
        # Removing a member from its primary group is expected to fail.
        if str(e).find('STATUS_MEMBERS_PRIMARY_GROUP') < 0:
            raise
    request = samr.SamrAddMemberToGroup()
    request['GroupHandle'] = resp['GroupHandle']
    request['MemberId'] = samr.DOMAIN_USER_RID_ADMIN
    request['Attributes'] = samr.SE_GROUP_ENABLED_BY_DEFAULT
    try:
        resp2 = dce.request(request)
        resp2.dump()
    except samr.DCERPCSessionError as e:
        # The admin may already be a member; that is acceptable.
        if str(e).find('STATUS_MEMBER_IN_GROUP') < 0:
            raise
def test_hSamrAddMemberToGroup_hSamrRemoveMemberFromGroup(self):
    """Helper-function variant: remove then re-add the admin RID in
    DOMAIN_GROUP_RID_USERS, tolerating the 'expected state' errors."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrConnect()
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['ServerName'] = self.server_name_string
    resp = dce.request(request)
    request = samr.SamrOpenGroup()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_NO_SUCH_DOMAIN') < 0:
            raise
        # Fix: previously fell through with `resp` still holding the
        # SamrConnect reply, so resp['GroupHandle'] below failed
        # confusingly. Skip cleanly when the domain is missing.
        return
    try:
        resp2 = samr.hSamrRemoveMemberFromGroup(dce, resp['GroupHandle'], samr.DOMAIN_USER_RID_ADMIN)
        resp2.dump()
    except samr.DCERPCSessionError as e:
        # Removing a member from its primary group is expected to fail.
        if str(e).find('STATUS_MEMBERS_PRIMARY_GROUP') < 0:
            raise
    try:
        resp2 = samr.hSamrAddMemberToGroup(dce, resp['GroupHandle'], samr.DOMAIN_USER_RID_ADMIN, samr.SE_GROUP_ENABLED_BY_DEFAULT)
        resp2.dump()
    except samr.DCERPCSessionError as e:
        # The admin may already be a member; that is acceptable.
        if str(e).find('STATUS_MEMBER_IN_GROUP') < 0:
            raise
def test_SamrGetMembersInGroup(self):
    """Open DOMAIN_GROUP_RID_USERS and list its members via the raw
    SamrGetMembersInGroup request."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrOpenGroup()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_NO_SUCH_DOMAIN') < 0:
            raise
        # Fix: the original fell through here with `resp` unbound, so the
        # next statement raised NameError. Skip cleanly instead.
        return
    request = samr.SamrGetMembersInGroup()
    request['GroupHandle'] = resp['GroupHandle']
    resp = dce.request(request)
    resp.dump()
def test_hSamrGetMembersInGroup(self):
    """Open DOMAIN_GROUP_RID_USERS and list its members via the
    hSamrGetMembersInGroup helper."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrOpenGroup()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_NO_SUCH_DOMAIN') < 0:
            raise
        # Fix: the original fell through here with `resp` unbound, so the
        # next statement raised NameError. Skip cleanly instead.
        return
    resp = samr.hSamrGetMembersInGroup(dce, resp['GroupHandle'])
    resp.dump()
def test_SamrGetMembersInAlias(self):
"""Enumerate aliases (paging on STATUS_MORE_ENTRIES), open the first one,
and list its members via the raw SamrGetMembersInAlias request."""
dce, rpc_transport = self.connect()
domainHandle = self.get_domain_handle(dce)
request = samr.SamrEnumerateAliasesInDomain()
request['DomainHandle'] = domainHandle
request['EnumerationContext'] = 0
request['PreferedMaximumLength'] = 500
# Page through the enumeration; STATUS_MORE_ENTRIES arrives as a session
# error whose packet still carries a partial buffer.
status = nt_errors.STATUS_MORE_ENTRIES
while status == nt_errors.STATUS_MORE_ENTRIES:
try:
resp4 = dce.request(request)
except samr.DCERPCSessionError as e:
if str(e).find('STATUS_MORE_ENTRIES') < 0:
raise
resp4 = e.get_packet()
resp4['Buffer'].dump()
request['EnumerationContext'] = resp4['EnumerationContext']
status = resp4['ErrorCode']
# Open the first alias from the last enumeration page.
request = samr.SamrOpenAlias()
request['DomainHandle'] = domainHandle
request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
request['AliasId'] = resp4['Buffer']['Buffer'][0]['RelativeId']
resp = dce.request(request)
resp.dump()
request = samr.SamrGetMembersInAlias()
request['AliasHandle'] = resp['AliasHandle']
resp = dce.request(request)
resp.dump()
def test_hSamrGetMembersInAlias(self):
    """Enumerate the domain's aliases, open the first one returned, and list
    its members through the hSamrGetMembersInAlias helper."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrEnumerateAliasesInDomain()
    request['DomainHandle'] = domainHandle
    request['EnumerationContext'] = 0
    request['PreferedMaximumLength'] = 500
    # Page through the enumeration until the server stops answering
    # STATUS_MORE_ENTRIES; a partial page travels inside the exception packet.
    status = nt_errors.STATUS_MORE_ENTRIES
    while status == nt_errors.STATUS_MORE_ENTRIES:
        try:
            resp4 = dce.request(request)
        except samr.DCERPCSessionError as e:
            if str(e).find('STATUS_MORE_ENTRIES') < 0:
                raise
            resp4 = e.get_packet()
        resp4['Buffer'].dump()
        request['EnumerationContext'] = resp4['EnumerationContext']
        status = resp4['ErrorCode']
    # Open the first alias of the last page and query its membership.
    request = samr.SamrOpenAlias()
    request['DomainHandle'] = domainHandle
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    request['AliasId'] = resp4['Buffer']['Buffer'][0]['RelativeId']
    resp = dce.request(request)
    resp.dump()
    resp = samr.hSamrGetMembersInAlias(dce, resp['AliasHandle'])
    resp.dump()
def test_SamrAddMemberToAlias_SamrRemoveMemberFromAlias(self):
    """Create a temporary alias, add the domain Administrator SID to it,
    remove it again, and finally delete the alias (raw requests)."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateAliasInDomain()
    request['DomainHandle'] = domainHandle
    request['AccountName'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    resp = dce.request(request)
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    resp.dump()
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    sid = samr.RPC_SID()
    sid.fromCanonical(adminSID)
    request = samr.SamrAddMemberToAlias()
    request['AliasHandle'] = aliasHandle
    request['MemberId'] = sid
    resp2 = dce.request(request)
    resp2.dump()
    request = samr.SamrRemoveMemberFromAlias()
    request['AliasHandle'] = aliasHandle
    request['MemberId'] = sid
    resp2 = dce.request(request)
    resp2.dump()
    # Clean up the temporary alias.
    request = samr.SamrDeleteAlias()
    request['AliasHandle'] = aliasHandle
    dce.request(request)
def test_hSamrAddMemberToAlias_hSamrRemoveMemberFromAlias(self):
    """Create a temporary alias, add the domain Administrator SID to it,
    remove it again, and delete the alias, all through the h* helpers.

    Fix: the create response was dumped twice in a row; one dump removed.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    resp = samr.hSamrCreateAliasInDomain(dce, domainHandle, self.test_group, samr.GROUP_ALL_ACCESS | samr.DELETE)
    resp.dump()
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    sid = samr.RPC_SID()
    sid.fromCanonical(adminSID)
    resp2 = samr.hSamrAddMemberToAlias(dce, aliasHandle, sid)
    resp2.dump()
    resp2 = samr.hSamrRemoveMemberFromAlias(dce, aliasHandle, sid)
    resp2.dump()
    # Clean up the temporary alias.
    resp = samr.hSamrDeleteAlias(dce, aliasHandle)
    resp.dump()
def test_SamrAddMultipleMembersToAlias_SamrRemoveMultipleMembersFromAliass(self):
    """Create a temporary alias, add the Administrator and Guest SIDs to it
    in one call, remove them in one call, and delete the alias (raw requests).

    Fixes: removed an `RPC_SID` local that was built twice and never used,
    and made the removal step use the `aliasHandle` local (same value as
    `resp['AliasHandle']`) for consistency with the add step.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateAliasInDomain()
    request['DomainHandle'] = domainHandle
    request['AccountName'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    resp = dce.request(request)
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    resp.dump()
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    guestSID = domainID + '-%d' % samr.DOMAIN_USER_RID_GUEST
    sid1 = samr.RPC_SID()
    sid1.fromCanonical(adminSID)
    sid2 = samr.RPC_SID()
    sid2.fromCanonical(guestSID)
    si = samr.PSAMPR_SID_INFORMATION()
    si['SidPointer'] = sid1
    si2 = samr.PSAMPR_SID_INFORMATION()
    si2['SidPointer'] = sid2
    request = samr.SamrAddMultipleMembersToAlias()
    request['AliasHandle'] = aliasHandle
    request['MembersBuffer']['Count'] = 2
    request['MembersBuffer']['Sids'].append(si)
    request['MembersBuffer']['Sids'].append(si2)
    resp2 = dce.request(request)
    resp2.dump()
    request = samr.SamrRemoveMultipleMembersFromAlias()
    request['AliasHandle'] = aliasHandle
    request['MembersBuffer']['Count'] = 2
    request['MembersBuffer']['Sids'].append(si)
    request['MembersBuffer']['Sids'].append(si2)
    resp2 = dce.request(request)
    resp2.dump()
    # Clean up the temporary alias.
    request = samr.SamrDeleteAlias()
    request['AliasHandle'] = aliasHandle
    dce.request(request)
def test_hSamrAddMultipleMembersToAlias_hSamrRemoveMultipleMembersFromAliass(self):
    """Create a temporary alias, add the Administrator and Guest SIDs to it
    in one call, remove them in one call, and delete the alias via h* helpers.

    Fixes: removed an `RPC_SID` local that was built twice and never used,
    dropped leftover commented-out debug calls, and used hSamrDeleteAlias for
    the cleanup, consistent with the other h* tests.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    resp = samr.hSamrCreateAliasInDomain(dce, domainHandle, self.test_group, samr.GROUP_ALL_ACCESS | samr.DELETE)
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    resp.dump()
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    guestSID = domainID + '-%d' % samr.DOMAIN_USER_RID_GUEST
    sid1 = samr.RPC_SID()
    sid1.fromCanonical(adminSID)
    sid2 = samr.RPC_SID()
    sid2.fromCanonical(guestSID)
    si = samr.PSAMPR_SID_INFORMATION()
    si['SidPointer'] = sid1
    si2 = samr.PSAMPR_SID_INFORMATION()
    si2['SidPointer'] = sid2
    sidArray = samr.SAMPR_PSID_ARRAY()
    sidArray['Sids'].append(si)
    sidArray['Sids'].append(si2)
    resp = samr.hSamrAddMultipleMembersToAlias(dce, aliasHandle, sidArray)
    resp.dump()
    resp = samr.hSamrRemoveMultipleMembersFromAlias(dce, aliasHandle, sidArray)
    resp.dump()
    # Clean up the temporary alias.
    samr.hSamrDeleteAlias(dce, aliasHandle)
def test_SamrRemoveMemberFromForeignDomain(self):
    """Issue a raw SamrRemoveMemberFromForeignDomain for the Administrator
    SID; STATUS_SPECIAL_ACCOUNT is the expected rejection for that account.
    A temporary alias is created only to learn the domain SID, then deleted."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateAliasInDomain()
    request['DomainHandle'] = domainHandle
    request['AccountName'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    resp = dce.request(request)
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    resp.dump()
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    request = samr.SamrRemoveMemberFromForeignDomain()
    request['DomainHandle'] = domainHandle
    request['MemberSid'].fromCanonical(adminSID)
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        # Removing a built-in special account is expected to be refused.
        if str(e).find('STATUS_SPECIAL_ACCOUNT') < 0:
            raise
    # Clean up the temporary alias.
    request = samr.SamrDeleteAlias()
    request['AliasHandle'] = aliasHandle
    dce.request(request)
def test_hSamrRemoveMemberFromForeignDomain(self):
    """Call hSamrRemoveMemberFromForeignDomain for the Administrator SID;
    STATUS_SPECIAL_ACCOUNT is the expected rejection for that account.

    Fix: removed a stray `resp = dce.request(request)` that re-sent the
    earlier SamrRidToSid request and overwrote the helper's response before
    it was dumped.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateAliasInDomain()
    request['DomainHandle'] = domainHandle
    request['AccountName'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    resp = dce.request(request)
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    resp.dump()
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    sid = samr.RPC_SID()
    sid.fromCanonical(adminSID)
    try:
        resp = samr.hSamrRemoveMemberFromForeignDomain(dce, domainHandle, sid)
        resp.dump()
    except samr.DCERPCSessionError as e:
        # Removing a built-in special account is expected to be refused.
        if str(e).find('STATUS_SPECIAL_ACCOUNT') < 0:
            raise
    # Clean up the temporary alias.
    request = samr.SamrDeleteAlias()
    request['AliasHandle'] = aliasHandle
    dce.request(request)
def test_SamrGetAliasMembership(self):
    """Create a temporary alias, then query which aliases the Administrator
    and Guest SIDs belong to via a raw SamrGetAliasMembership request.

    Fix: removed an `RPC_SID` local (`sid`) that was built and never used.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateAliasInDomain()
    request['DomainHandle'] = domainHandle
    request['AccountName'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    resp = dce.request(request)
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    resp.dump()
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    guestSID = domainID + '-%d' % samr.DOMAIN_USER_RID_GUEST
    sid1 = samr.RPC_SID()
    sid1.fromCanonical(adminSID)
    sid2 = samr.RPC_SID()
    sid2.fromCanonical(guestSID)
    si = samr.PSAMPR_SID_INFORMATION()
    si['SidPointer'] = sid1
    si2 = samr.PSAMPR_SID_INFORMATION()
    si2['SidPointer'] = sid2
    request = samr.SamrGetAliasMembership()
    request['DomainHandle'] = domainHandle
    request['SidArray']['Count'] = 2
    request['SidArray']['Sids'].append(si)
    request['SidArray']['Sids'].append(si2)
    resp = dce.request(request)
    resp.dump()
    # Clean up the temporary alias.
    request = samr.SamrDeleteAlias()
    request['AliasHandle'] = aliasHandle
    dce.request(request)
def test_hSamrGetAliasMembership(self):
    """Create a temporary alias, then query which aliases the Administrator
    and Guest SIDs belong to through the hSamrGetAliasMembership helper.

    Fixes: removed an unused `RPC_SID` local and leftover commented-out
    debug calls; replaced the duplicated delete-and-reraise cleanup with a
    single `try/finally` so the temporary alias is removed on every path.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateAliasInDomain()
    request['DomainHandle'] = domainHandle
    request['AccountName'] = self.test_group
    request['DesiredAccess'] = samr.GROUP_ALL_ACCESS | samr.DELETE
    resp = dce.request(request)
    aliasHandle = resp['AliasHandle']
    relativeId = resp['RelativeId']
    resp.dump()
    # Resolve the new alias' RID to a full SID so the domain SID can be derived.
    request = samr.SamrRidToSid()
    request['ObjectHandle'] = domainHandle
    request['Rid'] = relativeId
    resp3 = dce.request(request)
    resp3.dump()
    # Let's extract the SID and remove the RID from one entry
    sp = resp3['Sid'].formatCanonical()
    domainID = '-'.join(sp.split('-')[:-1])
    adminSID = domainID + '-%d' % samr.DOMAIN_USER_RID_ADMIN
    guestSID = domainID + '-%d' % samr.DOMAIN_USER_RID_GUEST
    sid1 = samr.RPC_SID()
    sid1.fromCanonical(adminSID)
    sid2 = samr.RPC_SID()
    sid2.fromCanonical(guestSID)
    si = samr.PSAMPR_SID_INFORMATION()
    si['SidPointer'] = sid1
    si2 = samr.PSAMPR_SID_INFORMATION()
    si2['SidPointer'] = sid2
    sidsArray = samr.SAMPR_PSID_ARRAY()
    sidsArray['Sids'].append(si)
    sidsArray['Sids'].append(si2)
    try:
        resp = samr.hSamrGetAliasMembership(dce, domainHandle, sidsArray)
        resp.dump()
    finally:
        # Always clean up the temporary alias, on success or failure.
        request = samr.SamrDeleteAlias()
        request['AliasHandle'] = aliasHandle
        dce.request(request)
def test_SamrSetMemberAttributesOfGroup(self):
    """Set SE_GROUP_ENABLED_BY_DEFAULT on the Administrator member of the
    Domain Users group with a raw SamrSetMemberAttributesOfGroup request."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    connectReq = samr.SamrConnect()
    connectReq['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    connectReq['ServerName'] = self.server_name_string
    dce.request(connectReq)
    openGroup = samr.SamrOpenGroup()
    openGroup['DomainHandle'] = domainHandle
    openGroup['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    openGroup['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    groupResp = dce.request(openGroup)
    setAttrs = samr.SamrSetMemberAttributesOfGroup()
    setAttrs['GroupHandle'] = groupResp['GroupHandle']
    setAttrs['MemberId'] = samr.DOMAIN_USER_RID_ADMIN
    setAttrs['Attributes'] = samr.SE_GROUP_ENABLED_BY_DEFAULT
    resp = dce.request(setAttrs)
    resp.dump()
def test_hSamrSetMemberAttributesOfGroup(self):
    """Set SE_GROUP_ENABLED_BY_DEFAULT on the Administrator member of the
    Domain Users group through the hSamrSetMemberAttributesOfGroup helper."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    connectReq = samr.SamrConnect()
    connectReq['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    connectReq['ServerName'] = self.server_name_string
    dce.request(connectReq)
    openGroup = samr.SamrOpenGroup()
    openGroup['DomainHandle'] = domainHandle
    openGroup['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED
    openGroup['GroupId'] = samr.DOMAIN_GROUP_RID_USERS
    groupResp = dce.request(openGroup)
    resp = samr.hSamrSetMemberAttributesOfGroup(dce, groupResp['GroupHandle'], samr.DOMAIN_USER_RID_ADMIN, samr.SE_GROUP_ENABLED_BY_DEFAULT)
    resp.dump()
def test_SamrGetUserDomainPasswordInformation(self):
    """Open the Administrator account and fetch its effective domain
    password policy with a raw SamrGetUserDomainPasswordInformation request."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    openUser = samr.SamrOpenUser()
    openUser['DomainHandle'] = domainHandle
    openUser['DesiredAccess'] = samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_READ_ACCOUNT
    openUser['UserId'] = samr.DOMAIN_USER_RID_ADMIN
    userResp = dce.request(openUser)
    pwdQuery = samr.SamrGetUserDomainPasswordInformation()
    pwdQuery['UserHandle'] = userResp['UserHandle']
    resp = dce.request(pwdQuery)
    resp.dump()
def test_hSamrGetUserDomainPasswordInformation(self):
    """Open the Administrator account and fetch its effective domain
    password policy through the hSamrGetUserDomainPasswordInformation helper."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    openUser = samr.SamrOpenUser()
    openUser['DomainHandle'] = domainHandle
    openUser['DesiredAccess'] = samr.USER_READ_GENERAL | samr.USER_READ_PREFERENCES | samr.USER_READ_ACCOUNT
    openUser['UserId'] = samr.DOMAIN_USER_RID_ADMIN
    userResp = dce.request(openUser)
    resp = samr.hSamrGetUserDomainPasswordInformation(dce, userResp['UserHandle'])
    resp.dump()
def test_SamrGetDomainPasswordInformation(self):
    """Query the domain password policy with a raw
    SamrGetDomainPasswordInformation request (no domain handle needed)."""
    dce, rpc_transport = self.connect()
    query = samr.SamrGetDomainPasswordInformation()
    query['Unused'] = NULL
    response = dce.request(query)
    response.dump()
def test_hSamrGetDomainPasswordInformation(self):
    """Query the domain password policy through the helper wrapper."""
    dce, rpc_transport = self.connect()
    samr.hSamrGetDomainPasswordInformation(dce).dump()
def test_SamrRidToSid(self):
    """Translate the Administrator RID into a full SID (raw request)."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    ridToSid = samr.SamrRidToSid()
    ridToSid['ObjectHandle'] = domainHandle
    ridToSid['Rid'] = samr.DOMAIN_USER_RID_ADMIN
    dce.request(ridToSid)
def test_hSamrRidToSid(self):
    """Translate the Administrator RID into a full SID via the helper."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    samr.hSamrRidToSid(dce, domainHandle, samr.DOMAIN_USER_RID_ADMIN).dump()
def test_SamrSetDSRMPassword(self):
    """Attempt SamrSetDSRMPassword and accept the documented rejections:
    RPC_S_ACCESS_DENIED over ncacn_ip_tcp, STATUS_NOT_SUPPORTED otherwise.

    Fix: the 16-byte NT OWF hash is a raw byte blob, so use a bytes literal
    (`b'\\x00'*16`) rather than a text string for the fixed-size field.
    """
    dce, rpc_transport = self.connect()
    request = samr.SamrSetDSRMPassword()
    request['Unused'] = NULL
    request['UserId'] = samr.DOMAIN_USER_RID_ADMIN
    request['EncryptedNtOwfPassword'] = b'\x00' * 16
    # calls made to SamrSetDSRMPassword using NCACN_IP_TCP are rejected with RPC_S_ACCESS_DENIED.
    try:
        dce.request(request)
    except Exception as e:
        if self.protocol == 'ncacn_ip_tcp':
            if str(e).find('rpc_s_access_denied') < 0:
                raise
        elif str(e).find('STATUS_NOT_SUPPORTED') < 0:
            raise
def test_SamrValidatePassword(self):
    """Build a raw SamrValidatePassword (password-reset validation) request
    and send it; rpc_s_access_denied is an accepted server response."""
    dce, rpc_transport = self.connect()
    request = samr.SamrValidatePassword()
    request['ValidationType'] = samr.PASSWORD_POLICY_VALIDATION_TYPE.SamValidatePasswordReset
    request['InputArg']['tag'] = samr.PASSWORD_POLICY_VALIDATION_TYPE.SamValidatePasswordReset
    request['InputArg']['ValidatePasswordResetInput']['InputPersistedFields']['PresentFields'] = samr.SAM_VALIDATE_PASSWORD_HISTORY
    request['InputArg']['ValidatePasswordResetInput']['InputPersistedFields']['PasswordHistory'] = NULL
    request['InputArg']['ValidatePasswordResetInput']['ClearPassword'] = 'AAAAAAAAAAAAAAAA'
    request['InputArg']['ValidatePasswordResetInput']['UserAccountName'] = 'Administrator'
    # Local pack/unpack round-trip of the request structure; sanity-checks
    # that the NDR encoding is self-consistent before hitting the wire.
    kk = samr.SamrValidatePassword()
    kk.fromString(request.getData())
    try:
        resp = dce.request(request)
        resp.dump()
    except Exception as e:
        # Some servers refuse the call entirely; anything else is a failure.
        if str(e).find('rpc_s_access_denied') < 0:
            raise
def test_hSamrValidatePassword(self):
    """Exercise hSamrValidatePassword with a password-reset validation
    argument; rpc_s_access_denied is an accepted server response."""
    dce, rpc_transport = self.connect()
    arg = samr.SAM_VALIDATE_INPUT_ARG()
    arg['tag'] = samr.PASSWORD_POLICY_VALIDATION_TYPE.SamValidatePasswordReset
    arg['ValidatePasswordResetInput']['InputPersistedFields']['PresentFields'] = samr.SAM_VALIDATE_PASSWORD_HISTORY
    arg['ValidatePasswordResetInput']['InputPersistedFields']['PasswordHistory'] = NULL
    arg['ValidatePasswordResetInput']['ClearPassword'] = 'AAAAAAAAAAAAAAAA'
    arg['ValidatePasswordResetInput']['UserAccountName'] = 'Administrator'
    try:
        samr.hSamrValidatePassword(dce, arg).dump()
    except Exception as e:
        # Some servers refuse the call entirely; anything else is a failure.
        if str(e).find('rpc_s_access_denied') < 0:
            raise
def test_SamrQuerySecurityObject(self):
    """Read the full security descriptor of the domain object (raw request)."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    allInfo = (dtypes.OWNER_SECURITY_INFORMATION | dtypes.GROUP_SECURITY_INFORMATION
               | dtypes.SACL_SECURITY_INFORMATION | dtypes.DACL_SECURITY_INFORMATION)
    query = samr.SamrQuerySecurityObject()
    query['ObjectHandle'] = domainHandle
    query['SecurityInformation'] = allInfo
    dce.request(query).dump()
def test_hSamrQuerySecurityObject(self):
    """Read the full security descriptor of the domain object via the helper."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    allInfo = (dtypes.OWNER_SECURITY_INFORMATION | dtypes.GROUP_SECURITY_INFORMATION
               | dtypes.SACL_SECURITY_INFORMATION | dtypes.DACL_SECURITY_INFORMATION)
    samr.hSamrQuerySecurityObject(dce, domainHandle, allInfo).dump()
def test_SamrSetSecurityObject(self):
    """Read the test user's group security descriptor and write it back via
    a raw SamrSetSecurityObject; STATUS_BAD_DESCRIPTOR_FORMAT is tolerated.

    Fix: the error check used `find(...) <= 0`, which would mask the error
    only when the substring is absent OR at offset 0; `< 0` is the correct
    `str.find` idiom (returns -1 when absent) and matches the rest of the file.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    resp = samr.hSamrLookupNamesInDomain(dce, domainHandle, (self.username,))
    resp.dump()
    resp = samr.hSamrOpenUser(dce, domainHandle, samr.USER_ALL_ACCESS | samr.USER_READ_GROUP_INFORMATION | samr.USER_WRITE_GROUP_INFORMATION, resp['RelativeIds']['Element'][0]['Data'])
    resp.dump()
    userHandle = resp['UserHandle']
    # Fetch the current descriptor so it can be written straight back.
    request = samr.SamrQuerySecurityObject()
    request['ObjectHandle'] = userHandle
    request['SecurityInformation'] = dtypes.GROUP_SECURITY_INFORMATION
    resp = dce.request(request)
    resp.dump()
    request = samr.SamrSetSecurityObject()
    request['ObjectHandle'] = userHandle
    request['SecurityInformation'] = dtypes.GROUP_SECURITY_INFORMATION
    request['SecurityDescriptor'] = resp['SecurityDescriptor']
    try:
        resp = dce.request(request)
        resp.dump()
    except Exception as e:
        if str(e).find('STATUS_BAD_DESCRIPTOR_FORMAT') < 0:
            raise
    resp = samr.hSamrCloseHandle(dce, userHandle)
    resp.dump()
def test_hSamrSetSecurityObject(self):
    """Read the test user's group security descriptor and write it back via
    hSamrSetSecurityObject; STATUS_BAD_DESCRIPTOR_FORMAT is tolerated.

    Fix: `find(...) <= 0` replaced with the correct `< 0` idiom
    (`str.find` returns -1 when absent), consistent with the rest of the file.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    resp = samr.hSamrLookupNamesInDomain(dce, domainHandle, (self.username,))
    resp.dump()
    resp = samr.hSamrOpenUser(dce, domainHandle, samr.USER_ALL_ACCESS | samr.USER_READ_GROUP_INFORMATION | samr.USER_WRITE_GROUP_INFORMATION, resp['RelativeIds']['Element'][0]['Data'])
    resp.dump()
    userHandle = resp['UserHandle']
    resp = samr.hSamrQuerySecurityObject(dce, userHandle, dtypes.GROUP_SECURITY_INFORMATION)
    resp.dump()
    try:
        resp = samr.hSamrSetSecurityObject(dce, userHandle, dtypes.GROUP_SECURITY_INFORMATION, resp['SecurityDescriptor'])
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_BAD_DESCRIPTOR_FORMAT') < 0:
            raise
    resp = samr.hSamrCloseHandle(dce, userHandle)
    resp.dump()
def test_SamrChangePasswordUser(self):
    """Create a temporary user, change its (empty) password to 'ADMIN' with
    a raw SamrChangePasswordUser request, then delete the user.

    The call proves knowledge of the old password by cross-encrypting the
    old/new NT OWF hashes with each other (MS-SAMR 3.1.5.10.1)."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateUser2InDomain()
    request['DomainHandle'] = domainHandle
    request['Name'] = self.test_account
    request['AccountType'] = samr.USER_NORMAL_ACCOUNT
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED | samr.USER_READ_GENERAL | samr.DELETE
    resp0 = dce.request(request)
    resp0.dump()
    # New accounts start with an empty password.
    oldPwd = ''
    oldPwdHashNT = ntlm.NTOWFv1(oldPwd)
    newPwd = 'ADMIN'
    newPwdHashNT = ntlm.NTOWFv1(newPwd)
    newPwdHashLM = ntlm.LMOWFv1(newPwd)
    request = samr.SamrChangePasswordUser()
    request['UserHandle'] = resp0['UserHandle']
    # No LM proof is supplied; only the NT cross-encryption pair plus the
    # LM-with-NT cross encryption are present.
    request['LmPresent'] = 0
    request['OldLmEncryptedWithNewLm'] = NULL
    request['NewLmEncryptedWithOldLm'] = NULL
    request['NtPresent'] = 1
    request['OldNtEncryptedWithNewNt'] = crypto.SamEncryptNTLMHash(oldPwdHashNT, newPwdHashNT)
    request['NewNtEncryptedWithOldNt'] = crypto.SamEncryptNTLMHash(newPwdHashNT, oldPwdHashNT)
    request['NtCrossEncryptionPresent'] = 0
    request['NewNtEncryptedWithNewLm'] = NULL
    request['LmCrossEncryptionPresent'] = 1
    request['NewLmEncryptedWithNewNt'] = crypto.SamEncryptNTLMHash(newPwdHashLM, newPwdHashNT)
    resp = dce.request(request)
    resp.dump()
    # Delete the temp user
    request = samr.SamrDeleteUser()
    request['UserHandle'] = resp0['UserHandle']
    resp = dce.request(request)
    resp.dump()
def test_hSamrChangePasswordUser(self):
    """Create a temporary user, change its (empty) password to 'ADMIN'
    through the hSamrChangePasswordUser helper, then delete the user."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    createUser = samr.SamrCreateUser2InDomain()
    createUser['DomainHandle'] = domainHandle
    createUser['Name'] = self.test_account
    createUser['AccountType'] = samr.USER_NORMAL_ACCOUNT
    createUser['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED | samr.USER_READ_GENERAL | samr.DELETE
    createResp = dce.request(createUser)
    createResp.dump()
    samr.hSamrChangePasswordUser(dce, createResp['UserHandle'], '', 'ADMIN').dump()
    # Delete the temp user
    deleteUser = samr.SamrDeleteUser()
    deleteUser['UserHandle'] = createResp['UserHandle']
    dce.request(deleteUser).dump()
def test_SamrOemChangePasswordUser2(self):
    """Attempt an OEM (LM-hash based) password change for Administrator;
    STATUS_WRONG_PASSWORD is tolerated since targets without LM hashes
    stored cannot verify the proof.

    Fix: replaced the `b(newPwd)` compatibility shim with the explicit and
    equivalent `newPwd.encode('latin-1')`.
    """
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    # As you can guess by now, target machine must have the Administrator account with password admin
    # NOTE: It's giving me WRONG_PASSWORD 'cause the target test server doesn't hold LM Hashes
    # further testing is needed to verify this call works
    oldPwd = 'admin'
    oldPwdHashLM = ntlm.LMOWFv1(oldPwd)
    newPwd = 'ADMIN'
    newPwdHashLM = ntlm.LMOWFv1(newPwd)
    try:
        from Cryptodome.Cipher import ARC4
    except Exception:
        print("Warning: You don't have any crypto installed. You need pycryptodomex")
        print("See https://pypi.org/project/pycryptodomex/")
    request = samr.SamrOemChangePasswordUser2()
    request['ServerName'] = ''
    request['UserName'] = 'Administrator'
    # SAMPR_USER_PASSWORD: the cleartext password sits right-aligned in a
    # 512-byte buffer, padded on the left; Length is the password byte count.
    samUser = samr.SAMPR_USER_PASSWORD()
    samUser['Buffer'] = b'A' * (512 - len(newPwd)) + newPwd.encode('latin-1')
    samUser['Length'] = len(newPwd)
    pwdBuff = samUser.getData()
    # The whole structure is RC4-encrypted with the old LM hash as key.
    rc4 = ARC4.new(oldPwdHashLM)
    encBuf = rc4.encrypt(pwdBuff)
    request['NewPasswordEncryptedWithOldLm']['Buffer'] = encBuf
    request['OldLmOwfPasswordEncryptedWithNewLm'] = crypto.SamEncryptNTLMHash(oldPwdHashLM, newPwdHashLM)
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_WRONG_PASSWORD') < 0:
            raise
def test_SamrUnicodeChangePasswordUser2(self):
    """Create a temporary user, set its password to 'ADMIN' via
    SamrChangePasswordUser, then change it again with a raw
    SamrUnicodeChangePasswordUser2 request; STATUS_PASSWORD_RESTRICTION is
    tolerated. The user is deleted at the end."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateUser2InDomain()
    request['DomainHandle'] = domainHandle
    request['Name'] = self.test_account
    request['AccountType'] = samr.USER_NORMAL_ACCOUNT
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED | samr.USER_READ_GENERAL | samr.DELETE
    resp0 = dce.request(request)
    resp0.dump()
    # Step 1: give the fresh (empty-password) account a known password using
    # the handle-based cross-encryption change.
    oldPwd = ''
    oldPwdHashNT = ntlm.NTOWFv1(oldPwd)
    newPwd = 'ADMIN'
    newPwdHashNT = ntlm.NTOWFv1(newPwd)
    newPwdHashLM = ntlm.LMOWFv1(newPwd)
    request = samr.SamrChangePasswordUser()
    request['UserHandle'] = resp0['UserHandle']
    request['LmPresent'] = 0
    request['OldLmEncryptedWithNewLm'] = NULL
    request['NewLmEncryptedWithOldLm'] = NULL
    request['NtPresent'] = 1
    request['OldNtEncryptedWithNewNt'] = crypto.SamEncryptNTLMHash(oldPwdHashNT, newPwdHashNT)
    request['NewNtEncryptedWithOldNt'] = crypto.SamEncryptNTLMHash(newPwdHashNT, oldPwdHashNT)
    request['NtCrossEncryptionPresent'] = 0
    request['NewNtEncryptedWithNewLm'] = NULL
    request['LmCrossEncryptionPresent'] = 1
    request['NewLmEncryptedWithNewNt'] = crypto.SamEncryptNTLMHash(newPwdHashLM, newPwdHashNT)
    resp = dce.request(request)
    resp.dump()
    # Step 2: change the password again by account name, using a random
    # 15-character replacement so policy history checks are unlikely to trip.
    oldPwd = 'ADMIN'
    oldPwdHashNT = ntlm.NTOWFv1(oldPwd)
    newPwd = "".join([random.choice(string.ascii_letters) for i in range(15)])
    newPwdHashNT = ntlm.NTOWFv1(newPwd)
    try:
        from Cryptodome.Cipher import ARC4
    except Exception:
        print("Warning: You don't have any crypto installed. You need pycryptodomex")
        print("See https://pypi.org/project/pycryptodomex/")
    request = samr.SamrUnicodeChangePasswordUser2()
    request['ServerName'] = ''
    request['UserName'] = self.test_account
    # SAMPR_USER_PASSWORD: UTF-16LE password right-aligned in a 512-byte
    # buffer; Length counts password bytes (2 per character).
    samUser = samr.SAMPR_USER_PASSWORD()
    samUser['Buffer'] = b'A'*(512-len(newPwd)*2) + newPwd.encode('utf-16le')
    samUser['Length'] = len(newPwd)*2
    pwdBuff = samUser.getData()
    # The structure is RC4-encrypted with the old NT hash as key.
    rc4 = ARC4.new(oldPwdHashNT)
    encBuf = rc4.encrypt(pwdBuff)
    request['NewPasswordEncryptedWithOldNt']['Buffer'] = encBuf
    request['OldNtOwfPasswordEncryptedWithNewNt'] = crypto.SamEncryptNTLMHash(oldPwdHashNT, newPwdHashNT)
    request['LmPresent'] = 0
    request['NewPasswordEncryptedWithOldLm'] = NULL
    request['OldLmOwfPasswordEncryptedWithNewNt'] = NULL
    try:
        resp = dce.request(request)
        resp.dump()
    except samr.DCERPCSessionError as e:
        if str(e).find('STATUS_PASSWORD_RESTRICTION') < 0:
            raise
    # Delete the temp user
    request = samr.SamrDeleteUser()
    request['UserHandle'] = resp0['UserHandle']
    resp = dce.request(request)
    resp.dump()
def test_hSamrUnicodeChangePasswordUser2(self):
    """Create a temporary user, set its password to 'ADMIN' via
    SamrChangePasswordUser, then change it again with the
    hSamrUnicodeChangePasswordUser2 helper; STATUS_PASSWORD_RESTRICTION is
    tolerated. The user is deleted at the end."""
    dce, rpc_transport = self.connect()
    domainHandle = self.get_domain_handle(dce)
    request = samr.SamrCreateUser2InDomain()
    request['DomainHandle'] = domainHandle
    request['Name'] = self.test_account
    request['AccountType'] = samr.USER_NORMAL_ACCOUNT
    request['DesiredAccess'] = dtypes.MAXIMUM_ALLOWED | samr.USER_READ_GENERAL | samr.DELETE
    resp0 = dce.request(request)
    resp0.dump()
    # Step 1: give the fresh (empty-password) account a known password using
    # the handle-based cross-encryption change.
    oldPwd = ''
    oldPwdHashNT = ntlm.NTOWFv1(oldPwd)
    newPwd = 'ADMIN'
    newPwdHashNT = ntlm.NTOWFv1(newPwd)
    newPwdHashLM = ntlm.LMOWFv1(newPwd)
    request = samr.SamrChangePasswordUser()
    request['UserHandle'] = resp0['UserHandle']
    request['LmPresent'] = 0
    request['OldLmEncryptedWithNewLm'] = NULL
    request['NewLmEncryptedWithOldLm'] = NULL
    request['NtPresent'] = 1
    request['OldNtEncryptedWithNewNt'] = crypto.SamEncryptNTLMHash(oldPwdHashNT, newPwdHashNT)
    request['NewNtEncryptedWithOldNt'] = crypto.SamEncryptNTLMHash(newPwdHashNT, oldPwdHashNT)
    request['NtCrossEncryptionPresent'] = 0
    request['NewNtEncryptedWithNewLm'] = NULL
    request['LmCrossEncryptionPresent'] = 1
    request['NewLmEncryptedWithNewNt'] = crypto.SamEncryptNTLMHash(newPwdHashLM, newPwdHashNT)
    resp = dce.request(request)
    resp.dump()
    # Step 2: change the password by account name through the helper.
    try:
        resp = samr.hSamrUnicodeChangePasswordUser2(dce, '', self.test_account, 'ADMIN', 'betus')
        resp.dump()
    except Exception as e:
        if str(e).find('STATUS_PASSWORD_RESTRICTION') < 0:
            raise
    # Delete the temp user
    request = samr.SamrDeleteUser()
    request['UserHandle'] = resp0['UserHandle']
    resp = dce.request(request)
    resp.dump()
@pytest.mark.remote
class SAMRTestsSMBTransport(SAMRTests, unittest.TestCase):
    """Run the SAMR suite over SMB named pipes (ncacn_np) with NDR transfer syntax."""
    protocol = "ncacn_np"
    transfer_syntax = DCERPCTests.TRANSFER_SYNTAX_NDR
    string_binding_formatting = DCERPCTests.STRING_BINDING_MAPPER
@pytest.mark.remote
class SAMRTestsSMBTransport64(SAMRTests, unittest.TestCase):
    """Run the SAMR suite over SMB named pipes (ncacn_np) with NDR64 transfer syntax."""
    protocol = "ncacn_np"
    transfer_syntax = DCERPCTests.TRANSFER_SYNTAX_NDR64
    string_binding_formatting = DCERPCTests.STRING_BINDING_MAPPER
@pytest.mark.remote
class SAMRTestsTCPTransport(SAMRTests, unittest.TestCase):
    """Run the SAMR suite over direct TCP (ncacn_ip_tcp) with NDR transfer syntax."""
    protocol = "ncacn_ip_tcp"
    transfer_syntax = DCERPCTests.TRANSFER_SYNTAX_NDR
    string_binding_formatting = DCERPCTests.STRING_BINDING_MAPPER
@pytest.mark.remote
class SAMRTestsTCPTransport64(SAMRTests, unittest.TestCase):
    """Run the SAMR suite over direct TCP (ncacn_ip_tcp) with NDR64 transfer syntax."""
    protocol = "ncacn_ip_tcp"
    transfer_syntax = DCERPCTests.TRANSFER_SYNTAX_NDR64
    string_binding_formatting = DCERPCTests.STRING_BINDING_MAPPER
# Process command-line arguments and run the suite when executed directly.
if __name__ == "__main__":
    unittest.main(verbosity=1)
| 42.898326 | 451 | 0.642026 | 9,337 | 102,527 | 6.876727 | 0.064796 | 0.031927 | 0.038391 | 0.026928 | 0.826844 | 0.79311 | 0.780354 | 0.753037 | 0.720471 | 0.689042 | 0 | 0.008174 | 0.249407 | 102,527 | 2,389 | 452 | 42.916283 | 0.826173 | 0.043598 | 0 | 0.802001 | 0 | 0 | 0.109008 | 0.026902 | 0 | 0 | 0 | 0 | 0.015798 | 1 | 0.048447 | false | 0.032649 | 0.007372 | 0 | 0.06951 | 0.002106 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e6be88e249f4f24e4327f81d30ee9a90b84ae50f | 121 | py | Python | modules/__init__.py | UMCUGenetics/clarity_utils | 85c79a906bc587fbdc67d1329e0bc56923769d53 | [
"MIT"
] | 1 | 2018-06-18T16:15:48.000Z | 2018-06-18T16:15:48.000Z | modules/__init__.py | UMCUGenetics/clarity_utils | 85c79a906bc587fbdc67d1329e0bc56923769d53 | [
"MIT"
] | 2 | 2021-03-31T20:12:34.000Z | 2021-11-22T14:33:22.000Z | modules/__init__.py | UMCUGenetics/clarity_utils_usf | 85c79a906bc587fbdc67d1329e0bc56923769d53 | [
"MIT"
] | null | null | null | import modules.useq_mail
import modules.useq_template
import modules.useq_illumina_parsers
import modules.useq_nextcloud
| 24.2 | 36 | 0.900826 | 17 | 121 | 6.117647 | 0.470588 | 0.5 | 0.653846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066116 | 121 | 4 | 37 | 30.25 | 0.920354 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e6d2c50fe7a4dfcebaf71f43e9b2dd22c31fd02c | 316 | py | Python | platform.py | Venator2013/platform-mt2502 | 8960143be395ebd36e9a25313a4af434afb4018e | [
"MIT"
] | null | null | null | platform.py | Venator2013/platform-mt2502 | 8960143be395ebd36e9a25313a4af434afb4018e | [
"MIT"
] | null | null | null | platform.py | Venator2013/platform-mt2502 | 8960143be395ebd36e9a25313a4af434afb4018e | [
"MIT"
] | 1 | 2021-12-29T14:49:48.000Z | 2021-12-29T14:49:48.000Z | from platform import system
from platformio.managers.platform import PlatformBase
class Mt2502Platform(PlatformBase):
def configure_default_packages(self, variables, target):
return PlatformBase.configure_default_packages(self, variables,
target)
| 35.111111 | 71 | 0.677215 | 28 | 316 | 7.5 | 0.607143 | 0.133333 | 0.228571 | 0.266667 | 0.409524 | 0.409524 | 0 | 0 | 0 | 0 | 0 | 0.017467 | 0.275316 | 316 | 8 | 72 | 39.5 | 0.899563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0.166667 | 0.833333 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
fc1dd48ece48bf0e2ce7c1c93153566df5ef321f | 8,572 | py | Python | colour/plotting/tests/test_volume.py | tjdcs/colour | 09413da71b5da57408eb812797c5db1300d4791a | [
"BSD-3-Clause"
] | null | null | null | colour/plotting/tests/test_volume.py | tjdcs/colour | 09413da71b5da57408eb812797c5db1300d4791a | [
"BSD-3-Clause"
] | null | null | null | colour/plotting/tests/test_volume.py | tjdcs/colour | 09413da71b5da57408eb812797c5db1300d4791a | [
"BSD-3-Clause"
] | null | null | null | # !/usr/bin/env python
"""Define the unit tests for the :mod:`colour.plotting.volume` module."""
import numpy as np
import unittest
from matplotlib.pyplot import Axes, Figure
from colour.plotting import plot_RGB_colourspaces_gamuts, plot_RGB_scatter
from colour.plotting.volume import nadir_grid, RGB_identity_cube
__author__ = "Colour Developers"
__copyright__ = "Copyright 2013 Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"TestNadirGrid",
"TestRGBIdentityCube",
"TestPlotRGBColourspacesGamuts",
"TestPlotRGBScatter",
]
class TestNadirGrid(unittest.TestCase):
    """
    Define :func:`colour.plotting.volume.nadir_grid` definition unit tests
    methods.
    """

    def test_nadir_grid(self):
        """Test :func:`colour.plotting.volume.nadir_grid` definition."""

        # segments=1 keeps the fixture small; the function returns the grid
        # quadrilaterals plus per-face and per-edge RGBA colours.
        quads, faces_colours, edges_colours = nadir_grid(segments=1)

        # Expected geometry: one full-extent quad, four unit sub-quads, and
        # two thin strips (the +/-0.001 coordinates) along the x and y axes
        # -- presumably the axis markers; confirm against nadir_grid itself.
        np.testing.assert_almost_equal(
            quads,
            np.array(
                [
                    [
                        [-1.00000000, -1.00000000, 0.00000000],
                        [1.00000000, -1.00000000, 0.00000000],
                        [1.00000000, 1.00000000, 0.00000000],
                        [-1.00000000, 1.00000000, 0.00000000],
                    ],
                    [
                        [-1.00000000, -1.00000000, 0.00000000],
                        [0.00000000, -1.00000000, 0.00000000],
                        [0.00000000, 0.00000000, 0.00000000],
                        [-1.00000000, 0.00000000, 0.00000000],
                    ],
                    [
                        [-1.00000000, 0.00000000, 0.00000000],
                        [0.00000000, 0.00000000, 0.00000000],
                        [0.00000000, 1.00000000, 0.00000000],
                        [-1.00000000, 1.00000000, 0.00000000],
                    ],
                    [
                        [0.00000000, -1.00000000, 0.00000000],
                        [1.00000000, -1.00000000, 0.00000000],
                        [1.00000000, 0.00000000, 0.00000000],
                        [0.00000000, 0.00000000, 0.00000000],
                    ],
                    [
                        [0.00000000, 0.00000000, 0.00000000],
                        [1.00000000, 0.00000000, 0.00000000],
                        [1.00000000, 1.00000000, 0.00000000],
                        [0.00000000, 1.00000000, 0.00000000],
                    ],
                    [
                        [-1.00000000, -0.00100000, 0.00000000],
                        [1.00000000, -0.00100000, 0.00000000],
                        [1.00000000, 0.00100000, 0.00000000],
                        [-1.00000000, 0.00100000, 0.00000000],
                    ],
                    [
                        [-0.00100000, -1.00000000, 0.00000000],
                        [0.00100000, -1.00000000, 0.00000000],
                        [0.00100000, 1.00000000, 0.00000000],
                        [-0.00100000, 1.00000000, 0.00000000],
                    ],
                ]
            ),
            decimal=7,
        )

        # Face RGBA: grey low-alpha covering face, fully transparent
        # sub-quads, opaque black axis strips.
        np.testing.assert_almost_equal(
            faces_colours,
            np.array(
                [
                    [0.25000000, 0.25000000, 0.25000000, 0.10000000],
                    [0.00000000, 0.00000000, 0.00000000, 0.00000000],
                    [0.00000000, 0.00000000, 0.00000000, 0.00000000],
                    [0.00000000, 0.00000000, 0.00000000, 0.00000000],
                    [0.00000000, 0.00000000, 0.00000000, 0.00000000],
                    [0.00000000, 0.00000000, 0.00000000, 1.00000000],
                    [0.00000000, 0.00000000, 0.00000000, 1.00000000],
                ]
            ),
            decimal=7,
        )

        # Edge RGBA: semi-transparent grey outline, lighter grid edges, and
        # opaque black edges for the axis strips.
        np.testing.assert_almost_equal(
            edges_colours,
            np.array(
                [
                    [0.50000000, 0.50000000, 0.50000000, 0.50000000],
                    [0.75000000, 0.75000000, 0.75000000, 0.25000000],
                    [0.75000000, 0.75000000, 0.75000000, 0.25000000],
                    [0.75000000, 0.75000000, 0.75000000, 0.25000000],
                    [0.75000000, 0.75000000, 0.75000000, 0.25000000],
                    [0.00000000, 0.00000000, 0.00000000, 1.00000000],
                    [0.00000000, 0.00000000, 0.00000000, 1.00000000],
                ]
            ),
            decimal=7,
        )
class TestRGBIdentityCube(unittest.TestCase):
    """
    Define :func:`colour.plotting.volume.RGB_identity_cube` definition unit
    tests methods.
    """

    def test_RGB_identity_cube(self):
        """Test :func:`colour.plotting.volume.RGB_identity_cube` definition."""

        # One segment per axis: the unit cube decomposes into exactly six
        # quadrilateral faces.
        vertices, RGB = RGB_identity_cube(1, 1, 1)

        # Expected vertices: the six faces of the unit RGB cube, each given
        # as four corner points.
        np.testing.assert_almost_equal(
            vertices,
            np.array(
                [
                    [
                        [0.00000000, 0.00000000, 0.00000000],
                        [1.00000000, 0.00000000, 0.00000000],
                        [1.00000000, 1.00000000, 0.00000000],
                        [0.00000000, 1.00000000, 0.00000000],
                    ],
                    [
                        [0.00000000, 0.00000000, 1.00000000],
                        [1.00000000, 0.00000000, 1.00000000],
                        [1.00000000, 1.00000000, 1.00000000],
                        [0.00000000, 1.00000000, 1.00000000],
                    ],
                    [
                        [0.00000000, 0.00000000, 0.00000000],
                        [1.00000000, 0.00000000, 0.00000000],
                        [1.00000000, 0.00000000, 1.00000000],
                        [0.00000000, 0.00000000, 1.00000000],
                    ],
                    [
                        [0.00000000, 1.00000000, 0.00000000],
                        [1.00000000, 1.00000000, 0.00000000],
                        [1.00000000, 1.00000000, 1.00000000],
                        [0.00000000, 1.00000000, 1.00000000],
                    ],
                    [
                        [0.00000000, 0.00000000, 0.00000000],
                        [0.00000000, 1.00000000, 0.00000000],
                        [0.00000000, 1.00000000, 1.00000000],
                        [0.00000000, 0.00000000, 1.00000000],
                    ],
                    [
                        [1.00000000, 0.00000000, 0.00000000],
                        [1.00000000, 1.00000000, 0.00000000],
                        [1.00000000, 1.00000000, 1.00000000],
                        [1.00000000, 0.00000000, 1.00000000],
                    ],
                ]
            ),
            decimal=7,
        )

        # Expected RGB: one colour per face, equal to the centre point of
        # that face (the varying axis averaged to 0.5).
        np.testing.assert_almost_equal(
            RGB,
            np.array(
                [
                    [0.50000000, 0.50000000, 0.00000000],
                    [0.50000000, 0.50000000, 1.00000000],
                    [0.50000000, 0.00000000, 0.50000000],
                    [0.50000000, 1.00000000, 0.50000000],
                    [0.00000000, 0.50000000, 0.50000000],
                    [1.00000000, 0.50000000, 0.50000000],
                ]
            ),
            decimal=7,
        )
class TestPlotRGBColourspacesGamuts(unittest.TestCase):
    """
    Define :func:`colour.plotting.volume.plot_RGB_colourspaces_gamuts`
    definition unit tests methods.
    """

    def test_plot_RGB_colourspaces_gamuts(self):
        """
        Test :func:`colour.plotting.volume.plot_RGB_colourspaces_gamuts`
        definition.
        """

        colourspaces = ["ITU-R BT.709", "ACEScg", "S-Gamut"]
        figure, axes = plot_RGB_colourspaces_gamuts(
            colourspaces,
            show_spectral_locus=True,
            face_colours=[0.18, 0.18, 0.18],
            chromatically_adapt=True,
        )

        # A smoke test: the call must succeed and hand back the usual
        # Matplotlib figure / axes pair.
        for artist, expected_type in ((figure, Figure), (axes, Axes)):
            self.assertIsInstance(artist, expected_type)
class TestPlotRGBScatter(unittest.TestCase):
    """
    Define :func:`colour.plotting.volume.plot_RGB_scatter` definition unit
    tests methods.
    """

    def test_plot_RGB_scatter(self):
        """Test :func:`colour.plotting.volume.plot_RGB_scatter` definition."""

        # Smoke test with a random RGB image against a standard colourspace.
        RGB = np.random.random((128, 128, 3))
        figure, axes = plot_RGB_scatter(RGB, "ITU-R BT.709")

        for artist, expected_type in ((figure, Figure), (axes, Axes)):
            self.assertIsInstance(artist, expected_type)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 36.476596 | 79 | 0.478651 | 800 | 8,572 | 5.00625 | 0.1325 | 0.258427 | 0.428714 | 0.269663 | 0.753558 | 0.73608 | 0.707116 | 0.659925 | 0.561798 | 0.513608 | 0 | 0.410847 | 0.402007 | 8,572 | 234 | 80 | 36.632479 | 0.370464 | 0.082478 | 0 | 0.516667 | 0 | 0 | 0.038516 | 0.008401 | 0 | 0 | 0 | 0 | 0.05 | 1 | 0.022222 | false | 0 | 0.027778 | 0 | 0.072222 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc3c580908a81ec9c129d2c06607cb98570f1dec | 3,992 | py | Python | booking/tests/test_forms.py | bnico99/foruminternational | 8d577d3d929277b88d9c6deb1a77f90fb34ad469 | [
"BSD-3-Clause"
] | null | null | null | booking/tests/test_forms.py | bnico99/foruminternational | 8d577d3d929277b88d9c6deb1a77f90fb34ad469 | [
"BSD-3-Clause"
] | 4 | 2021-04-08T21:11:19.000Z | 2021-06-10T19:40:34.000Z | booking/tests/test_forms.py | bnico99/foruminternational | 8d577d3d929277b88d9c6deb1a77f90fb34ad469 | [
"BSD-3-Clause"
] | null | null | null | from django.test import TestCase
from booking.forms import BookingForm
import datetime
class TestForms(TestCase):
    """Validation tests for :class:`booking.forms.BookingForm`.

    Every test binds the form to a known-valid base payload with a single
    field (or small set of fields) overridden, which keeps each case focused
    on the one value it exercises.
    """

    # Baseline payload known to validate successfully; individual tests
    # override only the fields under test.
    _BASE_DATA = {
        'date': datetime.date(2020, 5, 9),
        'start_time': datetime.time(hour=8, minute=0),
        'duration': 12,
        'student': 'yes',
        'number_people': 5,
        'refrigerator': 'yes',
        'occasion': 'party',
    }

    def _form(self, **overrides):
        """Return a BookingForm bound to the base payload with *overrides*."""
        data = dict(self._BASE_DATA)
        data.update(overrides)
        return BookingForm(data=data)

    def test_valid(self):
        form = self._form()
        self.assertTrue(form.is_valid(), form.errors)

    # should not be possible because it is not selectable
    def test_invalid_start(self):
        form = self._form(start_time=datetime.time(hour=0, minute=1))
        self.assertFalse(form.is_valid(), form.errors)

    # should not be possible because it is not selectable
    def test_invalid_start2(self):
        form = self._form(start_time='11:30')
        self.assertFalse(form.is_valid(), form.errors)

    # should not be possible because it is not selectable
    def test_invalid_duration(self):
        form = self._form(
            date=datetime.date(2020, 5, 8), start_time='16:00', duration=5)
        self.assertFalse(form.is_valid(), form.errors)

    def test_valid_nostudent(self):
        form = self._form(
            date=datetime.date(2020, 5, 8), start_time='16:00',
            duration=6, student='no')
        self.assertTrue(form.is_valid(), form.errors)

    def test_valid_manypeople(self):
        form = self._form(
            date=datetime.date(2020, 5, 8), start_time='16:00',
            duration=6, student='no', number_people=500)
        self.assertTrue(form.is_valid(), form.errors)

    def test_invalid_zeropeople(self):
        form = self._form(start_time='11:00', student='no', number_people=0)
        self.assertFalse(form.is_valid(), form.errors)

    def test_invalid_minuspeople(self):
        form = self._form(start_time='11:00', student='no', number_people=-6)
        self.assertFalse(form.is_valid(), form.errors)

    def test_valid_occasion(self):
        form = self._form(
            start_time='11:00', student='no', number_people=10,
            occasion='123')
        self.assertTrue(form.is_valid(), form.errors)

    def test_invalid_nooccasion(self):
        form = self._form(
            start_time='11:00', student='no', number_people=10, occasion='')
        self.assertFalse(form.is_valid(), form.errors)
| 32.991736 | 58 | 0.515531 | 408 | 3,992 | 4.92402 | 0.154412 | 0.034843 | 0.094574 | 0.114485 | 0.893977 | 0.893977 | 0.893977 | 0.876058 | 0.870085 | 0.870085 | 0 | 0.049378 | 0.335421 | 3,992 | 120 | 59 | 33.266667 | 0.707878 | 0.038828 | 0 | 0.778846 | 0 | 0 | 0.197495 | 0 | 0 | 0 | 0 | 0 | 0.096154 | 1 | 0.096154 | false | 0 | 0.028846 | 0 | 0.134615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc4ca5a6149e831a603348d0a136d6f7ac0189d1 | 34,738 | py | Python | anmodel/channels.py | ModelDBRepository/264519 | 7d8105281b65382b8d904251966d0e130f220f13 | [
"BSD-3-Clause"
] | null | null | null | anmodel/channels.py | ModelDBRepository/264519 | 7d8105281b65382b8d904251966d0e130f220f13 | [
"BSD-3-Clause"
] | null | null | null | anmodel/channels.py | ModelDBRepository/264519 | 7d8105281b65382b8d904251966d0e130f220f13 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
This is the channel module for Averaged Neuron (AN) model.
"""
__author__ = 'Fumiya Tatsuki, Kensuke Yoshida, Tetsuya Yamada, \
Takahiro Katsumata, Shoi Shi, Hiroki R. Ueda'
__status__ = 'Published'
__version__ = '1.0.0'
__date__ = '15 May 2020'
import os
import sys
"""
LIMIT THE NUMBER OF THREADS!
change local env variables BEFORE importing numpy
"""
os.environ['OMP_NUM_THREADS'] = '1'
os.environ['NUMEXPR_NUM_THREADS'] = '1'
os.environ['MKL_NUM_THREADS'] = '1'
import numpy as np
from typing import Optional
import params
params = params.Constants()
class Base:
    """ Common state and accessors shared by every channel class.

    Parameters
    ----------
    g : float
        channel conductance
    e : float
        equilibrium (reversal) potential for the channel

    Attributes
    ----------
    g : float
        channel conductance
    e : float
        equilibrium (reversal) potential for the channel
    """
    def __init__(self, g: float, e: float) -> None:
        self.g = g
        self.e = e

    def set_g(self, new_g: float) -> None:
        """ Overwrite the channel conductance.

        Parameters
        ----------
        new_g : float
            conductance value to use from now on
        """
        self.g = new_g

    def get_g(self) -> float:
        """ Return the conductance currently in use.

        Returns
        ----------
        float
            current conductance
        """
        return self.g

    def set_e(self, new_e: float) -> None:
        """ Overwrite the equilibrium potential.

        Parameters
        ----------
        new_e : float
            equilibrium potential value to use from now on
        """
        self.e = new_e

    def get_e(self) -> float:
        """ Return the equilibrium potential currently in use.

        Returns
        ----------
        float
            current equilibrium potential
        """
        return self.e
class Leak(Base):
    """ Leak channel (sodium / potassium).

    Leak channel can be divided into leak sodium channel and leak
    potassium channel via :meth:`set_div`. Usually it doesn't have to be
    divided; if it is separated, more detailed analysis is possible.

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vL

    Attributes
    ----------
    g : float
        the channel conductance
    e : float
        equilibrium potential for the channel
    """
    # NOTE: annotation fixed from Optional[int] to Optional[float] for
    # consistency with every other channel class in this module.
    def __init__(self, g: Optional[float]=None, e: float=params.vL) -> None:
        super().__init__(g, e)

    def i(self, v: float) -> float:
        """ Calculate current that flows through the (undivided) channel.

        I = g * (v - e)

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            current that flows through the channel
        """
        return self.g * (v - self.e)

    def set_div(self, vnal: float=params.vNaL, vkl: float=params.vK) -> None:
        """ Divide the leak channel into Na leak and K leak components.

        Conductances of leak potassium channel and leak sodium channel are
        defined as:
            gkl  = gleak * (vleak - vnal) / (vkl - vnal)
            gnal = gleak * (vleak - vkl)  / (vnal - vkl)
        These definitions satisfy gleak = gkl + gnal.

        Parameters
        ----------
        vnal : float
            equilibrium potential for leak sodium channel
        vkl : float
            equilibrium potential for leak potassium channel
        """
        self.vnal = vnal
        self.vkl = vkl
        self.gnal = self.g * (self.e - self.vkl) / (self.vnal - self.vkl)
        self.gkl = self.g * (self.e - self.vnal) / (self.vkl - self.vnal)

    def set_gna(self, new_gnal: float) -> None:
        """ Set a new conductance for the leak sodium component.

        Parameters
        ----------
        new_gnal : float
            new conductance for the leak sodium channel
        """
        self.gnal = new_gnal

    def set_gk(self, new_gkl: float) -> None:
        """ Set a new conductance for the leak potassium component.

        Parameters
        ----------
        new_gkl : float
            new conductance for the leak potassium channel
        """
        self.gkl = new_gkl

    def ikl(self, v: float) -> float:
        """ Current through the leak potassium component.

        I = gkl * (v - vkl). Requires :meth:`set_div` (or the setters) to
        have been called first.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            leak potassium current
        """
        return self.gkl * (v - self.vkl)

    def inal(self, v: float) -> float:
        """ Current through the leak sodium component.

        I = gnal * (v - vnal). Requires :meth:`set_div` (or the setters) to
        have been called first.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            leak sodium current
        """
        return self.gnal * (v - self.vnal)

    def i_div(self, v: float) -> float:
        """ Total leak current as the sum of the Na and K components.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            total leak current
        """
        return self.inal(v) + self.ikl(v)
class NavHH(Base):
    """ Hodgkin-Huxley type voltage-gated sodium channel.

    Activation m is taken at steady state; inactivation h is integrated.

    Note
    ----------
    This formulation is from Compte et al., 2003

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vNa

    Attributes
    ----------
    g : float
        HH type sodium channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, sodium equilibrium potential)
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vNa) -> None:
        super().__init__(g, e)

    def am(self, v: float) -> float:
        """ Voltage-dependent opening rate 'a' for activation variable m.

        In the two state model, dm/dt = a(1-m) - bm.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            transition rate for activation states
        """
        # At v == -33 the general expression is 0/0; its limit is 1.
        if v == -33.:
            return 1.
        else:
            return 0.1 * (v+33.0) / (1.0-np.exp(-(v+33.0)/10.0))

    def bm(self, v: float) -> float:
        """ Voltage-dependent closing rate 'b' for activation variable m.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            transition rate for activation states
        """
        return 4.0 * np.exp(-(v+53.7)/12.0)

    def m_inf(self, v: float) -> float:
        """ Steady-state activation variable.

        From 0 = a(1-m) - bm, m_inf = a / (a + b).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            activation variable for the channel
        """
        return self.am(v) / (self.am(v) + self.bm(v))

    def ah(self, v: float) -> float:
        """ Voltage-dependent opening rate 'a' for inactivation variable h.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            transition rate for inactivation states
        """
        return 0.07 * np.exp(-(v+50.0)/10.0)

    def bh(self, v: float) -> float:
        """ Voltage-dependent closing rate 'b' for inactivation variable h.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            transition rate for inactivation states
        """
        return 1.0 / (1.0 + np.exp(-(v+20.0)/10.0))

    def h_inf(self, v: float) -> float:
        """ Steady-state inactivation variable.

        From 0 = a(1-h) - bh, h_inf = a / (a + b).

        Note
        ----------
        This isn't used in the AN model (h is not at steady state there).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            inactivation variable for the channel
        """
        return self.ah(v) / (self.ah(v) + self.bh(v))

    def h_tau(self, v: float) -> float:
        """ Time constant of the inactivation variable h.

        Since dh/dt = 4 * (a(1-h) - bh), the relaxation time constant is
        1 / (4 * (a + b)).  The previous implementation returned
        1/4 * (a + b), i.e. a rate rather than a time constant, which was
        also inconsistent with KvHH.n_tau.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            time constant of h
        """
        return 1.0 / (4.0 * (self.ah(v) + self.bh(v)))

    def dhdt(self, v: float, h: float) -> float:
        """ Differential equation for the inactivation variable h.

        dh/dt = phi * (a(1-h) - bh), with the temperature-like factor
        phi = 4 folded into the right-hand side.

        Parameters
        ----------
        v : float
            membrane potential
        h : float
            inactivation variable

        Returns
        ----------
        float
            dh/dt
        """
        return 4.0 * (self.ah(v)*(1-h) - self.bh(v)*h)

    def i(self, v: float, h: float) -> float:
        """ Current that flows through the channel.

        I = g * m_inf(v)^3 * h * (v - e)

        Parameters
        ----------
        v : float
            membrane potential
        h : float
            inactivation variable

        Returns
        ----------
        float
            current that flows through the channel
        """
        return self.g * (self.m_inf(v)**3) * h * (v-self.e)
class KvHH(Base):
    """ Hodgkin-Huxley type voltage-gated potassium channel (delayed rectifier).

    Note
    ----------
    This formulation is from Compte et al., 2003

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vK

    Attributes
    ----------
    g : float
        HH type potassium channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, potassium equilibrium potential)
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vK) -> None:
        super().__init__(g, e)

    def an(self, v: float) -> float:
        """ Voltage-dependent opening rate 'a' for activation variable n.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            transition rate for activation states
        """
        # The general expression is 0/0 at v == -34; its limit is 0.1.
        if v == -34.:
            return 0.1
        return 0.01 * (v+34.0) / (1.0-np.exp(-(v+34.0)/10.0))

    def bn(self, v: float) -> float:
        """ Voltage-dependent closing rate 'b' for activation variable n.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            transition rate for activation states
        """
        return 0.125 * np.exp(-(v+44.0)/25.0)

    def n_inf(self, v: float) -> float:
        """ Steady-state activation variable, n_inf = a / (a + b).

        Note
        ----------
        This isn't used in the AN model (n is not at steady state there).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            activation variable for the channel
        """
        alpha = self.an(v)
        beta = self.bn(v)
        return alpha / (alpha+beta)

    def n_tau(self, v: float) -> float:
        """ Time constant of n: 1 / (4 * (a + b)).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            time constant of n
        """
        return 1 / (4 * (self.an(v) + self.bn(v)))

    def dndt(self, v: float, n: float) -> float:
        """ Differential equation for the activation variable n.

        dn/dt = 4 * (a(1-n) - bn)

        Parameters
        ----------
        v : float
            membrane potential
        n : float
            activation variable

        Returns
        ----------
        float
            dn/dt
        """
        alpha = self.an(v)
        beta = self.bn(v)
        return 4.0 * (alpha*(1-n)-beta*n)

    def i(self, v: float, n: float) -> float:
        """ Current that flows through the channel.

        I = g * n^4 * (v - e)

        Parameters
        ----------
        v : float
            membrane potential
        n : float
            activation variable

        Returns
        ----------
        float
            current that flows through the channel
        """
        driving = v - self.e
        return self.g * n**4 * driving
class KvA(Base):
    """ Fast A-type potassium channel.

    Activation m is taken at steady state; inactivation h is integrated
    with a fixed time constant.

    Note
    ----------
    This formulation is from Compte et al., 2003

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vK
    tau : float
        time constant for the differential equation dh/dt,
        default anmodel.params.Constants.tau_a

    Attributes
    ----------
    g : float
        the channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, potassium equilibrium potential)
    tau : float
        time constant for the differential equation dh/dt
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vK,
                 tau: float=params.tau_a) -> None:
        super().__init__(g, e)
        self.tau = tau

    def m_inf(self, v: float) -> float:
        """ Steady-state activation variable (fitted Boltzmann form).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            activation variable for the channel
        """
        exponent = -(v+50.0)/20.0
        return 1.0 / (1.0 + np.exp(exponent))

    def h_inf(self, v: float) -> float:
        """ Steady-state inactivation variable (fitted Boltzmann form).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            inactivation variable for the channel
        """
        exponent = (v+80.0)/6.0
        return 1.0 / (1.0 + np.exp(exponent))

    def dhdt(self, v: float, h: float) -> float:
        """ Differential equation for the inactivation variable h.

        dh/dt = (h_inf - h) / tau

        Parameters
        ----------
        v : float
            membrane potential
        h : float
            inactivation variable

        Returns
        ----------
        float
            dh/dt
        """
        steady = self.h_inf(v)
        return (steady-h) / self.tau

    def i(self, v: float, h: float) -> float:
        """ Current that flows through the channel.

        I = g * m_inf(v)^3 * h * (v - e)

        Parameters
        ----------
        v : float
            membrane potential
        h : float
            inactivation variable

        Returns
        ----------
        float
            current that flows through the channel
        """
        m = self.m_inf(v)
        return self.g * (m**3) * h * (v-self.e)
class KvSI(Base):
    """ Slowly inactivating potassium channel (a kind of delayed rectifier).

    Despite the name, the model carries no inactivation variable; only the
    activation variable m is integrated.

    Note
    ----------
    This formulation is from Compte et al., 2003

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vK

    Attributes
    ----------
    g : float
        the channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, potassium equilibrium potential)
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vK) -> None:
        super().__init__(g, e)

    def m_inf(self, v: float) -> float:
        """ Steady-state activation variable (fitted Boltzmann form).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            activation variable for the channel
        """
        exponent = -(v+34.0)/6.5
        return 1.0 / (1.0 + np.exp(exponent))

    def m_tau(self, v: float) -> float:
        """ Voltage-dependent time constant for dm/dt.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            time constant for the differential equation dm/dt
        """
        u = (v+55.0)/30.0
        return 8.0 / (np.exp(-u) + np.exp(u))

    def dmdt(self, v: float, m: float) -> float:
        """ Differential equation for the activation variable m.

        dm/dt = (m_inf - m) / m_tau

        Parameters
        ----------
        v : float
            membrane potential
        m : float
            activation variable

        Returns
        ----------
        float
            dm/dt
        """
        steady = self.m_inf(v)
        return (steady-m) / self.m_tau(v)

    def i(self, v: float, m: float) -> float:
        """ Current that flows through the channel.

        I = g * m * (v - e)

        Parameters
        ----------
        v : float
            membrane potential
        m : float
            activation variable

        Returns
        ----------
        float
            current that flows through the channel
        """
        driving = v - self.e
        return self.g * m * driving
class Cav(Base):
    """ Voltage-gated calcium channel.

    The activation variable is always taken at steady state; there is no
    inactivation.

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vCa

    Attributes
    ----------
    g : float
        the channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, calcium equilibrium potential)
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vCa) -> None:
        super().__init__(g, e)

    def m_inf(self, v: float) -> float:
        """ Steady-state activation variable (fitted Boltzmann form).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            activation variable for the channel
        """
        exponent = -(v+20.0)/9.0
        return 1.0 / (1.0 + np.exp(exponent))

    def i(self, v: float) -> float:
        """ Current that flows through the channel.

        I = g * m_inf(v)^2 * (v - e)

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            current that flows through the channel
        """
        m = self.m_inf(v)
        return self.g * m**2 * (v-self.e)
class NaP(Base):
    """ Persistent sodium channel.

    The activation variable is always taken at steady state; there is no
    inactivation.

    Note
    ----------
    This formulation is from Compte et al., 2003

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vNa

    Attributes
    ----------
    g : float
        the channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, sodium equilibrium potential)
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vNa) -> None:
        super().__init__(g, e)

    def m_inf(self, v: float) -> float:
        """ Steady-state activation variable (fitted Boltzmann form).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            activation variable for the channel
        """
        exponent = -(v+55.7)/7.7
        return 1.0 / (1.0 + np.exp(exponent))

    def i(self, v: float) -> float:
        """ Current that flows through the channel.

        I = g * m_inf(v)^3 * (v - e)

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            current that flows through the channel
        """
        m = self.m_inf(v)
        return self.g * m**3 * (v-self.e)
class KCa(Base):
    """ Calcium-dependent potassium channel.

    Activation depends on intracellular calcium concentration rather than
    on voltage.

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vK
    kd_ca : float
        dissociation constant of calcium-dependent potassium channels

    Attributes
    ----------
    g : float
        the channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, potassium equilibrium potential)
    kd_ca : float
        dissociation constant of calcium-dependent potassium channels
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vK,
                 kd_ca: float=params.kd_ca) -> None:
        super().__init__(g, e)
        self.kd_ca = kd_ca

    def m_inf(self, ca: float) -> float:
        """ Steady-state activation as a Hill function of [Ca2+].

        Parameters
        ----------
        ca : float
            intracellular calcium concentration

        Returns
        ----------
        float
            activation variable for the channel
        """
        ratio = self.kd_ca / ca
        return 1.0 / (1.0 + ratio**(3.5))

    def i(self, v: float, ca: float) -> float:
        """ Current that flows through the channel.

        I = g * m_inf(ca) * (v - e)

        Parameters
        ----------
        v : float
            membrane potential
        ca : float
            intracellular calcium concentration

        Returns
        ----------
        float
            current that flows through the channel
        """
        m = self.m_inf(ca)
        return self.g * m * (v-self.e)
class KIR(Base):
    """ Inwardly rectifying potassium channel.

    Gated only by a steady-state inactivation variable.

    Note
    ----------
    This formulation is from Compte et al., 2003

    Parameters
    ----------
    g : float or None
        channel conductance, default None
    e : float
        equilibrium potential for the channel,
        default anmodel.params.Constants.vK

    Attributes
    ----------
    g : float
        the channel conductance
    e : float
        equilibrium potential for the channel
        (in most cases, potassium equilibrium potential)
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vK) -> None:
        super().__init__(g, e)

    def h_inf(self, v: float) -> float:
        """ Steady-state inactivation variable (fitted Boltzmann form).

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            inactivation variable for the channel
        """
        exponent = (v + 75.0)/4.0
        return 1.0/(1.0 + np.exp(exponent))

    def i(self, v: float) -> float:
        """ Current that flows through the channel.

        I = g * h_inf(v) * (v - e)

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            current that flows through the channel
        """
        h = self.h_inf(v)
        return self.g * h * (v-self.e)
class AMPAR(Base):
    """ AMPA receptor.

    The gating variable s is driven directly by the presynaptic firing
    rate f(V).

    Note
    ----------
    This formulation is from Compte et al., 2003

    Parameters
    ----------
    g : float or None
        receptor conductance, default None
    e : float
        equilibrium potential for the receptor
    s_a : float
        coefficient of f(V) in ds/dt
    s_tau : float
        time constant for the differential equation of gating variable s

    Attributes
    ----------
    g : float
        the receptor conductance
    e : float
        equilibrium potential for the receptor
    s_a : float
        coefficient of f(V) in ds/dt
    tau_a : float
        time constant for the differential equation of gating variable s
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vAMPAR,
                 s_a: float=params.s_a_ampar, s_tau: float=params.s_tau_ampar) -> None:
        super().__init__(g, e)
        self.s_a = s_a
        self.tau_a = s_tau

    def f(self, v: float) -> float:
        """ Convert presynaptic membrane potential into a firing rate.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            f(v) : firing rate
        """
        exponent = -(v-20.0)/2.0
        return 1.0 / (1.0 + np.exp(exponent))

    def dsdt(self, v: float, s: float) -> float:
        """ Differential equation for the gating variable s.

        ds/dt = a * f(Vpre) - s / tau

        Parameters
        ----------
        v : float
            membrane potential
        s : float
            gating variable

        Returns
        ----------
        float
            ds/dt
        """
        drive = self.s_a * self.f(v)
        return drive - s/self.tau_a

    def i(self, v: float, s: float) -> float:
        """ Current that flows through the receptor.

        I = g * s * (v - e)

        Parameters
        ----------
        v : float
            membrane potential
        s : float
            gating variable

        Returns
        ----------
        float
            current that flows through the receptor
        """
        driving = v - self.e
        return self.g * s * driving
class NMDAR(Base):
    """ NMDA receptor.

    The gating variable of NMDA receptor follows a two first-order
    kinetic scheme (via the second-order variable x) so that the EPSC has
    a slower rise phase and saturates at high presynaptic firing rates.

    Note
    ----------
    This formulation is from Compte et al., 2003 and Wang, 1999

    Parameters
    ----------
    g : float or None
        receptor conductance, default None
    e : float
        equilibrium potential for the receptor
    s_a : float
        coefficient of (1 - s) in ds/dt
    s_tau : float
        time constant for the differential equation of gating variable s
    x_a : float
        coefficient of f(V) in dx/dt
    x_tau : float
        time constant for the differential equation of second-order
        gating variable x
    ion : bool
        when True, the current is scaled by the extracellular Mg2+ block
        factor 1.1 / (1 + ex_mg / 8)
    ex_mg : float or None
        extracellular Mg2+ concentration; required when ion is True

    Attributes
    ----------
    g : float
        the receptor conductance
    e : float
        equilibrium potential for the receptor
    s_a, s_tau, x_a, x_tau : float
        kinetic parameters of the two first-order gating scheme
    ion : bool
        whether the Mg2+ block scaling is applied in :meth:`i`
    ex_mg : float or None
        extracellular Mg2+ concentration

    Raises
    ----------
    ValueError
        if ion is True but ex_mg is None (previously this only surfaced
        later as a TypeError inside :meth:`i`)
    """
    def __init__(self, g: Optional[float]=None, e: float=params.vNMDAR,
                 s_a: float=params.s_a_nmdar, s_tau: float=params.s_tau_nmdar,
                 x_a: float=params.x_a_nmdar, x_tau: float=params.x_tau_nmdar,
                 ion: bool=False, ex_mg: Optional[float]=None) -> None:
        super().__init__(g, e)
        self.s_a = s_a
        self.s_tau = s_tau
        self.x_a = x_a
        self.x_tau = x_tau
        self.ion = ion
        self.ex_mg = ex_mg
        # Fail fast: i() divides by (1 + ex_mg/8) when ion is True.
        if self.ion and self.ex_mg is None:
            raise ValueError('ex_mg must be given when ion=True')

    def f(self, v: float) -> float:
        """ Convert presynaptic membrane potential into a firing rate.

        Parameters
        ----------
        v : float
            membrane potential

        Returns
        ----------
        float
            f(v) : firing rate
        """
        return 1.0 / (1.0 + np.exp(-(v-20.0)/2.0))

    def dxdt(self, v: float, x: float) -> float:
        """ Differential equation for the second-order gating variable x.

        dx/dt = a * f(Vpre) - x / tau

        Parameters
        ----------
        v : float
            membrane potential
        x : float
            second-order gating variable

        Returns
        ----------
        float
            dx/dt
        """
        return self.x_a * self.f(v) - x/self.x_tau

    def dsdt(self, v: float, s: float, x: float) -> float:
        """ Differential equation for the gating variable s.

        ds/dt = a * x * (1 - s) - s / tau

        Parameters
        ----------
        v : float
            membrane potential
        s : float
            gating variable
        x : float
            second-order gating variable

        Returns
        ----------
        float
            ds/dt
        """
        return self.s_a * x * (1-s) - s/self.s_tau

    def i(self, v: float, s: float) -> float:
        """ Current that flows through the receptor.

        I = a * g * s * (v - e), where the scaling a is 1 when ion is
        False and 1.1 / (1 + ex_mg / 8) when the Mg2+ block is modelled.

        Parameters
        ----------
        v : float
            membrane potential
        s : float
            gating variable

        Returns
        ----------
        float
            current that flows through the receptor
        """
        if self.ion:
            return 1.1 / (1.0+self.ex_mg/8.0) * self.g * s * (v-self.e)
        else:
            return self.g * s * (v-self.e)
class GABAR(Base):
""" GABA receptor.
Note
----------
This formulation is from Compute et al., 2003
Parameters
----------
g : float or None
receptor conductance, default None
e : float
equiribrium potential for the receptor
s_a : float
coefficient of f(V)
tau_a : float
time constant for differential equation of gating variable s
Attributes
----------
g : float
the receptor conductance
e : float
equiribrium potential for the receptor
s_a : float
coefficient of f(V)
tau_a : float
time constant for differential equation of gating variable s
"""
def __init__(self, g: Optional[float]=None, e: float=params.vGABAR,
s_a: float=params.s_a_gabar, s_tau: float=params.s_tau_gabar) -> None:
super().__init__(g, e)
self.s_a = s_a
self.s_tau = s_tau
def f(self, v: float) -> float:
""" Function that converts membrane potential into firing rate.
Parameters
----------
v : float
membrane potential
Returns
----------
float
f(v) : firing rate
"""
return 1.0 / (1.0 + np.exp(-(v-20.0)/2.0))
def dsdt(self, v: float, s: float) -> float:
""" Differential equation for gating variable s.
ds/dt = af(Vpre) - s/tau
Parameters
----------
v : float
membrane potential
s : float
gating variable
Returns
----------
float
ds/dt
"""
return self.s_a * self.f(v) - s/self.s_tau
def i(self, v: float, s: float) -> float:
""" Calculate current that flows by the receptor.
I = g * s * (v - e).
g : gating variable
v : membrane potential
e : equiribrium potential (for potassium ion)
Parameters
----------
v : float
membrane potential
s : float
gating varriable
Returns
----------
float
current that flows by the receptor
"""
return self.g * s * (v-self.e)
| 26.256992 | 88 | 0.507772 | 3,860 | 34,738 | 4.507772 | 0.083679 | 0.02931 | 0.025287 | 0.056552 | 0.828103 | 0.786667 | 0.761034 | 0.740115 | 0.714828 | 0.705057 | 0 | 0.012701 | 0.381254 | 34,738 | 1,322 | 89 | 26.276853 | 0.796827 | 0.547009 | 0 | 0.312169 | 0 | 0 | 0.009112 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.343915 | false | 0 | 0.026455 | 0.015873 | 0.703704 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
fc86bbf26d4c490a903f1d61ac780bbdd12acb53 | 2,016 | py | Python | _unittests/test_jspy/test_render_nb_js_dot.py | sdpython/jyquickhelper | b8b106ada7f8b606d4f152e186a3343b6c4ab2bb | [
"MIT"
] | 3 | 2020-03-06T23:17:14.000Z | 2021-10-16T05:51:21.000Z | _unittests/test_jspy/test_render_nb_js_dot.py | sdpython/jyquickhelper | b8b106ada7f8b606d4f152e186a3343b6c4ab2bb | [
"MIT"
] | 9 | 2016-12-07T10:28:01.000Z | 2021-10-16T10:45:43.000Z | _unittests/test_jspy/test_render_nb_js_dot.py | sdpython/jyquickhelper | b8b106ada7f8b606d4f152e186a3343b6c4ab2bb | [
"MIT"
] | null | null | null | """
@brief test log(time=2s)
"""
import unittest
from pyquickhelper.loghelper import fLOG
from jyquickhelper import RenderJsDot
class TestRenderNbJsDot(unittest.TestCase):
def test_render_nb_js_dot(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
f = RenderJsDot(dot='digraph{ a -> b; }')
assert f
if hasattr(f, "_ipython_display_"):
f._ipython_display_()
else:
f._repr_html_()
f = RenderJsDot(dot='digraph{ a -> b; }', only_html=True)
out = f._repr_html_()
self.assertIn('var svgGraph = Viz("', out)
self.assertNotIn('None', out)
def test_render_nb_js_dot_api(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
f = RenderJsDot(dot='digraph{ a -> b; }', only_html=True)
out = f._repr_html_()
self.assertIn('var svgGraph = Viz("', out)
self.assertNotIn('None', out)
def test_render_nb_js_dot_local(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
f = RenderJsDot(dot='digraph{ a -> b; }')
assert f
if hasattr(f, "_ipython_display_"):
f._ipython_display_()
else:
f._repr_html_()
f = RenderJsDot(dot='digraph{ a -> b; }', only_html=True, local=True)
out = f._repr_html_()
self.assertIn('var svgGraph = Viz("', out)
self.assertNotIn('None', out)
def test_render_nb_js_dot_api_local(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
f = RenderJsDot(dot='digraph{ a -> b; }', only_html=True, local=True)
out = f._repr_html_()
self.assertIn('var svgGraph = Viz("', out)
self.assertNotIn('None', out)
if __name__ == "__main__":
unittest.main()
| 28 | 77 | 0.569444 | 221 | 2,016 | 4.678733 | 0.226244 | 0.069633 | 0.087041 | 0.12766 | 0.843327 | 0.843327 | 0.823985 | 0.823985 | 0.823985 | 0.823985 | 0 | 0.000713 | 0.304067 | 2,016 | 71 | 78 | 28.394366 | 0.736279 | 0.014385 | 0 | 0.814815 | 0 | 0 | 0.140475 | 0 | 0 | 0 | 0 | 0 | 0.185185 | 1 | 0.074074 | false | 0 | 0.055556 | 0 | 0.148148 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc99cd83b20bb770fd6cdfd1171db6d2cf7fa64d | 1,260 | py | Python | tests/traces/test_trace.py | matan1008/pykdebugparser | e219c2434d012b935ee25f75571647aaed1a4dda | [
"MIT"
] | 10 | 2021-06-17T14:07:38.000Z | 2021-12-09T18:33:48.000Z | tests/traces/test_trace.py | matan1008/pykdebugparser | e219c2434d012b935ee25f75571647aaed1a4dda | [
"MIT"
] | null | null | null | tests/traces/test_trace.py | matan1008/pykdebugparser | e219c2434d012b935ee25f75571647aaed1a4dda | [
"MIT"
] | 3 | 2021-06-22T13:01:59.000Z | 2021-06-27T03:35:04.000Z | from pykdebugparser.kevent import Kevent
def test_trace_data_thread_terminate(traces_parser):
events = [
Kevent(timestamp=1805581011060,
data=(b'z\x1b\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'),
values=(269178, 0, 0, 0), tid=479, debugid=117440524, eventid=117440524, func_qualifier=0)
]
traces_parser.threads_pids[269178] = 61
traces_parser.tids_names[269178] = 'terminated thread'
ret = list(traces_parser.feed_generator(events))
assert len(ret) == 1
assert str(ret[0]) == 'Thread terminated tid: 269178, pid: 61, name: terminated thread'
def test_trace_data_thread_terminate_missing_tid(traces_parser):
events = [
Kevent(timestamp=1805581011060,
data=(b'z\x1b\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'),
values=(269178, 0, 0, 0), tid=479, debugid=117440524, eventid=117440524, func_qualifier=0)
]
ret = list(traces_parser.feed_generator(events))
assert len(ret) == 1
assert str(ret[0]) == 'Thread terminated tid: 269178'
| 45 | 105 | 0.661111 | 185 | 1,260 | 4.383784 | 0.254054 | 0.399507 | 0.554871 | 0.680641 | 0.821208 | 0.821208 | 0.74476 | 0.74476 | 0.74476 | 0.74476 | 0 | 0.23913 | 0.196825 | 1,260 | 27 | 106 | 46.666667 | 0.562253 | 0 | 0 | 0.608696 | 0 | 0.173913 | 0.284921 | 0.198413 | 0 | 0 | 0 | 0 | 0.173913 | 1 | 0.086957 | false | 0 | 0.043478 | 0 | 0.130435 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
fc9beeda36a26f68e7697c42fdf870e2feafea59 | 2,289 | py | Python | Exercícios/Ex.58.py | mattheuslima/Projetos-Curso_Python | ab4cab98fe69b70245b5bcf41edd0febe823ac6a | [
"MIT"
] | null | null | null | Exercícios/Ex.58.py | mattheuslima/Projetos-Curso_Python | ab4cab98fe69b70245b5bcf41edd0febe823ac6a | [
"MIT"
] | null | null | null | Exercícios/Ex.58.py | mattheuslima/Projetos-Curso_Python | ab4cab98fe69b70245b5bcf41edd0febe823ac6a | [
"MIT"
] | null | null | null | #Melhore o jogo do DESAFIO 28 onde o computador vai “pensar” em um número entre 0 e 10. Só que agora o jogador vai tentar adivinhar até acertar, mostrando no final quantos palpites foram necessários para vencer.
from random import randint
from time import sleep
print('-='*10)
print('{:=^20}'.format('Desafio 58'))
print('-='*10)
compt=randint(0,10)
player=int(input('\nEstou pensando em um número de 0 - 10.\nTente advinhar qual é: '))
#Caso o player coloque um valor inválido ele cai nesse while
while player>10 or player<0:
print('=' * len('Você colocou um número inválido.'))
print('Você colocou um número inválido.')
print('=' * len('Você colocou um número inválido.'))
player = int(input('\nEstou pensando em um número de 0 - 10.\nTente advinhar qual é: '))
print('\n\nVamos ver se você acertou...')
sleep(1)
while player!=compt:
print('=' * 30)
print('Você errou, tente novamente.\nEu pensei em {} e você em {}.'.format(compt,player))
compt = randint(0, 10)
print('=' * 30)
player=int(input('\nEstou pensando em um número de 0 - 10.\nVocê é capaz de advinhar qual é ?'))
#Caso o player coloque um valor inválido ele cai nesse while
while player>10 or player<0:
print('=' * len('Você colocou um número inválido.'))
print('Você colocou um número inválido.')
print('=' * len('Você colocou um número inválido.'))
print('=' * len('\nEstou pensando em um número de 0 - 10.\nTente advinhar qual é: '))
player = int(input('\nEstou pensando em um número de 0 - 10.\nTente advinhar qual é: '))
print('=' * len('\nEstou pensando em um número de 0 - 10.\nTente advinhar qual é: '))
print('\n\nVamos ver se você acertou...')
sleep(1)
while player != compt:
print('=' * 30)
print('\nVocê errou, tente novamente.\nEu pensei em {} e você em {}.'.format(compt, player))
compt = randint(0, 10)
print('=' * 30)
print('=' * len('\nEstou pensando em um número de 0 - 10.\nTente advinhar qual é: '))
player = int(input('\nEstou pensando em um número de 0 - 10.\nVocê é capaz de advinhar qual é ?'))
print('=' * len('\nEstou pensando em um número de 0 - 10.\nTente advinhar qual é: '))
print('\nVocê finalmente conseguiu!\nEu pensei em {} e você em {}.'.format(compt,player))
| 42.388889 | 211 | 0.657492 | 350 | 2,289 | 4.3 | 0.237143 | 0.08505 | 0.066445 | 0.107641 | 0.803987 | 0.803987 | 0.803987 | 0.803987 | 0.803987 | 0.780731 | 0 | 0.035442 | 0.198777 | 2,289 | 53 | 212 | 43.188679 | 0.785169 | 0.143294 | 0 | 0.842105 | 0 | 0 | 0.548008 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.052632 | 0 | 0.052632 | 0.578947 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
fc9d3b7d7b44c5946d590c4279707c22931c3d37 | 26,670 | py | Python | scripts/DR_Paired_db_SG41_52.py | hhuang2018/HLAWholeGeneAnalysis | 9cdd2e062a6cc2eed2ebfa84e1888687b2b98cf3 | [
"MIT"
] | 2 | 2018-03-28T19:06:40.000Z | 2020-08-06T08:32:09.000Z | scripts/DR_Paired_db_SG41_52.py | hhuang2018/HLAWholeGeneAnalysis | 9cdd2e062a6cc2eed2ebfa84e1888687b2b98cf3 | [
"MIT"
] | null | null | null | scripts/DR_Paired_db_SG41_52.py | hhuang2018/HLAWholeGeneAnalysis | 9cdd2e062a6cc2eed2ebfa84e1888687b2b98cf3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
"""
import glob
import sqlite3 as sql
from utils import phase_block_check as ps
from utils import IMGTdbIO
__author__ = "Hu Huang"
__copyright__ = "Copyright 2017, Hu Huang"
__credits__ = ["Add names"]
__license__ = "GPL"
__version__ = "0.1-dev"
__maintainer__ = "Hu Huang"
__email__ = "hwangtiger@gmail.com"
###############
# Class I
###############
#locus = "A"
# --- HLA class I: open the per-locus "available sequences" database ------
all_DB_files = glob.glob("../Output/SG41_52/2018/IMGTv3310/AvailDB/*.db")
# Index chosen by inspecting the glob result (1:A, 3:C, 4:B) -- glob order
# is OS-dependent, so confirm the index before each run.
db_file = all_DB_files[2]
locus = db_file.split('_')[4]  # locus name is the 5th '_'-separated token

conn = sql.connect(db_file)  # automatically creates a file if doesn't exist
conn.row_factory = sql.Row   # rows behave like {columnName: value} mappings
cursor = conn.cursor()

# Distinct BMT case IDs present in this locus database.
All_caseID_cursor = cursor.execute('SELECT BMT_caseID FROM OriginalSeqs')
All_caseIDs_tuples = All_caseID_cursor.fetchall()
All_caseIDs = list(set(row[0] for row in All_caseIDs_tuples))
print(db_file + " has " + str(len(All_caseIDs)) + " cases.")

# Cross-reference table: BMT case ID -> NMDP donor/recipient IDs.
BMTcaseInfo_fp = "../../rawData/SG41-52_caseID.csv"
BMT_IDtable = ps.readBMTinfo(BMTcaseInfo_fp)

# Per-locus preprocessing log file.
logf = open("../Output/SG41_52/2018/IMGTv3310/logfiles_preprocessing/HLA_" + locus + "_preprocess.txt", "w")
logf.write(db_file + " has " + str(len(All_caseIDs)) + " cases.\n\n")

available_records = {}  # BMTcase -> paired donor/recipient sequence record
counter = 0             # number of problems written to the log
# -------------------------------------------------------------------------
# Class I pairing pass.  For each BMT case: fetch its donor ('D') and
# recipient ('R') rows, filter out targeted/partial phase sets, then pair
# recipient and donor phase sets by identical GL-string (HLATyping).
# Results accumulate in available_records[BMTcase] under 'PS1'/'PS2' with a
# 'QC' status string; anomalies are appended to the log file and counted.
# -------------------------------------------------------------------------
for BMTcase in All_caseIDs:
    query_c = cursor.execute('SELECT * FROM OriginalSeqs WHERE BMT_caseID=?', (BMTcase, ))
    all_query = query_c.fetchall()
    # Split this case's rows into donor and recipient phase-set records.
    Donor = []
    Recipient = []
    for line in all_query:
        if line['DRtype'] == 'D':
            Donor.append({'HLATyping': line['HLATyping'], 'Block1':line['Block1'], 'PS': line['PS'], 'Block2': line['Block2']})
        elif line['DRtype'] == 'R':
            Recipient.append({'HLATyping': line['HLATyping'], 'Block1': line['Block1'], 'PS': line['PS'], 'Block2': line['Block2']})
    # More than two phase sets on either side is unexpected: log only.
    if len(Donor)>2 :
        logf.write(BMTcase + " has more donor sequences. Please double check!\n")
        counter += 1
    if len(Recipient)>2 :
        logf.write(BMTcase + " has more recipient sequences. Please double check!\n")
        counter += 1
    # A lone phase set that is 'PS3' or carries a Block2 sequence is a
    # targeted (partial) sequence only -- drop that whole side.
    if len(Donor)==1 and (Donor[0]['PS'] == 'PS3' or Donor[0]['Block2'] != ''):
        logf.write(BMTcase + " ONLY has targeted sequences. Please double check!\n")
        counter += 1
        Donor = []
    if len(Recipient)==1 and (Recipient[0]['PS'] == 'PS3' or Recipient[0]['Block2'] != ''):
        logf.write(BMTcase + " ONLY has targeted sequences. Please double check!\n")
        counter += 1
        Recipient = []
    if len(Donor) != 0 and len(Recipient) != 0:
        # NOTE(review): the pairing below indexes Donor[1]/Recipient[1], so
        # it implicitly assumes exactly two phase sets per side; a side left
        # with a single non-targeted phase set would raise IndexError.  The
        # class II pass guards with len(...) == 2 -- confirm intent here.
        if BMTcase not in available_records.keys():
            # 'line' still holds the last row fetched for this case; the
            # case-level audit fields are taken from that row.
            available_records[BMTcase] = {'Active': line['Active'], 'Audit':line['Audit'], 'Comment': line['Comment'], 'QC': ''}
        if Recipient[0]['HLATyping'] == Donor[0]['HLATyping']: # first pair 1 vs 1
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[0]['HLATyping'], 'Recipient': Recipient[0]['Block1'], 'Donor': Donor[0]['Block1']}
            # Class I sequences should be single-block; an alphabetic Block2
            # is flagged in QC.  NOTE(review): the R-branch *assigns* QC
            # (overwriting any earlier value) while the D-branch appends --
            # presumably intentional, but verify.
            if Recipient[0]['Block2'].isalpha() or Donor[0]['Block2'].isalpha():
                if Recipient[0]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = '; R:Unexpected Block2 Seq'
                if Donor[0]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + '; D:Unexpected Block2 Seq'
            else:
                available_records[BMTcase]['QC'] = 'PASS'
            if Recipient[1]['HLATyping'] == Donor[1]['HLATyping']: # second pair
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Recipient[1]['Block1'], 'Donor': Donor[1]['Block1']}
                if Recipient[1]['Block2'].isalpha() or Donor[1]['Block2'].isalpha():
                    if Recipient[1]['Block2'].isalpha():
                        available_records[BMTcase]['QC'] = '; R:Unexpected Block2 Seq'
                    if Donor[1]['Block2'].isalpha():
                        available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + '; D:Unexpected Block2 Seq'
                else:
                    available_records[BMTcase]['QC'] = 'PASS'
            else:
                # Second pair's GL-strings disagree: record both typings.
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'] + "+" + Donor[1]['HLATyping'], 'Recipient': Recipient[1]['Block1'], 'Donor': Donor[1]['Block1']}
                available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        elif Recipient[0]['HLATyping'] == Donor[1]['HLATyping']: # first pair: 1 vs 2
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[0]['HLATyping'], 'Recipient': Recipient[0]['Block1'], 'Donor': Donor[1]['Block1']}
            if Recipient[0]['Block2'].isalpha() or Donor[1]['Block2'].isalpha():
                if Recipient[0]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = '; R:Unexpected Block2 Seq;'
                if Donor[1]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + '; D:Unexpected Block2 Seq'
            else:
                available_records[BMTcase]['QC'] = 'PASS'
            if Recipient[1]['HLATyping'] == Donor[0]['HLATyping']: # second pair
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Recipient[1]['Block1'], 'Donor': Donor[0]['Block1']}
                if Recipient[1]['Block2'].isalpha() or Donor[0]['Block2'].isalpha():
                    if Recipient[1]['Block2'].isalpha():
                        available_records[BMTcase]['QC'] = '; R:Unexpected Block2 Seq;'
                    if Donor[0]['Block2'].isalpha():
                        available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + '; D:Unexpected Block2 Seq'
                else:
                    available_records[BMTcase]['QC'] = 'PASS'
            else:
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'] + "+" + Donor[0]['HLATyping'], 'Recipient': Recipient[1]['Block1'], 'Donor': Donor[0]['Block1']}
                available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        elif Recipient[1]['HLATyping'] == Donor[0]['HLATyping']: # first pair: 2 vs 1
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Recipient[1]['Block1'], 'Donor': Donor[0]['Block1']}
            if Recipient[1]['Block2'].isalpha() or Donor[0]['Block2'].isalpha():
                if Recipient[1]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = '; R:Unexpected Block2 Seq;'
                if Donor[0]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + '; D:Unexpected Block2 Seq'
            else:
                available_records[BMTcase]['QC'] = 'PASS'
            if Recipient[0]['HLATyping'] == Donor[1]['HLATyping']: # second pair
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[0]['HLATyping'], 'Recipient': Recipient[0]['Block1'], 'Donor': Donor[1]['Block1']}
                if Recipient[0]['Block2'].isalpha() or Donor[1]['Block2'].isalpha():
                    if Recipient[0]['Block2'].isalpha():
                        available_records[BMTcase]['QC'] = '; R:Unexpected Block2 Seq;'
                    if Donor[1]['Block2'].isalpha():
                        available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + '; D:Unexpected Block2 Seq'
                else:
                    available_records[BMTcase]['QC'] = 'PASS'
            else:
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[0]['HLATyping'] + "+" + Donor[1]['HLATyping'], 'Recipient': Recipient[0]['Block1'], 'Donor': Donor[1]['Block1']}
                available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        elif Recipient[1]['HLATyping'] == Donor[1]['HLATyping']: # first pair: 2 vs 2
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Recipient[1]['Block1'], 'Donor': Donor[1]['Block1']}
            if Recipient[1]['Block2'].isalpha() or Donor[1]['Block2'].isalpha():
                if Recipient[1]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = '; R:Unexpected Block2 Seq;'
                if Donor[1]['Block2'].isalpha():
                    available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + '; D:Unexpected Block2 Seq'
            else:
                available_records[BMTcase]['QC'] = 'PASS'
            # Remaining pair (R0/D0) necessarily mismatches here.
            available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[0]['HLATyping']+ "+" +Donor[0]['HLATyping'], 'Recipient': Recipient[0]['Block1'], 'Donor': Donor[0]['Block1']}
            available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        else:
            # No GL-string agrees in any orientation: keep both pairings
            # positionally (R0/D0 -> PS1, R1/D1 -> PS2) and flag each.
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[0]['HLATyping']+ "+" +Donor[0]['HLATyping'], 'Recipient': Recipient[0]['Block1'], 'Donor': Donor[0]['Block1']}
            available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
            available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping']+ "+" +Donor[1]['HLATyping'], 'Recipient': Recipient[1]['Block1'], 'Donor': Donor[1]['Block1']}
            available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
    else:
        # One side is missing entirely: log which side, with the NMDP ID.
        BMTtb_index = BMT_IDtable['BMTcase'].index(BMTcase)
        if len(Donor) == 0:
            logf.write('Case ID\t' + BMTcase + ': Donor Info (DID: ' + BMT_IDtable['NMDP_DID'][BMTtb_index] + ' )is missing.\n')
            counter += 1
        if len(Recipient) == 0:
            logf.write('Case ID\t' + BMTcase + ': Recipient Info(RID '+ BMT_IDtable['NMDP_RID'][BMTtb_index] +' ) is missing.\n')
            counter += 1
# Summarize and release the class I input database.
logf.write('\nTotal missing values:\t' + str(counter) + '\n')
logf.close()
conn.close()
# -------------------------------------------------------------------------
# Persist the paired class I records: one SQLite row per phase set per BMT
# case, then pickle the whole available_records dict for downstream use.
# -------------------------------------------------------------------------
output = "../Output/SG41_52/2018/IMGTv3310/SG41_52_DRpairs/"
filename = output + "SG41_52_HLA_" + locus + "_paired.db"
# The target file depends only on `locus`, so open the connection and create
# the table once, instead of reconnecting for every case as before.
conn = sql.connect(filename)  # automatically creates a file if doesn't exist
cursor = conn.cursor()
cursor.execute('''CREATE TABLE IF NOT EXISTS OriginalSeqs
             (BMT_caseID text, Audit text, Active text, Comment text,
             QC text, HLATyping text, PS text, Donor text, Recipient text)''')
for BMTcase, case_SeqInfo in available_records.items():
    # Case-level metadata shared by both phase-set rows.
    BMT_caseID = str(BMTcase)
    Audit = str(case_SeqInfo["Audit"])
    Active = str(case_SeqInfo["Active"])
    Comment = str(case_SeqInfo["Comment"])
    QC = str(case_SeqInfo['QC'])
    # One row per phase set.
    PS_set = ['PS1', 'PS2']
    for PS in PS_set:
        HLATyping = case_SeqInfo[PS]['HLATyping']
        Donor = case_SeqInfo[PS]['Donor']
        Recipient = case_SeqInfo[PS]['Recipient']
        record = (BMT_caseID, Audit, Active, Comment, QC, HLATyping, PS, Donor, Recipient, )
        cursor.execute('INSERT INTO OriginalSeqs VALUES (?,?,?,?,?,?,?,?,?)', record)
# Single commit/close after all cases are inserted.
conn.commit()
conn.close()
# Pickle the full record dictionary next to the database.
fname = output + 'SG41_52_HLA_' + locus + '_paired'
IMGTdbIO.save_dict2pickle(available_records, fname)
#################
# Class II
#################
# --- HLA class II: open the per-locus "available sequences" database -----
all_DB_files = glob.glob("../Output/SG41_52/2018/IMGTv3310/AvailDB/*.db")
# Index chosen by inspecting the glob result (0:DPB1, 2:DRB1, 5:DQB1) --
# glob order is OS-dependent, so confirm the index before each run.
db_file = all_DB_files[4]
locus = db_file.split('_')[4]  # locus name is the 5th '_'-separated token

conn = sql.connect(db_file)  # automatically creates a file if doesn't exist
conn.row_factory = sql.Row   # rows behave like {columnName: value} mappings
cursor = conn.cursor()

# Distinct BMT case IDs present in this locus database.
All_caseID_cursor = cursor.execute('SELECT BMT_caseID FROM OriginalSeqs')
All_caseIDs_tuples = All_caseID_cursor.fetchall()
All_caseIDs = list(set(row[0] for row in All_caseIDs_tuples))
print(db_file + " has " + str(len(All_caseIDs)) + " cases.")

# Cross-reference table: BMT case ID -> NMDP donor/recipient IDs.
BMTcaseInfo_fp = "../../rawData/SG41-52_caseID.csv"
BMT_IDtable = ps.readBMTinfo(BMTcaseInfo_fp)

available_records = {}  # BMTcase -> paired donor/recipient sequence record

# Per-locus preprocessing log file.
logf = open("../Output/SG41_52/2018/IMGTv3310/logfiles_preprocessing/HLA_" + locus + "_preprocess.txt", "w")
logf.write(db_file + " has " + str(len(All_caseIDs)) + " cases.\n\n")

counter = 0  # number of problems written to the log
# -------------------------------------------------------------------------
# Class II pairing pass.  Same idea as the class I loop, with two
# differences: (1) class II phase sets may legitimately span two sequence
# blocks, which are joined as Block1 + 10 dashes + Block2; (2) pairing only
# proceeds when both sides have exactly two phase sets.
# -------------------------------------------------------------------------
for BMTcase in All_caseIDs:
    query_c = cursor.execute('SELECT * FROM OriginalSeqs WHERE BMT_caseID=?', (BMTcase, ))
    all_query = query_c.fetchall()
    # Split this case's rows into donor and recipient phase-set records.
    Donor = []
    Recipient = []
    for line in all_query:
        if line['DRtype'] == 'D':
            Donor.append({'HLATyping': line['HLATyping'], 'Block1':line['Block1'], 'PS': line['PS'], 'Block2': line['Block2']})
        elif line['DRtype'] == 'R':
            Recipient.append({'HLATyping': line['HLATyping'], 'Block1': line['Block1'], 'PS': line['PS'], 'Block2': line['Block2']})
    # More than two phase sets on either side is unexpected: log only.
    if len(Donor)>2 :
        logf.write(BMTcase + " has more donor sequences. Please double check!\n")
        counter += 1
    if len(Recipient)>2 :
        logf.write(BMTcase + " has more recipient sequences. Please double check!\n")
        counter += 1
    # A lone 'PS3' or placeholder-only phase set is a targeted (partial)
    # sequence -- drop that whole side.
    if len(Donor)==1 and (Donor[0]['PS'] == 'PS3' or Donor[0]['Block1'] == 'No sequence for nomakes'):
        logf.write(BMTcase + " ONLY has targeted sequences. Please double check!\n")
        counter += 1
        Donor = []
    if len(Recipient)==1 and (Recipient[0]['PS'] == 'PS3' or Recipient[0]['Block1'] == 'No sequence for nomakes'):
        logf.write(BMTcase + " ONLY has targeted sequences. Please double check!\n")
        counter += 1
        Recipient = []
    if len(Donor) == 2 and len(Recipient) == 2:
        if BMTcase not in available_records.keys():
            # 'line' still holds the last row fetched for this case; the
            # case-level audit fields are taken from that row.
            available_records[BMTcase] = {'Active': line['Active'], 'Audit':line['Audit'], 'Comment': line['Comment'], 'QC': ''}
        # NOTE(review): QC is *assigned* 'PASS' after each matched pair, so
        # a later match can overwrite an earlier mismatch note -- confirm
        # this is the intended precedence.
        if Recipient[0]['HLATyping'] == Donor[0]['HLATyping']: # first pair 1 vs 1
            if Recipient[0]['Block2'].isalpha():
                Rec_seq = Recipient[0]['Block1'] + '----------' + Recipient[0]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[0]['Block1']
            if Donor[0]['Block2'].isalpha():
                Don_seq = Donor[0]['Block1'] + '----------' + Donor[0]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[0]['Block1']
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[0]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
            available_records[BMTcase]['QC'] = 'PASS'
            ## second pair
            if Recipient[1]['Block2'].isalpha():
                Rec_seq = Recipient[1]['Block1'] + '----------' + Recipient[1]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[1]['Block1']
            if Donor[1]['Block2'].isalpha():
                Don_seq = Donor[1]['Block1'] + '----------' + Donor[1]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[1]['Block1']
            if Recipient[1]['HLATyping'] == Donor[1]['HLATyping']: # second pair
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
                available_records[BMTcase]['QC'] = 'PASS'
            else:
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'] + "+" + Donor[1]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
                available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        elif Recipient[0]['HLATyping'] == Donor[1]['HLATyping']: # first pair: 1 vs 2
            if Recipient[0]['Block2'].isalpha():
                Rec_seq = Recipient[0]['Block1'] + '----------' + Recipient[0]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[0]['Block1']
            if Donor[1]['Block2'].isalpha():
                Don_seq = Donor[1]['Block1'] + '----------' + Donor[1]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[1]['Block1']
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[0]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
            available_records[BMTcase]['QC'] = 'PASS'
            # second pair
            if Recipient[1]['Block2'].isalpha():
                Rec_seq = Recipient[1]['Block1'] + '----------' + Recipient[1]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[1]['Block1']
            if Donor[0]['Block2'].isalpha():
                Don_seq = Donor[0]['Block1'] + '----------' + Donor[0]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[0]['Block1']
            if Recipient[1]['HLATyping'] == Donor[0]['HLATyping']: # second pair
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
                available_records[BMTcase]['QC'] = 'PASS'
            else:
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping'] + "+" + Donor[0]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
                available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        elif Recipient[1]['HLATyping'] == Donor[0]['HLATyping']: # first pair: 2 vs 1
            if Recipient[1]['Block2'].isalpha():
                Rec_seq = Recipient[1]['Block1'] + '----------' + Recipient[1]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[1]['Block1']
            if Donor[0]['Block2'].isalpha():
                Don_seq = Donor[0]['Block1'] + '----------' + Donor[0]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[0]['Block1']
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
            available_records[BMTcase]['QC'] = 'PASS'
            # second pair
            if Recipient[0]['Block2'].isalpha():
                Rec_seq = Recipient[0]['Block1'] + '----------' + Recipient[0]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[0]['Block1']
            if Donor[1]['Block2'].isalpha():
                Don_seq = Donor[1]['Block1'] + '----------' + Donor[1]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[1]['Block1']
            if Recipient[0]['HLATyping'] == Donor[1]['HLATyping']: # second pair
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[0]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
                available_records[BMTcase]['QC'] = 'PASS'
            else:
                available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[0]['HLATyping'] + "+" + Donor[1]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
                available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        elif Recipient[1]['HLATyping'] == Donor[1]['HLATyping']: # first pair: 2 vs 2
            if Recipient[1]['Block2'].isalpha():
                Rec_seq = Recipient[1]['Block1'] + '----------' + Recipient[1]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[1]['Block1']
            if Donor[1]['Block2'].isalpha():
                Don_seq = Donor[1]['Block1'] + '----------' + Donor[1]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[1]['Block1']
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[1]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
            available_records[BMTcase]['QC'] = 'PASS'
            # second pair (R0/D0 necessarily mismatches here)
            if Recipient[0]['Block2'].isalpha():
                Rec_seq = Recipient[0]['Block1'] + '----------' + Recipient[0]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[0]['Block1']
            if Donor[0]['Block2'].isalpha():
                Don_seq = Donor[0]['Block1'] + '----------' + Donor[0]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[0]['Block1']
            available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[0]['HLATyping']+ "+" +Donor[0]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
            available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
        else: # both pairs don't match
            # Keep both pairings positionally (R0/D0 -> PS1, R1/D1 -> PS2).
            if Recipient[0]['Block2'].isalpha():
                Rec_seq = Recipient[0]['Block1'] + '----------' + Recipient[0]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[0]['Block1']
            if Donor[0]['Block2'].isalpha():
                Don_seq = Donor[0]['Block1'] + '----------' + Donor[0]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[0]['Block1']
            available_records[BMTcase]['PS1'] = {'HLATyping': Recipient[0]['HLATyping']+ "+" +Donor[0]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
            available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
            if Recipient[1]['Block2'].isalpha():
                Rec_seq = Recipient[1]['Block1'] + '----------' + Recipient[1]['Block2'] # 10 '-' between two blocks
            else:
                Rec_seq = Recipient[1]['Block1']
            if Donor[1]['Block2'].isalpha():
                Don_seq = Donor[1]['Block1'] + '----------' + Donor[1]['Block2'] # 10 '-' between two blocks
            else:
                Don_seq = Donor[1]['Block1']
            available_records[BMTcase]['PS2'] = {'HLATyping': Recipient[1]['HLATyping']+ "+" +Donor[1]['HLATyping'], 'Recipient': Rec_seq, 'Donor': Don_seq}
            available_records[BMTcase]['QC'] = available_records[BMTcase]['QC'] + "; D-R pair GL-string doesn't match."
    elif len(Donor) == 0 or len(Recipient) == 0:
        # One side missing entirely: log which side, with the NMDP ID.
        BMTtb_index = BMT_IDtable['BMTcase'].index(BMTcase)
        if len(Donor) == 0:
            logf.write('Case ID\t' + BMTcase + ': Donor Info (DID: ' + BMT_IDtable['NMDP_DID'][BMTtb_index] + ' )is missing.\n')
            counter += 1
        if len(Recipient) == 0:
            logf.write('Case ID\t' + BMTcase + ': Recipient Info(RID '+ BMT_IDtable['NMDP_RID'][BMTtb_index] +' ) is missing.\n')
            counter += 1
    elif len(Donor) == 1 or len(Recipient) == 1:
        # Only one (non-targeted) phase set on a side: log, don't pair.
        BMTtb_index = BMT_IDtable['BMTcase'].index(BMTcase)
        if len(Donor) == 1:
            logf.write('Case ID\t' + BMTcase + ': Donor (DID: ' + BMT_IDtable['NMDP_DID'][BMTtb_index] + ' ) has only one PS.\n')
            counter += 1
        if len(Recipient) == 1:
            logf.write('Case ID\t' + BMTcase + ': Recipient (RID '+ BMT_IDtable['NMDP_RID'][BMTtb_index] +' ) has only one PS.\n')
            counter += 1
    else:
        # e.g. more than two phase sets on a side.
        logf.write('Case ID\t' + BMTcase + ' has irregular number of sequences. Please double check.\n')
        counter += 1
# Summarize and release the class II input database.
logf.write('\nTotal missing values:\t' + str(counter) + '\n')
logf.close()
conn.close()
output = "../Output/SG41_52/2018/IMGTv3310/SG41_52_DRpairs/"
for BMTcase, case_SeqInfo in available_records.items():
filename = output + "SG41_52_HLA_" + locus + "_paired.db"
# original sequence table
conn = sql.connect(filename) # automatically creates a file if doesn't exist
cursor = conn.cursor()
cursor.execute('''CREATE TABLE IF NOT EXISTS OriginalSeqs
(BMT_caseID text, Audit text, Active text, Comment text,
QC text, HLATyping text, PS text, Donor text, Recipient text)''')
BMT_caseID = str(BMTcase)
Audit = str(case_SeqInfo["Audit"])
Active = str(case_SeqInfo["Active"])
Comment = str(case_SeqInfo["Comment"])
QC = str(case_SeqInfo['QC'])
# PS1
PS_set = ['PS1', 'PS2']
for PS in PS_set:
HLATyping = case_SeqInfo[PS]['HLATyping']
Donor = case_SeqInfo[PS]['Donor']
Recipient = case_SeqInfo[PS]['Recipient']
record = (BMT_caseID, Audit, Active, Comment, QC, HLATyping, PS, Donor, Recipient, )
cursor.execute('INSERT INTO OriginalSeqs VALUES (?,?,?,?,?,?,?,?,?)', record)
conn.commit()
conn.close()
fname = output + 'SG41_52_HLA_' + locus + '_paired'
IMGTdbIO.save_dict2pickle(available_records, fname) | 51.486486 | 188 | 0.554068 | 2,966 | 26,670 | 4.865475 | 0.070128 | 0.105329 | 0.13866 | 0.102211 | 0.965837 | 0.965837 | 0.963274 | 0.954404 | 0.953572 | 0.946435 | 0 | 0.032639 | 0.267079 | 26,670 | 518 | 189 | 51.486486 | 0.705633 | 0.07994 | 0 | 0.885638 | 0 | 0 | 0.240491 | 0.015264 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.037234 | 0.010638 | 0 | 0.010638 | 0.005319 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5d9810d4b4b8adccf9bdf7c225344ac34558b5cb | 8,178 | py | Python | common/f5/bigip/interfaces/iapp.py | Sinan828/oslbaasv1_for_cmcc | 52318460b704ed5f08491b482b47aae61bf166ca | [
"Apache-2.0"
] | null | null | null | common/f5/bigip/interfaces/iapp.py | Sinan828/oslbaasv1_for_cmcc | 52318460b704ed5f08491b482b47aae61bf166ca | [
"Apache-2.0"
] | null | null | null | common/f5/bigip/interfaces/iapp.py | Sinan828/oslbaasv1_for_cmcc | 52318460b704ed5f08491b482b47aae61bf166ca | [
"Apache-2.0"
] | null | null | null | """ Manage application services on BIG-IP using REST interface """
# Copyright 2014 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from f5.common.logger import Log
from f5.common import constants as const
from f5.bigip.interfaces import icontrol_rest_folder
from f5.bigip import exceptions
from f5.bigip.interfaces import log
import json
class IApp(object):
""" Manage iApps """
OBJ_PREFIX = 'uuid_'
def __init__(self, bigip):
self.bigip = bigip
@icontrol_rest_folder
@log
def service_exists(self, name=None, folder='Common'):
""" Does iApp exist? """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/service/'
request_url += '~' + folder + '~' + name + '.app~' + name
request_url += '?$select=name'
response = self.bigip.icr_session.get(
request_url, timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
elif response.status_code != 404:
Log.error('IAPP', response.text)
return False
@icontrol_rest_folder
@log
def create_service(self, name=None, folder='Common', service=None):
""" Create iApp """
service['partition'] = folder
folder = str(folder).replace('/', '')
if not self.service_exists(name=name, folder=folder):
request_url = self.bigip.icr_url + '/sys/application/service/'
response = self.bigip.icr_session.post(
request_url, data=json.dumps(service),
timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
elif response.status_code == 409 or response.status_code == 404:
return True
else:
Log.error('IAPP', response.text)
raise exceptions.IAppCreationException(response.text)
return False
@icontrol_rest_folder
@log
def get_service(self, name=None, folder='Common'):
""" Get application service """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/service/'
request_url += '~' + folder + '~' + name + '.app~' + name
response = self.bigip.icr_session.get(
request_url, timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return json.loads(response.text)
elif response.status_code != 404:
Log.error('IAPP', response.text)
raise exceptions.IAppQueryException(response.text)
return None
@icontrol_rest_folder
@log
def update_service(self, name, folder='Common', service=None):
""" Update application service """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/service/'
request_url += '~' + folder + '~' + name + '.app~' + name
response = self.bigip.icr_session.put(
request_url, data=json.dumps(service),
timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
else:
# ignore this anomaly for now
if 'The monitor rule was not found' in response.text:
Log.error('IAPP', response.text)
return True
Log.error('IAPP', response.text)
raise exceptions.IAppUpdateException(response.text)
return False
@icontrol_rest_folder
@log
def delete_service(self, name, folder='Common'):
""" Delete application service """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/service/'
request_url += '~' + folder + '~' + name + '.app~' + name
response = self.bigip.icr_session.delete(
request_url, timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
elif response.status_code == 404:
return True
else:
Log.error('IAPP', response.text)
raise exceptions.IAppDeleteException(response.text)
return False
@icontrol_rest_folder
@log
def template_exists(self, name=None, folder='Common'):
""" Does iApp exist? """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/template/'
request_url += '~' + folder + '~' + name
request_url += '?$select=name'
response = self.bigip.icr_session.get(
request_url, timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
elif response.status_code != 404:
Log.error('IAPP', response.text)
return False
@icontrol_rest_folder
@log
def create_template(self, name=None, folder='Common', template=None):
""" Create iApp """
template['partition'] = folder
folder = str(folder).replace('/', '')
if not self.template_exists(name=name, folder=folder):
request_url = self.bigip.icr_url + '/sys/application/template/'
response = self.bigip.icr_session.post(
request_url, data=json.dumps(template),
timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
elif response.status_code == 409 or response.status_code == 404:
return True
else:
Log.error('IAPP', response.text)
raise exceptions.IAppCreationException(response.text)
return False
@icontrol_rest_folder
@log
def get_template(self, name=None, folder='Common'):
""" Get application template """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/template/'
request_url += '~' + folder + '~' + name
response = self.bigip.icr_session.get(
request_url, timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return json.loads(response.text)
elif response.status_code != 404:
Log.error('IAPP', response.text)
raise exceptions.IAppQueryException(response.text)
return None
@icontrol_rest_folder
@log
def update_template(self, name, folder='Common', template=None):
""" Update application template """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/template/'
request_url += '~' + folder + '~' + name
response = self.bigip.icr_session.put(
request_url, data=json.dumps(template),
timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
else:
Log.error('IAPP', response.text)
raise exceptions.IAppUpdateException(response.text)
return False
@icontrol_rest_folder
@log
def delete_template(self, name, folder='Common'):
""" Delete application template """
folder = str(folder).replace('/', '')
request_url = self.bigip.icr_url + '/sys/application/template/'
request_url += '~' + folder + '~' + name
response = self.bigip.icr_session.delete(
request_url, timeout=const.CONNECTION_TIMEOUT)
if response.status_code < 400:
return True
elif response.status_code == 404:
return True
else:
Log.error('IAPP', response.text)
raise exceptions.IAppDeleteException(response.text)
return False
| 38.394366 | 76 | 0.61054 | 911 | 8,178 | 5.349067 | 0.163557 | 0.061564 | 0.049251 | 0.045147 | 0.82126 | 0.790889 | 0.758055 | 0.746973 | 0.746973 | 0.72009 | 0 | 0.012506 | 0.276473 | 8,178 | 212 | 77 | 38.575472 | 0.811053 | 0.10785 | 0 | 0.850299 | 0 | 0 | 0.067277 | 0.035372 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065868 | false | 0 | 0.035928 | 0 | 0.263473 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5da6ad10afbcde771bfadd42ac8ca473498327be | 13,439 | py | Python | packages/augur-core/tests/trading/test_cancelOrder.py | mjgiannelli/augur | c10763a8a184132fde8cf9c003f1dbf26c15a8e4 | [
"MIT"
] | null | null | null | packages/augur-core/tests/trading/test_cancelOrder.py | mjgiannelli/augur | c10763a8a184132fde8cf9c003f1dbf26c15a8e4 | [
"MIT"
] | null | null | null | packages/augur-core/tests/trading/test_cancelOrder.py | mjgiannelli/augur | c10763a8a184132fde8cf9c003f1dbf26c15a8e4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from eth_tester.exceptions import TransactionFailed
from pytest import raises, mark
from utils import longTo32Bytes, longToHexString, fix, AssertLog, BuyWithCash, nullAddress
from constants import BID, ASK, YES, NO
def test_cancelBid(contractsFixture, cash, market, universe):
createOrder = contractsFixture.contracts['CreateOrder']
cancelOrder = contractsFixture.contracts['CancelOrder']
orders = contractsFixture.contracts['Orders']
orderType = BID
amount = fix(1)
fxpPrice = 60
outcomeID = YES
tradeGroupID = longTo32Bytes(42)
yesShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(YES))
noShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(NO))
creatorInitialShares = yesShareToken.balanceOf(contractsFixture.accounts[1])
marketInitialCash = cash.balanceOf(market.address)
marketInitialYesShares = yesShareToken.totalSupply()
marketInitialNoShares = noShareToken.totalSupply()
with BuyWithCash(cash, fix(fxpPrice), contractsFixture.accounts[1], "The sender didn't get cost deducted for create order"):
orderID = createOrder.publicCreateOrder(orderType, amount, fxpPrice, market.address, outcomeID, longTo32Bytes(0), longTo32Bytes(0), tradeGroupID, nullAddress, sender=contractsFixture.accounts[1])
assert orderID, "Order ID should be non-zero"
assert orders.getOrderCreator(orderID), "Order should have an owner"
orderEventLog = {
"universe": universe.address,
"market": market.address,
"eventType": 1,
"addressData": [nullAddress, contractsFixture.accounts[1], nullAddress],
"uint256Data": [0, 0, 0, fix('1', '60'), 0, 0, 0, contractsFixture.contracts['Time'].getTimestamp(), 0, 0],
}
with AssertLog(contractsFixture, 'OrderEvent', orderEventLog):
assert(cancelOrder.cancelOrder(orderID, sender=contractsFixture.accounts[1]) == 1), "cancelOrder should succeed"
assert orders.getAmount(orderID) == 0
assert orders.getPrice(orderID) == 0
assert orders.getOrderCreator(orderID) == longToHexString(0)
assert orders.getOrderMoneyEscrowed(orderID) == 0
assert orders.getOrderSharesEscrowed(orderID) == 0
assert orders.getBetterOrderId(orderID) == longTo32Bytes(0)
assert orders.getWorseOrderId(orderID) == longTo32Bytes(0)
assert(cash.balanceOf(contractsFixture.accounts[1]) == fix('60')), "Maker's cash balance should be order size"
assert(marketInitialCash == cash.balanceOf(market.address)), "Market's cash balance should be the same as before the order was placed"
assert(creatorInitialShares == yesShareToken.balanceOf(contractsFixture.accounts[1])), "Maker's shares should be unchanged"
assert(marketInitialYesShares == yesShareToken.totalSupply()), "Market's yes shares should be unchanged"
assert marketInitialNoShares == noShareToken.totalSupply(), "Market's no shares should be unchanged"
def test_cancelAsk(contractsFixture, cash, market):
createOrder = contractsFixture.contracts['CreateOrder']
cancelOrder = contractsFixture.contracts['CancelOrder']
orders = contractsFixture.contracts['Orders']
orderType = ASK
amount = fix(1)
fxpPrice = 60
outcomeID = 1
tradeGroupID = longTo32Bytes(42)
yesShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(YES))
noShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(NO))
creatorInitialShares = yesShareToken.balanceOf(contractsFixture.accounts[1])
marketInitialCash = cash.balanceOf(market.address)
marketInitialYesShares = yesShareToken.totalSupply()
marketInitialNoShares = noShareToken.totalSupply()
with BuyWithCash(cash, fix(100 - fxpPrice), contractsFixture.accounts[1], "create order"):
orderID = createOrder.publicCreateOrder(orderType, amount, fxpPrice, market.address, outcomeID, longTo32Bytes(0), longTo32Bytes(0), tradeGroupID, nullAddress, sender=contractsFixture.accounts[1])
assert(orderID != bytearray(32)), "Order ID should be non-zero"
assert orders.getOrderCreator(orderID), "Order should have an owner"
assert(cancelOrder.cancelOrder(orderID, sender=contractsFixture.accounts[1]) == 1), "cancelOrder should succeed"
assert orders.getAmount(orderID) == 0
assert orders.getPrice(orderID) == 0
assert orders.getOrderCreator(orderID) == longToHexString(0)
assert orders.getOrderMoneyEscrowed(orderID) == 0
assert orders.getOrderSharesEscrowed(orderID) == 0
assert orders.getBetterOrderId(orderID) == longTo32Bytes(0)
assert orders.getWorseOrderId(orderID) == longTo32Bytes(0)
assert(marketInitialCash == cash.balanceOf(market.address)), "Market's cash balance should be the same as before the order was placed"
assert(creatorInitialShares == yesShareToken.balanceOf(contractsFixture.accounts[1])), "Maker's shares should be unchanged"
assert(marketInitialYesShares == yesShareToken.totalSupply()), "Market's yes shares should be unchanged"
assert marketInitialNoShares == noShareToken.totalSupply(), "Market's no shares should be unchanged"
def test_cancelWithSharesInEscrow(contractsFixture, cash, market, universe):
completeSets = contractsFixture.contracts['CompleteSets']
createOrder = contractsFixture.contracts['CreateOrder']
cancelOrder = contractsFixture.contracts['CancelOrder']
orders = contractsFixture.contracts['Orders']
yesShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(YES))
noShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(NO))
totalProceeds = fix('12', market.getNumTicks())
marketCreatorFee = totalProceeds / market.getMarketCreatorSettlementFeeDivisor()
reporterFee = totalProceeds / universe.getOrCacheReportingFeeDivisor()
completeSetFees = marketCreatorFee + reporterFee
# buy complete sets
with BuyWithCash(cash, fix('12', market.getNumTicks()), contractsFixture.accounts[1], "buy complete set"):
assert completeSets.publicBuyCompleteSets(market.address, fix(12), sender = contractsFixture.accounts[1])
assert cash.balanceOf(contractsFixture.accounts[1]) == fix('0')
assert yesShareToken.balanceOf(contractsFixture.accounts[1]) == fix(12)
assert noShareToken.balanceOf(contractsFixture.accounts[1]) == fix(12)
creatorInitialShares = yesShareToken.balanceOf(contractsFixture.accounts[1])
marketInitialCash = cash.balanceOf(market.address)
marketInitialYesShares = yesShareToken.totalSupply()
marketInitialNoShares = noShareToken.totalSupply()
# create BID order for YES with NO shares escrowed
assert noShareToken.approve(createOrder.address, fix(12), sender = contractsFixture.accounts[1])
orderID = createOrder.publicCreateOrder(BID, fix(12), 60, market.address, YES, longTo32Bytes(0), longTo32Bytes(0), longTo32Bytes(42), nullAddress, sender = contractsFixture.accounts[1])
assert orderID
assert cash.balanceOf(contractsFixture.accounts[1]) == fix('0')
assert yesShareToken.balanceOf(contractsFixture.accounts[1]) == fix(12)
assert noShareToken.balanceOf(contractsFixture.accounts[1]) == 0
# now cancel the order
assert(cancelOrder.cancelOrder(orderID, sender=contractsFixture.accounts[1]) == 1), "cancelOrder should succeed"
assert orders.getAmount(orderID) == 0
assert orders.getPrice(orderID) == 0
assert orders.getOrderCreator(orderID) == longToHexString(0)
assert orders.getOrderMoneyEscrowed(orderID) == 0
assert orders.getOrderSharesEscrowed(orderID) == 0
assert orders.getBetterOrderId(orderID) == longTo32Bytes(0)
assert orders.getWorseOrderId(orderID) == longTo32Bytes(0)
assert(marketInitialCash == cash.balanceOf(market.address)), "Market's cash balance should be the same as before the order was placed"
assert(creatorInitialShares == yesShareToken.balanceOf(contractsFixture.accounts[1])), "Maker's shares should be unchanged"
assert(marketInitialYesShares == yesShareToken.totalSupply()), "Market's yes shares should be unchanged"
assert marketInitialNoShares == noShareToken.totalSupply(), "Market's no shares should be unchanged"
def test_cancelWithSharesInEscrowAsk(contractsFixture, cash, market, universe):
completeSets = contractsFixture.contracts['CompleteSets']
createOrder = contractsFixture.contracts['CreateOrder']
cancelOrder = contractsFixture.contracts['CancelOrder']
orders = contractsFixture.contracts['Orders']
yesShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(YES))
noShareToken = contractsFixture.applySignature('ShareToken', market.getShareToken(NO))
totalProceeds = fix('12', market.getNumTicks())
marketCreatorFee = totalProceeds / market.getMarketCreatorSettlementFeeDivisor()
reporterFee = totalProceeds / universe.getOrCacheReportingFeeDivisor()
completeSetFees = marketCreatorFee + reporterFee
# buy complete sets
with BuyWithCash(cash, fix('12', market.getNumTicks()), contractsFixture.accounts[1], "buy complete set"):
assert completeSets.publicBuyCompleteSets(market.address, fix(12), sender = contractsFixture.accounts[1])
assert cash.balanceOf(contractsFixture.accounts[1]) == fix('0')
assert yesShareToken.balanceOf(contractsFixture.accounts[1]) == fix(12)
assert noShareToken.balanceOf(contractsFixture.accounts[1]) == fix(12)
creatorInitialShares = yesShareToken.balanceOf(contractsFixture.accounts[1])
marketInitialCash = cash.balanceOf(market.address)
marketInitialYesShares = yesShareToken.totalSupply()
marketInitialNoShares = noShareToken.totalSupply()
# create ASK order for YES with YES shares escrowed
assert noShareToken.approve(createOrder.address, fix(12), sender = contractsFixture.accounts[1])
orderID = createOrder.publicCreateOrder(ASK, fix(12), 60, market.address, YES, longTo32Bytes(0), longTo32Bytes(0), longTo32Bytes(42), nullAddress, sender = contractsFixture.accounts[1])
assert orderID
assert cash.balanceOf(contractsFixture.accounts[1]) == fix('0')
assert yesShareToken.balanceOf(contractsFixture.accounts[1]) == 0
assert noShareToken.balanceOf(contractsFixture.accounts[1]) == fix(12)
# now cancel the order
assert(cancelOrder.cancelOrder(orderID, sender=contractsFixture.accounts[1]) == 1), "cancelOrder should succeed"
assert orders.getAmount(orderID) == 0
assert orders.getPrice(orderID) == 0
assert orders.getOrderCreator(orderID) == longToHexString(0)
assert orders.getOrderMoneyEscrowed(orderID) == 0
assert orders.getOrderSharesEscrowed(orderID) == 0
assert orders.getBetterOrderId(orderID) == longTo32Bytes(0)
assert orders.getWorseOrderId(orderID) == longTo32Bytes(0)
assert(marketInitialCash == cash.balanceOf(market.address)), "Market's cash balance should be the same as before the order was placed"
assert(creatorInitialShares == yesShareToken.balanceOf(contractsFixture.accounts[1])), "Maker's shares should be unchanged"
assert(marketInitialYesShares == yesShareToken.totalSupply()), "Market's yes shares should be unchanged"
assert marketInitialNoShares == noShareToken.totalSupply(), "Market's no shares should be unchanged"
def test_exceptions(contractsFixture, cash, market):
createOrder = contractsFixture.contracts['CreateOrder']
cancelOrder = contractsFixture.contracts['CancelOrder']
orderType = BID
amount = fix(1)
fxpPrice = 60
outcomeID = YES
tradeGroupID = longTo32Bytes(42)
with BuyWithCash(cash, fix(fxpPrice), contractsFixture.accounts[1], "create order"):
orderID = createOrder.publicCreateOrder(orderType, amount, fxpPrice, market.address, outcomeID, longTo32Bytes(0), longTo32Bytes(0), tradeGroupID, nullAddress, sender=contractsFixture.accounts[1])
assert(orderID != bytearray(32)), "Order ID should be non-zero"
# cancelOrder exceptions
with raises(TransactionFailed):
cancelOrder.cancelOrder(longTo32Bytes(0), sender=contractsFixture.accounts[1])
with raises(TransactionFailed):
cancelOrder.cancelOrder(longTo32Bytes(1), sender=contractsFixture.accounts[1])
with raises(TransactionFailed):
cancelOrder.cancelOrder(orderID, sender=contractsFixture.accounts[2])
assert(cancelOrder.cancelOrder(orderID, sender=contractsFixture.accounts[1]) == 1), "cancelOrder should succeed"
with raises(TransactionFailed):
cancelOrder.cancelOrder(orderID, sender=contractsFixture.accounts[1])
def test_cancelOrders(contractsFixture, cash, market, universe):
createOrder = contractsFixture.contracts['CreateOrder']
cancelOrder = contractsFixture.contracts['CancelOrder']
orders = contractsFixture.contracts['Orders']
orderType = BID
amount = fix(1)
fxpPrice = 60
outcomeID = YES
tradeGroupID = longTo32Bytes(42)
orderIDs = []
for i in range(10):
with BuyWithCash(cash, fix(fxpPrice + i), contractsFixture.accounts[0], "create order"):
orderIDs.append(createOrder.publicCreateOrder(orderType, amount, fxpPrice + i, market.address, outcomeID, longTo32Bytes(0), longTo32Bytes(0), tradeGroupID, nullAddress))
for i in range(10):
assert orders.getAmount(orderIDs[i]) == amount
assert cancelOrder.cancelOrders(orderIDs)
for i in range(10):
assert orders.getAmount(orderIDs[i]) == 0
| 58.430435 | 203 | 0.758315 | 1,323 | 13,439 | 7.697657 | 0.104308 | 0.108405 | 0.108013 | 0.07011 | 0.925962 | 0.915848 | 0.902789 | 0.898665 | 0.887667 | 0.85271 | 0 | 0.021427 | 0.135278 | 13,439 | 229 | 204 | 58.68559 | 0.854918 | 0.01637 | 0 | 0.793651 | 0 | 0 | 0.113088 | 0 | 0 | 0 | 0 | 0 | 0.412698 | 1 | 0.031746 | false | 0 | 0.021164 | 0 | 0.05291 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5dc7952cd3f15d62cd8da754b204d597cfab7bbb | 340 | py | Python | odoo/base-addons/sale_stock/tests/__init__.py | LucasBorges-Santos/docker-odoo | 53987bbd61f6119669b5f801ee2ad54695084a21 | [
"MIT"
] | null | null | null | odoo/base-addons/sale_stock/tests/__init__.py | LucasBorges-Santos/docker-odoo | 53987bbd61f6119669b5f801ee2ad54695084a21 | [
"MIT"
] | null | null | null | odoo/base-addons/sale_stock/tests/__init__.py | LucasBorges-Santos/docker-odoo | 53987bbd61f6119669b5f801ee2ad54695084a21 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import test_anglo_saxon_valuation
from . import test_anglo_saxon_valuation_reconciliation
from . import test_anglosaxon_account
from . import test_sale_stock
from . import test_sale_stock_lead_time
from . import test_sale_order_dates
| 37.777778 | 74 | 0.823529 | 51 | 340 | 5.137255 | 0.588235 | 0.229008 | 0.320611 | 0.206107 | 0.427481 | 0.251908 | 0 | 0 | 0 | 0 | 0 | 0.003356 | 0.123529 | 340 | 8 | 75 | 42.5 | 0.875839 | 0.276471 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5de0dfb431fee2972ac94ee8ac2de52ec532adb4 | 4,611 | py | Python | src/ops.py | jianwen-xie/Dynamic_generator | f68e99d654ba98a1085bb38dbfa4f043b931668a | [
"MIT"
] | 10 | 2018-12-24T01:58:39.000Z | 2021-07-01T02:56:44.000Z | src/ops.py | jianwen-xie/Dynamic_generator | f68e99d654ba98a1085bb38dbfa4f043b931668a | [
"MIT"
] | null | null | null | src/ops.py | jianwen-xie/Dynamic_generator | f68e99d654ba98a1085bb38dbfa4f043b931668a | [
"MIT"
] | 6 | 2019-02-14T07:17:42.000Z | 2022-02-25T10:21:10.000Z | import math
import numpy as numpy
import tensorflow as tf
def conv2d(input_, output_dim, kernal=(5, 5), strides=(2, 2), padding='SAME', stddev=0.02, name="conv2d"):
if type(kernal) == list or type(kernal) == tuple:
[k_h, k_w] = list(kernal)
else:
k_h = k_w = kernal
if type(strides) == list or type(strides) == tuple:
[d_h, d_w] = list(strides)
else:
d_h = d_w = strides
if type(padding) == list or type(padding) == tuple:
padding = [0] + list(padding) + [0]
input_ = tf.pad(input_, [[p, p] for p in padding], "CONSTANT")
padding = 'VALID'
with tf.variable_scope(name):
w = tf.get_variable('w', [k_h, k_w, input_.get_shape()[-1], output_dim],
initializer=tf.truncated_normal_initializer(stddev=stddev))
conv = tf.nn.conv2d(input_, w, strides=[1, d_h, d_w, 1], padding=padding)
biases = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.0))
conv = tf.nn.bias_add(conv, biases)
# conv = tf.reshape(conv, conv.get_shape())
return conv
def conv3d(input_, output_dim, kernal=(5, 5, 5), strides=(2, 2, 2), padding='SAME', stddev=0.001, name="conv3d"):
if type(kernal) == list or type(kernal) == tuple:
[k_d, k_h, k_w] = list(kernal)
else:
k_d = k_h = k_w = kernal
if type(strides) == list or type(strides) == tuple:
[d_d, d_h, d_w] = list(strides)
else:
d_d = d_h = d_w = strides
if type(padding) == list or type(padding) == tuple:
padding = [0] + list(padding) + [0]
input_ = tf.pad(input_, [[p, p] for p in padding], "CONSTANT")
padding = 'VALID'
with tf.variable_scope(name):
w = tf.get_variable('w', [k_d, k_h, k_w, input_.get_shape()[-1], output_dim],
initializer=tf.random_normal_initializer(stddev=stddev))
conv = tf.nn.conv3d(input_, w, strides=[1, d_d, d_h, d_w, 1], padding=padding)
biases = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.0))
conv = tf.nn.bias_add(conv, biases)
return conv
def convt2d(input_, output_shape, kernal=(5, 5), strides=(2, 2), padding='SAME', stddev=0.005, name="convt2d"):
if type(kernal) == list or type(kernal) == tuple:
[k_h, k_w] = list(kernal)
else:
k_h = k_w = kernal
if type(strides) == list or type(strides) == tuple:
[d_h, d_w] = list(strides)
else:
d_h = d_w = strides
if type(padding) == list or type(padding) == tuple:
padding = [0] + list(padding) + [0]
input_ = tf.pad(input_, [[p, p] for p in padding], "CONSTANT")
padding = 'VALID'
output_shape = list(output_shape)
output_shape[0] = tf.shape(input_)[0]
with tf.variable_scope(name):
w = tf.get_variable('w', [k_h, k_w, output_shape[-1], input_.get_shape()[-1]],
initializer=tf.random_normal_initializer(stddev=stddev))
convt = tf.nn.conv2d_transpose(input_, w, output_shape=tf.stack(output_shape, axis=0),
strides=[1, d_h, d_w, 1], padding=padding)
biases = tf.get_variable('biases', [output_shape[-1]], initializer=tf.constant_initializer(0.0))
convt = tf.nn.bias_add(convt, biases)
return convt
def convt3d(input_, output_shape, kernal=(5, 5, 5), strides=(2, 2, 2), padding='SAME', stddev=0.005, name="convt3d"):
if type(kernal) == list or type(kernal) == tuple:
[k_d, k_h, k_w] = list(kernal)
else:
k_d = k_h = k_w = kernal
if type(strides) == list or type(strides) == tuple:
[d_d, d_h, d_w] = list(strides)
else:
d_d = d_h = d_w = strides
if type(padding) == list or type(padding) == tuple:
padding = [0] + list(padding) + [0]
input_ = tf.pad(input_, [[p, p] for p in padding], "CONSTANT")
padding = 'VALID'
output_shape = list(output_shape)
output_shape[0] = tf.shape(input_)[0]
with tf.variable_scope(name):
w = tf.get_variable('w', [k_d, k_h, k_w, output_shape[-1], input_.get_shape()[-1]],
initializer=tf.random_normal_initializer(stddev=stddev))
convt = tf.nn.conv3d_transpose(input_, w, output_shape=tf.stack(output_shape, axis=0),
strides=[1, d_d, d_h, d_w, 1], padding=padding)
biases = tf.get_variable('biases', [output_shape[-1]], initializer=tf.constant_initializer(0.0))
convt = tf.nn.bias_add(convt, biases)
return convt
| 41.918182 | 117 | 0.590327 | 683 | 4,611 | 3.781845 | 0.09224 | 0.068138 | 0.046458 | 0.018583 | 0.919861 | 0.915215 | 0.891986 | 0.861014 | 0.855594 | 0.855594 | 0 | 0.024774 | 0.25591 | 4,611 | 109 | 118 | 42.302752 | 0.728068 | 0.008892 | 0 | 0.797753 | 0 | 0 | 0.026708 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.044944 | false | 0 | 0.033708 | 0 | 0.123596 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5df87897d1716599fe392b7718ddae22918b25f4 | 51,820 | py | Python | modules_lattice.py | xdrshjr/CivilWospERT | 1cd6d2f90da354e3a29203b2cd9f3348149bc64d | [
"MIT"
] | 1 | 2021-03-24T09:30:25.000Z | 2021-03-24T09:30:25.000Z | modules_lattice.py | xdrshjr/PSFLAT_civil | 1cd6d2f90da354e3a29203b2cd9f3348149bc64d | [
"MIT"
] | null | null | null | modules_lattice.py | xdrshjr/PSFLAT_civil | 1cd6d2f90da354e3a29203b2cd9f3348149bc64d | [
"MIT"
] | null | null | null | import torch.nn as nn
import torch.nn.functional as F
import torch
import math
import collections
from fastNLP import seq_len_to_mask
from lattice.utils import print_info, size2MB,MyDropout
def get_embedding(max_seq_len, embedding_dim, padding_idx=None, rel_pos_init=0):
"""Build sinusoidal embeddings.
This matches the implementation in tensor2tensor, but differs slightly
from the description in Section 3.5 of "Attention Is All You Need".
rel pos init:
如果是0,那么从-max_len到max_len的相对位置编码矩阵就按0-2*max_len来初始化,
如果是1,那么就按-max_len,max_len来初始化
"""
num_embeddings = 2*max_seq_len+1
half_dim = embedding_dim // 2
emb = math.log(10000) / (half_dim - 1)
emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
if rel_pos_init == 0:
emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0)
else:
emb = torch.arange(-max_seq_len,max_seq_len+1, dtype=torch.float).unsqueeze(1)*emb.unsqueeze(0)
emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1)
if embedding_dim % 2 == 1:
# zero pad
emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)
if padding_idx is not None:
emb[padding_idx, :] = 0
return emb
class Four_Pos_Fusion_Embedding(nn.Module):
    """Fuse the four span-relative distances of a lattice (start-start,
    start-end, end-start, end-end) into one relative position embedding of
    shape batch * seq_len * seq_len * hidden_size.

    pe / pe_ss / pe_se / pe_es / pe_ee are sinusoidal tables indexed by
    (signed distance + max_seq_len); four_pos_fusion selects the fusion
    strategy: 'ff' | 'ff_linear' | 'ff_two' | 'attn' | 'gate'.
    mode is a defaultdict of diagnostic flags ('debug', 'gpumm').
    """
    def __init__(self,pe,four_pos_fusion,pe_ss,pe_se,pe_es,pe_ee,max_seq_len,hidden_size,mode):
        super().__init__()
        self.mode = mode
        self.hidden_size = hidden_size
        self.max_seq_len=max_seq_len
        self.pe_ss = pe_ss
        self.pe_se = pe_se
        self.pe_es = pe_es
        self.pe_ee = pe_ee
        self.pe = pe
        self.four_pos_fusion = four_pos_fusion
        # 'ff': single linear + ReLU over the concatenation of all four tables
        if self.four_pos_fusion == 'ff':
            self.pos_fusion_forward = nn.Sequential(nn.Linear(self.hidden_size*4,self.hidden_size),
                                                    nn.ReLU(inplace=True))
        # 'ff_linear': same but without the nonlinearity
        if self.four_pos_fusion == 'ff_linear':
            self.pos_fusion_forward = nn.Linear(self.hidden_size*4,self.hidden_size)
        # 'ff_two': only ss and ee distances are fused
        elif self.four_pos_fusion == 'ff_two':
            self.pos_fusion_forward = nn.Sequential(nn.Linear(self.hidden_size*2,self.hidden_size),
                                                    nn.ReLU(inplace=True))
        # 'attn': per-pair softmax attention weights over the four embeddings
        elif self.four_pos_fusion == 'attn':
            self.w_r = nn.Linear(self.hidden_size,self.hidden_size)
            self.pos_attn_score = nn.Sequential(nn.Linear(self.hidden_size*4,self.hidden_size*4),
                                                nn.ReLU(),
                                                nn.Linear(self.hidden_size*4,4),
                                                nn.Softmax(dim=-1))
        # 'gate': per-dimension gating scores over the four embeddings
        elif self.four_pos_fusion == 'gate':
            self.w_r = nn.Linear(self.hidden_size,self.hidden_size)
            self.pos_gate_score = nn.Sequential(nn.Linear(self.hidden_size*4,self.hidden_size*2),
                                                nn.ReLU(),
                                                nn.Linear(self.hidden_size*2,4*self.hidden_size))

    def forward(self,pos_s,pos_e):
        # pos_s / pos_e: batch * seq_len start/end character offsets of each
        # token (chars plus matched lexicon words); seq_len here already
        # includes lex_num.
        batch = pos_s.size(0)
        # pairwise signed distances, batch * seq_len * seq_len
        pos_ss = pos_s.unsqueeze(-1)-pos_s.unsqueeze(-2)
        pos_se = pos_s.unsqueeze(-1)-pos_e.unsqueeze(-2)
        pos_es = pos_e.unsqueeze(-1)-pos_s.unsqueeze(-2)
        pos_ee = pos_e.unsqueeze(-1)-pos_e.unsqueeze(-2)
        if self.mode['debug']:
            print('pos_s:{}'.format(pos_s))
            print('pos_e:{}'.format(pos_e))
            print('pos_ss:{}'.format(pos_ss))
            print('pos_se:{}'.format(pos_se))
            print('pos_es:{}'.format(pos_es))
            print('pos_ee:{}'.format(pos_ee))
        # look up each distance in its table, shifting by max_seq_len so
        # negative distances index valid rows
        max_seq_len = pos_s.size(1)
        pe_ss = self.pe_ss[(pos_ss).view(-1)+self.max_seq_len].view(size=[batch,max_seq_len,max_seq_len,-1])
        pe_se = self.pe_se[(pos_se).view(-1) + self.max_seq_len].view(size=[batch, max_seq_len, max_seq_len, -1])
        pe_es = self.pe_es[(pos_es).view(-1) + self.max_seq_len].view(size=[batch, max_seq_len, max_seq_len, -1])
        pe_ee = self.pe_ee[(pos_ee).view(-1) + self.max_seq_len].view(size=[batch, max_seq_len, max_seq_len, -1])
        if self.four_pos_fusion == 'ff':
            pe_4 = torch.cat([pe_ss,pe_se,pe_es,pe_ee],dim=-1)
            if self.mode['gpumm']:
                print('四个位置合起来:{},{}'.format(pe_4.size(),size2MB(pe_4.size())))
            rel_pos_embedding = self.pos_fusion_forward(pe_4)
        if self.four_pos_fusion == 'ff_linear':
            pe_4 = torch.cat([pe_ss,pe_se,pe_es,pe_ee],dim=-1)
            if self.mode['gpumm']:
                print('四个位置合起来:{},{}'.format(pe_4.size(),size2MB(pe_4.size())))
            rel_pos_embedding = self.pos_fusion_forward(pe_4)
        if self.four_pos_fusion == 'ff_two':
            pe_2 = torch.cat([pe_ss,pe_ee],dim=-1)
            if self.mode['gpumm']:
                print('2个位置合起来:{},{}'.format(pe_2.size(),size2MB(pe_2.size())))
            rel_pos_embedding = self.pos_fusion_forward(pe_2)
        elif self.four_pos_fusion == 'attn':
            pe_4 = torch.cat([pe_ss,pe_se,pe_es,pe_ee],dim=-1)
            attn_score = self.pos_attn_score(pe_4)
            pe_4_unflat = self.w_r(pe_4.view(batch,max_seq_len,max_seq_len,4,self.hidden_size))
            pe_4_fusion = (attn_score.unsqueeze(-1) * pe_4_unflat).sum(dim=-2)
            rel_pos_embedding = pe_4_fusion
            if self.mode['debug']:
                # pe_4 should be Batch * SeqLen * SeqLen * HiddenSize
                print('pe_4照理说应该是 Batch * SeqLen * SeqLen * HiddenSize')
                print(pe_4_fusion.size())
        elif self.four_pos_fusion == 'gate':
            pe_4 = torch.cat([pe_ss, pe_se, pe_es, pe_ee], dim=-1)
            gate_score = self.pos_gate_score(pe_4).view(batch,max_seq_len,max_seq_len,4,self.hidden_size)
            gate_score = F.softmax(gate_score,dim=-2)
            pe_4_unflat = self.w_r(pe_4.view(batch, max_seq_len, max_seq_len, 4, self.hidden_size))
            pe_4_fusion = (gate_score * pe_4_unflat).sum(dim=-2)
            rel_pos_embedding = pe_4_fusion
        return rel_pos_embedding
class MultiHead_Attention_Lattice_rel_save_gpumm(nn.Module):
    """Lattice multi-head attention with Transformer-XL style relative
    position bias (learned per-head u/v vectors), GPU-memory-saving variant.

    Unlike ``MultiHead_Attention_Lattice_rel``, the four span-relative
    position embeddings are fused OUTSIDE this module (see
    ``Four_Pos_Fusion_Embedding``); ``forward`` receives the already-fused
    ``rel_pos_embedding`` so it can be shared across layers.
    """
    def __init__(self, hidden_size, num_heads, pe,
                 pe_ss, pe_se, pe_es, pe_ee,
                 scaled=True, max_seq_len=-1,
                 dvc=None, mode=collections.defaultdict(bool),
                 k_proj=True, q_proj=True, v_proj=True, r_proj=True,
                 attn_dropout=None,
                 ff_final=True,
                 four_pos_fusion=None):
        '''
        :param hidden_size: model width; must be divisible by num_heads
        :param num_heads: number of attention heads
        :param pe, pe_ss, pe_se, pe_es, pe_ee: sinusoidal position tables
        :param scaled: scale attention logits by 1/sqrt(per_head_size)
        :param max_seq_len: offset used when indexing relative position tables
        :param dvc: torch device; defaults to CPU
        :param mode: defaultdict of diagnostic flags ('debug', 'gpumm', ...)
        :param k_proj, q_proj, v_proj, r_proj: enable the four projections
        :param attn_dropout: dropout rate applied to the attention weights
        :param ff_final: apply a final linear projection to the output
        :param four_pos_fusion: 'ff' | 'attn' | 'gate' (required)
        '''
        super().__init__()
        assert four_pos_fusion is not None
        self.four_pos_fusion = four_pos_fusion
        self.pe_ss = pe_ss
        self.pe_se = pe_se
        self.pe_es = pe_es
        self.pe_ee = pe_ee
        self.mode = mode
        if self.mode['debug']:
            print_info('rel pos attn')
        self.hidden_size = hidden_size
        self.num_heads = num_heads
        self.per_head_size = self.hidden_size // self.num_heads
        self.scaled = scaled
        self.max_seq_len = max_seq_len
        if dvc is None:
            dvc = torch.device('cpu')
        self.dvc = dvc
        assert (self.per_head_size * self.num_heads == self.hidden_size)
        self.k_proj = k_proj
        self.q_proj = q_proj
        self.v_proj = v_proj
        self.r_proj = r_proj
        # NOTE(review): fusion actually happens in Four_Pos_Fusion_Embedding;
        # these sub-modules appear unused by forward() here but are kept so
        # existing checkpoints / state dicts keep loading — confirm before removing.
        if self.four_pos_fusion == 'ff':
            self.pos_fusion_forward = nn.Sequential(nn.Linear(self.hidden_size*4, self.hidden_size),
                                                    nn.ReLU(inplace=True))
        elif self.four_pos_fusion == 'attn':
            self.pos_attn_score = nn.Sequential(nn.Linear(self.hidden_size*4, self.hidden_size*4),
                                                nn.ReLU(),
                                                nn.Linear(self.hidden_size*4, 4),
                                                nn.Softmax(dim=-1))
        elif self.four_pos_fusion == 'gate':
            self.pos_gate_score = nn.Sequential(nn.Linear(self.hidden_size*4, self.hidden_size*2),
                                                nn.ReLU(),
                                                nn.Linear(self.hidden_size*2, 4*self.hidden_size))
        self.w_k = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_q = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_v = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_r = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_final = nn.Linear(self.hidden_size, self.hidden_size)
        # Transformer-XL global content (u) and position (v) bias, per head
        self.u = torch.nn.Parameter(torch.zeros(self.num_heads, self.per_head_size))
        self.v = torch.nn.Parameter(torch.zeros(self.num_heads, self.per_head_size))
        self.pe = pe
        self.dropout = MyDropout(attn_dropout)
        if ff_final:
            self.ff_final = nn.Linear(self.hidden_size, self.hidden_size)

    def forward(self, key, query, value, seq_len, lex_num, pos_s, pos_e, rel_pos_embedding):
        """Attend over the lattice sequence.

        key/query/value: batch * seq_len * hidden_size; seq_len here already
        includes lex_num (characters plus matched lexicon words).
        rel_pos_embedding: batch * seq_len * seq_len * hidden_size, already
        fused from the four span-relative embeddings.
        """
        batch = pos_e.size(0)
        max_seq_len = pos_e.size(1)
        if self.k_proj:
            if self.mode['debug']:
                print_info('k_proj!')
            key = self.w_k(key)
        if self.q_proj:
            if self.mode['debug']:
                print_info('q_proj!')
            query = self.w_q(query)
        if self.v_proj:
            if self.mode['debug']:
                print_info('v_proj!')
            value = self.w_v(value)
        if self.r_proj:
            if self.mode['debug']:
                print_info('r_proj!')
            rel_pos_embedding = self.w_r(rel_pos_embedding)
        # batch * seq_len * n_head * d_head
        key = torch.reshape(key, [batch, max_seq_len, self.num_heads, self.per_head_size])
        query = torch.reshape(query, [batch, max_seq_len, self.num_heads, self.per_head_size])
        value = torch.reshape(value, [batch, max_seq_len, self.num_heads, self.per_head_size])
        rel_pos_embedding = torch.reshape(rel_pos_embedding,
                                          [batch, max_seq_len, max_seq_len, self.num_heads, self.per_head_size])
        # batch * n_head * seq_len * d_head
        key = key.transpose(1, 2)
        query = query.transpose(1, 2)
        value = value.transpose(1, 2)
        # batch * n_head * d_head * key_len
        key = key.transpose(-1, -2)
        # A+C term: (q + u) @ k^T — content score plus global content bias
        u_for_c = self.u.unsqueeze(0).unsqueeze(-2)
        # u_for_c: 1(batch broadcast) * num_heads * 1 * per_head_size
        query_and_u_for_c = query + u_for_c
        if self.mode['debug']:
            print('query:{}'.format(query.size()))
            print('u_for_c:{}'.format(u_for_c.size()))
            print('query_and_u_for_c:{}'.format(query_and_u_for_c.size()))
            print('key:{}'.format(key.size()))
        A_C = torch.matmul(query_and_u_for_c, key)
        if self.mode['debug']:
            print('query size:{}'.format(query.size()))
            print('query_and_u_for_c size:{}'.format(query_and_u_for_c.size()))
        # B+D term: (q + v) @ r^T — position score plus global position bias
        rel_pos_embedding_for_b = rel_pos_embedding.permute(0, 3, 1, 4, 2)
        # rel_pos_embedding_for_b: batch * num_head * query_len * per_head_size * key_len
        query_for_b = query.view([batch, self.num_heads, max_seq_len, 1, self.per_head_size])
        # query_for_b: batch * num_head * query_len * 1 * per_head_size
        query_for_b_and_v_for_d = query_for_b + self.v.view(1, self.num_heads, 1, 1, self.per_head_size)
        B_D = torch.matmul(query_for_b_and_v_for_d, rel_pos_embedding_for_b).squeeze(-2)
        # attn score: Batch * num_heads * query_len * key_len
        if self.mode['debug']:
            print_info('AC:{}'.format(A_C.size()))
            print_info('BD:{}'.format(B_D.size()))
        attn_score_raw = A_C + B_D
        if self.scaled:
            attn_score_raw = attn_score_raw / math.sqrt(self.per_head_size)
        # NOTE(review): the padding mask (seq_len_to_mask over seq_len+lex_num)
        # is commented out in this variant, so padding positions can receive
        # attention — confirm that masking is handled upstream.
        # mask = seq_len_to_mask(seq_len+lex_num).bool()
        # attn_score_raw_masked = attn_score_raw.masked_fill(~mask, -1e15)
        attn_score_raw_masked = attn_score_raw
        if self.mode['debug']:
            print('attn_score_raw_masked:{}'.format(attn_score_raw_masked))
            print('seq_len:{}'.format(seq_len))
        attn_score = F.softmax(attn_score_raw_masked, dim=-1)
        attn_score = self.dropout(attn_score)
        value_weighted_sum = torch.matmul(attn_score, value)
        result = value_weighted_sum.transpose(1, 2).contiguous(). \
            reshape(batch, max_seq_len, self.hidden_size)
        if hasattr(self, 'ff_final'):
            # BUG FIX: previously an unconditional print('ff_final!!') spammed
            # stdout on every forward pass; emit it only in debug mode.
            if self.mode['debug']:
                print('ff_final!!')
            result = self.ff_final(result)
        return result

    def seq_len_to_rel_distance(self, max_seq_len):
        '''
        Build an L*L matrix of signed relative distances (column - row).
        :param max_seq_len: L
        :return: L*L rel_distance tensor on self.dvc
        '''
        index = torch.arange(0, max_seq_len)
        assert index.size(0) == max_seq_len
        assert index.dim() == 1
        index = index.repeat(max_seq_len, 1)
        offset = torch.arange(0, max_seq_len).unsqueeze(1)
        offset = offset.repeat(1, max_seq_len)
        index = index - offset
        index = index.to(self.dvc)
        return index
class MultiHead_Attention_Lattice_rel(nn.Module):
    """Lattice multi-head attention with Transformer-XL style relative
    position bias; this variant fuses the four span-relative position
    embeddings itself inside forward() (cf. the *_save_gpumm variant which
    takes the fused embedding as an argument).
    """
    def __init__(self, hidden_size, num_heads, pe,
                 pe_ss, pe_se, pe_es, pe_ee,
                 scaled=True, max_seq_len=-1,
                 dvc=None, mode=collections.defaultdict(bool),
                 k_proj=True, q_proj=True, v_proj=True, r_proj=True,
                 attn_dropout=None,
                 ff_final=True,
                 four_pos_fusion=None):
        '''
        :param hidden_size: model width; must be divisible by num_heads
        :param num_heads: number of attention heads
        :param pe, pe_ss, pe_se, pe_es, pe_ee: sinusoidal position tables
        :param scaled: scale attention logits by 1/sqrt(per_head_size)
        :param max_seq_len: offset used when indexing relative position tables
        :param dvc: torch device; defaults to CPU
        :param mode: defaultdict of diagnostic flags
        :param attn_dropout: dropout rate applied to the attention weights
        :param ff_final: apply a final linear projection to the output
        :param four_pos_fusion: 'ff' | 'attn' ('gate' is rejected at init)
        '''
        super().__init__()
        assert four_pos_fusion is not None
        self.four_pos_fusion = four_pos_fusion
        self.pe_ss = pe_ss
        self.pe_se = pe_se
        self.pe_es = pe_es
        self.pe_ee = pe_ee
        self.mode = mode
        if self.mode['debug']:
            print_info('rel pos attn')
        self.hidden_size = hidden_size
        self.num_heads = num_heads
        self.per_head_size = self.hidden_size // self.num_heads
        self.scaled = scaled
        self.max_seq_len = max_seq_len
        if dvc is None:
            dvc = torch.device('cpu')
        self.dvc = dvc
        assert (self.per_head_size * self.num_heads == self.hidden_size)
        self.k_proj = k_proj
        self.q_proj = q_proj
        self.v_proj = v_proj
        self.r_proj = r_proj
        if self.four_pos_fusion == 'ff':
            self.pos_fusion_forward = nn.Sequential(nn.Linear(self.hidden_size*4, self.hidden_size),
                                                    nn.ReLU(inplace=True))
        elif self.four_pos_fusion == 'attn':
            self.pos_attn_score = nn.Sequential(nn.Linear(self.hidden_size*4, self.hidden_size*4),
                                                nn.ReLU(),
                                                nn.Linear(self.hidden_size*4, 4),
                                                nn.Softmax(dim=-1))
        elif self.four_pos_fusion == 'gate':
            # 'gate' fusion is not supported by this class: it aborts here.
            self.pos_attn_score = nn.Sequential(nn.Linear(self.hidden_size*4, self.hidden_size*2),
                                                nn.ReLU(),
                                                nn.Linear(self.hidden_size*2, 4),
                                                nn.Softmax(dim=-1))
            print('暂时不支持以gate融合pos信息')
            exit(1208)
        self.w_k = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_q = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_v = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_r = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_final = nn.Linear(self.hidden_size, self.hidden_size)
        # Transformer-XL global content (u) and position (v) bias, per head
        self.u = nn.Parameter(torch.Tensor(self.num_heads, self.per_head_size))
        self.v = nn.Parameter(torch.Tensor(self.num_heads, self.per_head_size))
        self.pe = pe
        self.dropout = MyDropout(attn_dropout)
        if ff_final:
            self.ff_final = nn.Linear(self.hidden_size, self.hidden_size)

    def forward(self, key, query, value, seq_len, lex_num, pos_s, pos_e):
        """Attend over the lattice sequence; seq_len already includes lex_num.

        pos_s / pos_e: batch * seq_len start/end offsets used to build the
        four pairwise distance matrices that drive the relative position bias.
        """
        batch = key.size(0)
        # pairwise signed distances, batch * seq_len * seq_len
        pos_ss = pos_s.unsqueeze(-1) - pos_s.unsqueeze(-2)
        pos_se = pos_s.unsqueeze(-1) - pos_e.unsqueeze(-2)
        pos_es = pos_e.unsqueeze(-1) - pos_s.unsqueeze(-2)
        pos_ee = pos_e.unsqueeze(-1) - pos_e.unsqueeze(-2)
        if self.mode['debug']:
            print('pos_s:{}'.format(pos_s))
            print('pos_e:{}'.format(pos_e))
            print('pos_ss:{}'.format(pos_ss))
            print('pos_se:{}'.format(pos_se))
            print('pos_es:{}'.format(pos_es))
            print('pos_ee:{}'.format(pos_ee))
        # B prepare relative position encoding: shift by max_seq_len so
        # negative distances index valid table rows
        max_seq_len = key.size(1)
        pe_ss = self.pe[(pos_ss).view(-1) + self.max_seq_len].view(size=[batch, max_seq_len, max_seq_len, -1])
        pe_se = self.pe[(pos_se).view(-1) + self.max_seq_len].view(size=[batch, max_seq_len, max_seq_len, -1])
        pe_es = self.pe[(pos_es).view(-1) + self.max_seq_len].view(size=[batch, max_seq_len, max_seq_len, -1])
        pe_ee = self.pe[(pos_ee).view(-1) + self.max_seq_len].view(size=[batch, max_seq_len, max_seq_len, -1])
        # fuse the four embeddings ('gate' cannot reach here: init aborts)
        if self.four_pos_fusion == 'ff':
            pe_4 = torch.cat([pe_ss, pe_se, pe_es, pe_ee], dim=-1)
            if self.mode['gpumm']:
                print('四个位置合起来:{},{}'.format(pe_4.size(), size2MB(pe_4.size())))
            rel_pos_embedding = self.pos_fusion_forward(pe_4)
        elif self.four_pos_fusion == 'attn':
            pe_4 = torch.cat([pe_ss, pe_se, pe_es, pe_ee], dim=-1)
            attn_score = self.pos_attn_score(pe_4)
            pe_4_unflat = pe_4.view(batch, max_seq_len, max_seq_len, 4, self.hidden_size)
            pe_4_fusion = (attn_score.unsqueeze(-1) * pe_4_unflat).sum(-2)
            rel_pos_embedding = pe_4_fusion
            if self.mode['debug']:
                # pe_4 should be Batch * SeqLen * SeqLen * HiddenSize
                print('pe_4照理说应该是 Batch * SeqLen * SeqLen * HiddenSize')
                print(pe_4_fusion.size())
        # E prepare relative position encoding
        if self.k_proj:
            if self.mode['debug']:
                print_info('k_proj!')
            key = self.w_k(key)
        if self.q_proj:
            if self.mode['debug']:
                print_info('q_proj!')
            query = self.w_q(query)
        if self.v_proj:
            if self.mode['debug']:
                print_info('v_proj!')
            value = self.w_v(value)
        if self.r_proj:
            if self.mode['debug']:
                print_info('r_proj!')
            rel_pos_embedding = self.w_r(rel_pos_embedding)
        batch = key.size(0)
        max_seq_len = key.size(1)
        # batch * seq_len * n_head * d_head
        key = torch.reshape(key, [batch, max_seq_len, self.num_heads, self.per_head_size])
        query = torch.reshape(query, [batch, max_seq_len, self.num_heads, self.per_head_size])
        value = torch.reshape(value, [batch, max_seq_len, self.num_heads, self.per_head_size])
        rel_pos_embedding = torch.reshape(rel_pos_embedding,
                                          [batch, max_seq_len, max_seq_len, self.num_heads, self.per_head_size])
        # batch * n_head * seq_len * d_head
        key = key.transpose(1, 2)
        query = query.transpose(1, 2)
        value = value.transpose(1, 2)
        # batch * n_head * d_head * key_len
        key = key.transpose(-1, -2)
        # A: content-content score
        A_ = torch.matmul(query, key)
        # B: content-position score
        rel_pos_embedding_for_b = rel_pos_embedding.permute(0, 3, 1, 4, 2)
        # rel_pos_embedding_for_b: batch * num_head * query_len * per_head_size * key_len
        query_for_b = query.view([batch, self.num_heads, max_seq_len, 1, self.per_head_size])
        B_ = torch.matmul(query_for_b, rel_pos_embedding_for_b).squeeze(-2)
        # D: global bias v against position
        rel_pos_embedding_for_d = rel_pos_embedding.unsqueeze(-2)
        # rel_pos_embedding_for_d: batch * query_seq_len * key_seq_len * num_heads * 1 * per_head_size
        v_for_d = self.v.unsqueeze(-1)
        # v_for_d: num_heads * per_head_size * 1
        D_ = torch.matmul(rel_pos_embedding_for_d, v_for_d).squeeze(-1).squeeze(-1).permute(0, 3, 1, 2)
        # C: global bias u against content (key: batch * n_head * d_head * key_len)
        u_for_c = self.u.unsqueeze(0).unsqueeze(-2)
        # u_for_c: 1(batch broadcast) * num_heads * 1 * per_head_size
        C_ = torch.matmul(u_for_c, key)
        # att_score: Batch * num_heads * query_len * key_len
        if self.mode['debug']:
            print_info('A:{}'.format(A_.size()))
            print_info('B:{}'.format(B_.size()))
            print_info('C:{}'.format(C_.size()))
            print_info('D:{}'.format(D_.size()))
        attn_score_raw = A_ + B_ + C_ + D_
        if self.scaled:
            attn_score_raw = attn_score_raw / math.sqrt(self.per_head_size)
        mask = seq_len_to_mask(seq_len + lex_num).bool().unsqueeze(1).unsqueeze(1)
        attn_score_raw_masked = attn_score_raw.masked_fill(~mask, -1e15)
        if self.mode['debug']:
            print('attn_score_raw_masked:{}'.format(attn_score_raw_masked))
            print('seq_len:{}'.format(seq_len))
        attn_score = F.softmax(attn_score_raw_masked, dim=-1)
        attn_score = self.dropout(attn_score)
        value_weighted_sum = torch.matmul(attn_score, value)
        result = value_weighted_sum.transpose(1, 2).contiguous(). \
            reshape(batch, max_seq_len, self.hidden_size)
        if hasattr(self, 'ff_final'):
            # BUG FIX: previously an unconditional print('ff_final!!') spammed
            # stdout on every forward pass; emit it only in debug mode.
            if self.mode['debug']:
                print('ff_final!!')
            result = self.ff_final(result)
        return result

    def seq_len_to_rel_distance(self, max_seq_len):
        '''
        Build an L*L matrix of signed relative distances (column - row).
        :param max_seq_len: L
        :return: L*L rel_distance tensor on self.dvc
        '''
        index = torch.arange(0, max_seq_len)
        assert index.size(0) == max_seq_len
        assert index.dim() == 1
        index = index.repeat(max_seq_len, 1)
        offset = torch.arange(0, max_seq_len).unsqueeze(1)
        offset = offset.repeat(1, max_seq_len)
        index = index - offset
        index = index.to(self.dvc)
        return index
class MultiHead_Attention_rel(nn.Module):
    """Multi-head attention with (non-lattice) Transformer-XL style relative
    position bias: the relative-position table is shared across the batch and
    indexed by token-index distance only.
    """
    def __init__(self, hidden_size, num_heads, pe, scaled=True, max_seq_len=-1,
                 dvc=None, mode=collections.defaultdict(bool),
                 k_proj=True, q_proj=True, v_proj=True, r_proj=True,
                 attn_dropout=None,
                 ff_final=True):
        '''
        :param hidden_size: model width; must be divisible by num_heads
        :param num_heads: number of attention heads
        :param pe: sinusoidal relative-position table
        :param scaled: scale attention logits by 1/sqrt(per_head_size)
        :param max_seq_len: offset used when indexing the relative pe table
        :param dvc: torch device; defaults to CPU
        :param mode: defaultdict of diagnostic flags
        :param attn_dropout: dropout rate applied to the attention weights
        :param ff_final: apply a final linear projection to the output
        '''
        super().__init__()
        self.mode = mode
        if self.mode['debug']:
            print_info('rel pos attn')
        self.hidden_size = hidden_size
        self.num_heads = num_heads
        self.per_head_size = self.hidden_size // self.num_heads
        self.scaled = scaled
        self.max_seq_len = max_seq_len
        if dvc is None:
            dvc = torch.device('cpu')
        self.dvc = dvc
        assert (self.per_head_size * self.num_heads == self.hidden_size)
        self.k_proj = k_proj
        self.q_proj = q_proj
        self.v_proj = v_proj
        self.r_proj = r_proj
        self.w_k = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_q = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_v = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_r = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_final = nn.Linear(self.hidden_size, self.hidden_size)
        # Transformer-XL global content (u) and position (v) bias, per head
        self.u = nn.Parameter(torch.Tensor(self.num_heads, self.per_head_size))
        self.v = nn.Parameter(torch.Tensor(self.num_heads, self.per_head_size))
        self.pe = pe
        self.dropout = MyDropout(attn_dropout)
        if ff_final:
            self.ff_final = nn.Linear(self.hidden_size, self.hidden_size)

    def forward(self, key, query, value, seq_len):
        """Attend over a padded batch; seq_len is a per-example length tensor
        used both to size the relative-position matrix and to mask padding.
        """
        # B prepare relative position encoding (shared across the batch)
        max_seq_len = torch.max(seq_len)
        rel_distance = self.seq_len_to_rel_distance(max_seq_len)
        rel_distance_flat = rel_distance.view(-1)
        rel_pos_embedding_flat = self.pe[rel_distance_flat + self.max_seq_len]
        rel_pos_embedding = rel_pos_embedding_flat.view(size=[max_seq_len, max_seq_len, self.hidden_size])
        # E prepare relative position encoding
        if self.k_proj:
            if self.mode['debug']:
                print_info('k_proj!')
            key = self.w_k(key)
        if self.q_proj:
            if self.mode['debug']:
                print_info('q_proj!')
            query = self.w_q(query)
        if self.v_proj:
            if self.mode['debug']:
                print_info('v_proj!')
            value = self.w_v(value)
        if self.r_proj:
            if self.mode['debug']:
                print_info('r_proj!')
            rel_pos_embedding = self.w_r(rel_pos_embedding)
        batch = key.size(0)
        max_seq_len = key.size(1)
        # batch * seq_len * n_head * d_head
        key = torch.reshape(key, [batch, max_seq_len, self.num_heads, self.per_head_size])
        query = torch.reshape(query, [batch, max_seq_len, self.num_heads, self.per_head_size])
        value = torch.reshape(value, [batch, max_seq_len, self.num_heads, self.per_head_size])
        rel_pos_embedding = torch.reshape(rel_pos_embedding,
                                          [max_seq_len, max_seq_len, self.num_heads, self.per_head_size])
        # batch * n_head * seq_len * d_head
        key = key.transpose(1, 2)
        query = query.transpose(1, 2)
        value = value.transpose(1, 2)
        # batch * n_head * d_head * key_len
        key = key.transpose(-1, -2)
        # A: content-content score
        A_ = torch.matmul(query, key)
        # B: content-position score
        rel_pos_embedding_for_b = rel_pos_embedding.unsqueeze(0).permute(0, 3, 1, 4, 2)
        # rel_pos_embedding_for_b: batch * num_head * query_len * per_head_size * key_len
        query_for_b = query.view([batch, self.num_heads, max_seq_len, 1, self.per_head_size])
        B_ = torch.matmul(query_for_b, rel_pos_embedding_for_b).squeeze(-2)
        # D: global bias v against position
        rel_pos_embedding_for_d = rel_pos_embedding.unsqueeze(-2)
        # rel_pos_embedding_for_d: query_seq_len * key_seq_len * num_heads * 1 * per_head_size
        v_for_d = self.v.unsqueeze(-1)
        # v_for_d: num_heads * per_head_size * 1
        D_ = torch.matmul(rel_pos_embedding_for_d, v_for_d).squeeze(-1).squeeze(-1).permute(2, 0, 1).unsqueeze(0)
        # C: global bias u against content (key: batch * n_head * d_head * key_len)
        u_for_c = self.u.unsqueeze(0).unsqueeze(-2)
        # u_for_c: 1(batch broadcast) * num_heads * 1 * per_head_size
        C_ = torch.matmul(u_for_c, key)
        # att_score: Batch * num_heads * query_len * key_len
        if self.mode['debug']:
            print_info('A:{}'.format(A_.size()))
            print_info('B:{}'.format(B_.size()))
            print_info('C:{}'.format(C_.size()))
            print_info('D:{}'.format(D_.size()))
        attn_score_raw = A_ + B_ + C_ + D_
        if self.scaled:
            attn_score_raw = attn_score_raw / math.sqrt(self.per_head_size)
        mask = seq_len_to_mask(seq_len).bool().unsqueeze(1).unsqueeze(1)
        attn_score_raw_masked = attn_score_raw.masked_fill(~mask, -1e15)
        if self.mode['debug']:
            print('attn_score_raw_masked:{}'.format(attn_score_raw_masked))
            print('seq_len:{}'.format(seq_len))
        attn_score = F.softmax(attn_score_raw_masked, dim=-1)
        attn_score = self.dropout(attn_score)
        value_weighted_sum = torch.matmul(attn_score, value)
        result = value_weighted_sum.transpose(1, 2).contiguous(). \
            reshape(batch, max_seq_len, self.hidden_size)
        if hasattr(self, 'ff_final'):
            # BUG FIX: previously an unconditional print('ff_final!!') spammed
            # stdout on every forward pass; emit it only in debug mode.
            if self.mode['debug']:
                print('ff_final!!')
            result = self.ff_final(result)
        return result

    def seq_len_to_rel_distance(self, max_seq_len):
        '''
        Build an L*L matrix of signed relative distances (column - row).
        :param max_seq_len: L
        :return: L*L rel_distance tensor on self.dvc
        '''
        index = torch.arange(0, max_seq_len)
        assert index.size(0) == max_seq_len
        assert index.dim() == 1
        index = index.repeat(max_seq_len, 1)
        offset = torch.arange(0, max_seq_len).unsqueeze(1)
        offset = offset.repeat(1, max_seq_len)
        index = index - offset
        index = index.to(self.dvc)
        return index
class MultiHead_Attention(nn.Module):
    """Plain multi-head self-attention for inputs that already carry absolute
    position information (standard Transformer absolute-position setup).
    """
    def __init__(self, hidden_size, num_heads, scaled=True, mode=collections.defaultdict(bool),
                 k_proj=True, q_proj=True, v_proj=True,
                 attn_dropout=None, ff_final=True):
        '''
        :param hidden_size: model width; must be divisible by num_heads
        :param num_heads: number of attention heads
        :param scaled: scale attention logits by 1/sqrt(per_head_size)
        :param mode: defaultdict of diagnostic flags
        :param k_proj, q_proj, v_proj: enable the respective input projections
        :param attn_dropout: dropout rate for attention weights (None -> 0)
        :param ff_final: apply a final linear projection to the output
        '''
        super().__init__()
        self.hidden_size = hidden_size
        self.num_heads = num_heads
        self.per_head_size = self.hidden_size // self.num_heads
        self.scaled = scaled
        assert (self.per_head_size * self.num_heads == self.hidden_size)
        self.w_k = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_q = nn.Linear(self.hidden_size, self.hidden_size)
        self.w_v = nn.Linear(self.hidden_size, self.hidden_size)
        if ff_final:
            self.ff_final = nn.Linear(self.hidden_size, self.hidden_size)
        self.mode = mode
        self.k_proj = k_proj
        self.q_proj = q_proj
        self.v_proj = v_proj
        if self.mode['debug']:
            print_info('abs pos attn')
        if attn_dropout is None:
            # BUG FIX: the original assigned an unused local
            # (`dropout = collections.defaultdict(int)`) and still passed None
            # on to MyDropout; default the rate to 0 (no dropout) instead.
            attn_dropout = 0
        self.dropout = MyDropout(attn_dropout)

    def forward(self, key, query, value, seq_len, lex_num=0):
        """Standard scaled dot-product attention over a padded batch.

        seq_len + lex_num gives the number of valid (unmasked) positions per
        example; everything beyond it is masked out before the softmax.
        """
        if self.k_proj:
            key = self.w_k(key)
        if self.q_proj:
            query = self.w_q(query)
        if self.v_proj:
            value = self.w_v(value)
        batch = key.size(0)
        max_seq_len = key.size(1)
        # batch * seq_len * n_head * d_head
        key = torch.reshape(key, [batch, max_seq_len, self.num_heads, self.per_head_size])
        query = torch.reshape(query, [batch, max_seq_len, self.num_heads, self.per_head_size])
        value = torch.reshape(value, [batch, max_seq_len, self.num_heads, self.per_head_size])
        # batch * n_head * seq_len * d_head (keys further transposed for matmul)
        key = key.transpose(1, 2)
        query = query.transpose(1, 2)
        value = value.transpose(1, 2)
        key = key.transpose(-1, -2)
        attention_raw = torch.matmul(query, key)
        if self.scaled:
            attention_raw = attention_raw / math.sqrt(self.per_head_size)
        mask = seq_len_to_mask(seq_len + lex_num).bool().unsqueeze(1).unsqueeze(1)
        attention_raw_masked = attention_raw.masked_fill(~mask, -1e15)
        attn_score = F.softmax(attention_raw_masked, dim=-1)
        attn_score = self.dropout(attn_score)
        value_weighted_sum = torch.matmul(attn_score, value)
        result = value_weighted_sum.transpose(1, 2).contiguous(). \
            reshape(batch, max_seq_len, self.hidden_size)
        if hasattr(self, 'ff_final'):
            result = self.ff_final(result)
        return result
class Positionwise_FeedForward(nn.Module):
    """Position-wise feed-forward stack: Linear layers w0..wN-1 (sizes[i] ->
    sizes[i+1]) with the activation applied BEFORE every layer except the
    first, and dropout applied after the first and second layers.
    """
    def __init__(self, sizes, dropout=None, ff_activate='relu'):
        super().__init__()
        self.num_layers = len(sizes) - 1
        for layer_idx in range(self.num_layers):
            setattr(self, 'w' + str(layer_idx), nn.Linear(sizes[layer_idx], sizes[layer_idx + 1]))
        if dropout is None:
            dropout = collections.defaultdict(int)
        # NOTE(review): the `dropout` argument is normalised above but then
        # never used — both rates are hard-coded to 0.15. Confirm intended.
        self.dropout = MyDropout(0.15)
        self.dropout_2 = MyDropout(0.15)
        if ff_activate == 'relu':
            self.activate = nn.ReLU(inplace=True)
        elif ff_activate == 'leaky':
            self.activate = nn.LeakyReLU(inplace=True)

    def forward(self, inp):
        out = inp
        for layer_idx in range(self.num_layers):
            if layer_idx != 0:
                out = self.activate(out)
            out = getattr(self, 'w' + str(layer_idx))(out)
            if layer_idx == 0:
                out = self.dropout(out)
            elif layer_idx == 1:
                out = self.dropout_2(out)
        return out
class Absolute_Position_Embedding(nn.Module):
    """Adds a fixed (optionally learnable) sinusoidal absolute position
    embedding to its input.

    When pos_norm is True each table row is divided by its own sum; when
    learnable is True the table becomes a trainable parameter.
    """
    def __init__(self, hidden_size, max_len=5000, learnable=False,
                 mode=collections.defaultdict(bool), pos_norm=False):
        '''
        :param hidden_size: embedding width (matches the input's last dim)
        :param max_len: maximum supported sequence length
        :param learnable: make the table trainable
        :param mode: defaultdict of diagnostic flags
        :param pos_norm: normalise each row by its sum
        '''
        super().__init__()
        self.pos_norm = pos_norm
        self.mode = mode
        table = Absolute_Position_Embedding.get_embedding(max_len, hidden_size)
        row_sum = table.sum(dim=-1, keepdim=True)
        if self.pos_norm:
            with torch.no_grad():
                table = table / row_sum
        # leading batch-broadcast dimension: 1 * max_len * hidden_size
        self.pe = nn.Parameter(table.unsqueeze(0), requires_grad=learnable)
        if self.mode['debug']:
            print_info('position embedding:')
            print_info(self.pe[:100])
            print_info('pe size:{}'.format(self.pe.size()))
            print_info('pe avg:{}'.format(torch.sum(self.pe)/(self.pe.size(2)*self.pe.size(1))))

    def forward(self, inp):
        if self.mode['debug']:
            print_info('now in Absolute Position Embedding')
        # broadcast-add the first inp.size(1) position rows
        return inp + self.pe[:, :inp.size(1)]

    @staticmethod
    def get_embedding(num_embeddings, embedding_dim, padding_idx=None):
        """Build the sinusoidal table (tensor2tensor variant, which differs
        slightly from Section 3.5 of "Attention Is All You Need").
        """
        half_dim = embedding_dim // 2
        scale = math.log(10000) / (half_dim - 1)
        inv_freq = torch.exp(torch.arange(half_dim, dtype=torch.float) * -scale)
        angles = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * inv_freq.unsqueeze(0)
        table = torch.cat([torch.sin(angles), torch.cos(angles)], dim=1).view(num_embeddings, -1)
        if embedding_dim % 2 == 1:
            # odd dimensionality: zero-pad the final column
            table = torch.cat([table, torch.zeros(num_embeddings, 1)], dim=1)
        if padding_idx is not None:
            table[padding_idx, :] = 0
        return table
class Transformer_Encoder_Layer(nn.Module):
    """One Transformer encoder layer (FLAT-style): pre/post Layer_Process
    wrappers around multi-head attention (absolute, relative, or lattice
    relative) followed by a position-wise feed-forward block.
    """
    def __init__(self, hidden_size, num_heads,
                 relative_position, learnable_position, add_position,
                 layer_preprocess_sequence, layer_postprocess_sequence,
                 dropout=None, scaled=True, ff_size=-1, mode=collections.defaultdict(bool),
                 max_seq_len=-1, pe=None,
                 pe_ss=None, pe_se=None, pe_es=None, pe_ee=None,
                 dvc=None,
                 k_proj=True, q_proj=True, v_proj=True, r_proj=True,
                 attn_ff=True, ff_activate='relu', lattice=False,
                 four_pos_shared=True, four_pos_fusion=None, four_pos_fusion_embedding=None,
                 rel_pos_init=0, pos_norm=False
                 ):
        '''
        :param relative_position: use relative position encoding (bool)
        :param learnable_position: make position tables trainable (bool)
        :param add_position: add (vs. concat) position info (bool)
        :param layer_preprocess_sequence / layer_postprocess_sequence:
            Layer_Process op strings ('a' residual, 'd' dropout, 'n' layernorm)
        :param four_pos_shared: share one table for the four span distances
        :param four_pos_fusion: fusion strategy for the lattice attention
        :param rel_pos_init: BUG FIX — was read from self.rel_pos_init, which
            was never assigned, crashing when relative_position and pe is None;
            now an explicit defaulted parameter (passed to get_embedding).
        :param pos_norm: BUG FIX — same unset-attribute issue; when True the
            generated position table rows are normalised by their sums.
        '''
        super().__init__()
        self.rel_pos_init = rel_pos_init
        self.pos_norm = pos_norm
        self.four_pos_fusion_embedding = four_pos_fusion_embedding
        self.four_pos_shared = four_pos_shared
        self.pe_ss = pe_ss
        self.pe_se = pe_se
        self.pe_es = pe_es
        self.pe_ee = pe_ee
        self.lattice = lattice
        self.hidden_size = hidden_size
        self.num_heads = num_heads
        self.relative_position = relative_position
        if self.relative_position and self.lattice:
            assert four_pos_fusion is not None
        self.four_pos_fusion = four_pos_fusion
        self.learnable_position = learnable_position
        self.add_position = add_position
        self.layer_preprocess_sequence = layer_preprocess_sequence
        self.layer_postprocess_sequence = layer_postprocess_sequence
        self.scaled = scaled
        self.mode = mode
        self.attn_ff = attn_ff
        self.ff_activate = ff_activate
        if self.relative_position and max_seq_len < 0:
            print_info('max_seq_len should be set if relative position encode')
            exit(1208)
        self.max_seq_len = max_seq_len
        if dvc is None:
            dvc = torch.device('cpu')
        self.dvc = dvc
        self.k_proj = k_proj
        self.q_proj = q_proj
        self.v_proj = v_proj
        self.r_proj = r_proj
        import copy
        if self.relative_position:
            if pe is None:
                # no table supplied: build one and optionally normalise it
                pe = get_embedding(max_seq_len, hidden_size, rel_pos_init=self.rel_pos_init)
                pe_sum = pe.sum(dim=-1, keepdim=True)
                if self.pos_norm:
                    with torch.no_grad():
                        pe = pe / pe_sum
                self.pe = nn.Parameter(pe, requires_grad=self.learnable_position)
                if self.four_pos_shared:
                    self.pe_ss = self.pe
                    self.pe_se = self.pe
                    self.pe_es = self.pe
                    self.pe_ee = self.pe
                else:
                    self.pe_ss = nn.Parameter(copy.deepcopy(pe), requires_grad=self.learnable_position)
                    self.pe_se = nn.Parameter(copy.deepcopy(pe), requires_grad=self.learnable_position)
                    self.pe_es = nn.Parameter(copy.deepcopy(pe), requires_grad=self.learnable_position)
                    self.pe_ee = nn.Parameter(copy.deepcopy(pe), requires_grad=self.learnable_position)
            else:
                self.pe = pe
                self.pe_ss = pe_ss
                self.pe_se = pe_se
                self.pe_es = pe_es
                self.pe_ee = pe_ee
        if self.four_pos_fusion_embedding is None:
            self.four_pos_fusion_embedding = \
                Four_Pos_Fusion_Embedding(self.pe, self.four_pos_fusion, self.pe_ss, self.pe_se, self.pe_es, self.pe_ee,
                                          self.max_seq_len, self.hidden_size, self.mode)
        if dropout == None:
            dropout = collections.defaultdict(int)
        # NOTE(review): self.dropout is reset to an all-zero defaultdict here,
        # so the `dropout` argument never reaches the sub-modules — confirm
        # this (dropout disabled inside the layer) is intended.
        self.dropout = collections.defaultdict(int)
        if ff_size == -1:
            ff_size = hidden_size
        self.ff_size = ff_size
        self.layer_preprocess = Layer_Process(self.layer_preprocess_sequence, self.hidden_size, self.dropout['pre'])
        self.layer_postprocess = Layer_Process(self.layer_postprocess_sequence, self.hidden_size, self.dropout['post'])
        if self.relative_position:
            if not self.lattice:
                self.attn = MultiHead_Attention_rel(self.hidden_size, self.num_heads,
                                                    pe=self.pe,
                                                    scaled=self.scaled,
                                                    mode=self.mode,
                                                    max_seq_len=self.max_seq_len,
                                                    dvc=self.dvc,
                                                    k_proj=self.k_proj,
                                                    q_proj=self.q_proj,
                                                    v_proj=self.v_proj,
                                                    r_proj=self.r_proj,
                                                    attn_dropout=self.dropout['attn'],
                                                    ff_final=self.attn_ff)
            else:
                self.attn = MultiHead_Attention_Lattice_rel_save_gpumm(self.hidden_size, self.num_heads,
                                                                       pe=self.pe,
                                                                       pe_ss=self.pe_ss,
                                                                       pe_se=self.pe_se,
                                                                       pe_es=self.pe_es,
                                                                       pe_ee=self.pe_ee,
                                                                       scaled=self.scaled,
                                                                       mode=self.mode,
                                                                       max_seq_len=self.max_seq_len,
                                                                       dvc=self.dvc,
                                                                       k_proj=self.k_proj,
                                                                       q_proj=self.q_proj,
                                                                       v_proj=self.v_proj,
                                                                       r_proj=self.r_proj,
                                                                       attn_dropout=self.dropout['attn'],
                                                                       ff_final=self.attn_ff,
                                                                       four_pos_fusion=self.four_pos_fusion)
        else:
            self.attn = MultiHead_Attention(self.hidden_size, self.num_heads, self.scaled, mode=self.mode,
                                            k_proj=self.k_proj, q_proj=self.q_proj, v_proj=self.v_proj,
                                            attn_dropout=self.dropout['attn'],
                                            ff_final=self.attn_ff)
        self.ff = Positionwise_FeedForward([hidden_size, ff_size, hidden_size], self.dropout, ff_activate=self.ff_activate)

    def forward(self, inp, seq_len, lex_num=0, pos_s=None, pos_e=None, rel_pos_embedding=None):
        """pre-process -> attention -> post-process -> pre-process -> FFN ->
        post-process. For lattice relative attention the fused relative
        position embedding is computed here if not supplied by the caller.
        """
        output = inp
        output = self.layer_preprocess(output)
        if self.lattice:
            if self.relative_position:
                if rel_pos_embedding is None:
                    rel_pos_embedding = self.four_pos_fusion_embedding(pos_s, pos_e)
                output = self.attn(output, output, output, seq_len, pos_s=pos_s, pos_e=pos_e, lex_num=lex_num,
                                   rel_pos_embedding=rel_pos_embedding)
            else:
                output = self.attn(output, output, output, seq_len, lex_num)
        else:
            output = self.attn(output, output, output, seq_len)
        output = self.layer_postprocess(output)
        output = self.layer_preprocess(output)
        output = self.ff(output)
        output = self.layer_postprocess(output)
        return output
class Layer_Process(nn.Module):
    """Applies a configurable chain of operations to a tensor, driven by the
    characters of `process_sequence` (case-insensitive), in order:

        'a' — residual add of the ORIGINAL input
        'd' — dropout (MyDropout with the given rate)
        'n' — nn.LayerNorm over `hidden_size`

    Sub-modules are instantiated only for the operations the sequence uses.
    """
    def __init__(self, process_sequence, hidden_size, dropout=0, ):
        super().__init__()
        self.process_sequence = process_sequence.lower()
        self.hidden_size = hidden_size
        self.dropout_rate = dropout
        # Build only what the sequence asks for, so e.g. a pure-'a' chain
        # carries no dropout/norm parameters.
        if 'd' in self.process_sequence:
            self.dropout = MyDropout(dropout)
        if 'n' in self.process_sequence:
            self.layer_norm = nn.LayerNorm(hidden_size)

    def forward(self, inp):
        result = inp
        for step in self.process_sequence:
            if step == 'a':
                # Residual uses the untouched input, not the running result.
                result = result + inp
            elif step == 'd':
                result = self.dropout(result)
            elif step == 'n':
                result = self.layer_norm(result)
        return result
class Transformer_Encoder(nn.Module):
    def __init__(self, hidden_size, num_heads, num_layers,
                 relative_position, learnable_position, add_position,
                 layer_preprocess_sequence, layer_postprocess_sequence,
                 dropout=None, scaled=True, ff_size=-1,
                 mode=None, dvc=None, max_seq_len=-1, pe=None,
                 pe_ss=None, pe_se=None, pe_es=None, pe_ee=None,
                 k_proj=True, q_proj=True, v_proj=True, r_proj=True,
                 attn_ff=True, ff_activate='relu', lattice=False,
                 four_pos_shared=True, four_pos_fusion=None, four_pos_fusion_shared=True):
        '''
        Stack of `num_layers` Transformer_Encoder_Layer modules, optionally
        lattice-aware with (four-way) relative position encoding.

        :param hidden_size: model dimension used by every layer
        :param num_heads: attention heads per layer
        :param num_layers: number of stacked encoder layers
        :param relative_position: bool, use relative position encoding
        :param learnable_position: bool
        :param add_position: bool, if False, concat
        :param layer_preprocess_sequence: ops run before each sub-layer
            (string of 'a'/'d'/'n' understood by Layer_Process)
        :param layer_postprocess_sequence: ops run after each sub-layer
        :param dropout: mapping of dropout rates (keys like 'pre'/'post'/'attn');
            defaults to a defaultdict(int), i.e. all rates 0
        :param mode: feature-flag mapping; defaults to defaultdict(bool)
        :param four_pos_fusion_shared: if True, one Four_Pos_Fusion_Embedding
            is built here and shared by all layers; otherwise each layer
            builds its own
        '''
        super().__init__()
        # BUGFIX: `mode` used to default to `collections.defaultdict(bool)`
        # in the signature — a mutable default created once and shared by
        # every instance (and by every Transformer_Encoder_Layer it builds).
        # Build a fresh mapping per instance instead.
        if mode is None:
            mode = collections.defaultdict(bool)
        self.four_pos_fusion_shared = four_pos_fusion_shared
        self.four_pos_shared = four_pos_shared
        self.four_pos_fusion = four_pos_fusion
        self.pe = pe
        self.pe_ss = pe_ss
        self.pe_se = pe_se
        self.pe_es = pe_es
        self.pe_ee = pe_ee
        self.mode = mode
        self.max_seq_len = max_seq_len
        self.hidden_size = hidden_size
        if self.four_pos_fusion_shared:
            # Single fusion embedding shared across layers (saves parameters
            # and lets forward() compute rel_pos_embedding once per batch).
            self.four_pos_fusion_embedding = \
                Four_Pos_Fusion_Embedding(self.pe, self.four_pos_fusion,
                                          self.pe_ss, self.pe_se, self.pe_es, self.pe_ee,
                                          self.max_seq_len, self.hidden_size, self.mode)
        else:
            # Each layer constructs its own fusion embedding.
            self.four_pos_fusion_embedding = None
        self.lattice = lattice
        self.num_heads = num_heads
        self.num_layers = num_layers
        self.relative_position = relative_position
        if self.relative_position and self.lattice:
            # Lattice relative attention needs to know how to fuse the four
            # position embeddings.
            assert four_pos_fusion is not None
        self.learnable_position = learnable_position
        self.add_position = add_position
        self.layer_preprocess_sequence = layer_preprocess_sequence
        self.layer_postprocess_sequence = layer_postprocess_sequence
        self.scaled = scaled
        self.k_proj = k_proj
        self.q_proj = q_proj
        self.v_proj = v_proj
        self.r_proj = r_proj
        self.attn_ff = attn_ff
        self.ff_activate = ff_activate
        if dvc is None:
            dvc = torch.device('cpu')
        self.dvc = dvc
        if self.relative_position and max_seq_len < 0:
            # Relative encoding pre-computes tables up to max_seq_len, so the
            # length must be known. (Kept as hard exit to match project style.)
            print_info('max_seq_len should be set if relative position encode')
            exit(1208)
        if dropout is None:
            dropout = collections.defaultdict(int)
        self.dropout = dropout
        if ff_size == -1:
            # Default feed-forward width to the model width.
            ff_size = hidden_size
        self.ff_size = ff_size
        for i in range(self.num_layers):
            # Layers are registered as attributes layer_0 .. layer_{n-1}
            # (kept — switching to nn.ModuleList would change state_dict keys).
            setattr(self, 'layer_{}'.format(i),
                    Transformer_Encoder_Layer(hidden_size, num_heads,
                                              relative_position, learnable_position, add_position,
                                              layer_preprocess_sequence, layer_postprocess_sequence,
                                              dropout, scaled, ff_size,
                                              mode=self.mode,
                                              max_seq_len=self.max_seq_len,
                                              pe=self.pe,
                                              pe_ss=self.pe_ss,
                                              pe_se=self.pe_se,
                                              pe_es=self.pe_es,
                                              pe_ee=self.pe_ee,
                                              k_proj=self.k_proj,
                                              q_proj=self.q_proj,
                                              v_proj=self.v_proj,
                                              r_proj=self.r_proj,
                                              attn_ff=self.attn_ff,
                                              ff_activate=self.ff_activate,
                                              lattice=self.lattice,
                                              four_pos_shared=self.four_pos_shared,
                                              four_pos_fusion=self.four_pos_fusion,
                                              four_pos_fusion_embedding=self.four_pos_fusion_embedding
                                              ))
        # Final preprocess (e.g. layer norm) applied after the last layer;
        # built without dropout.
        self.layer_preprocess = Layer_Process(self.layer_preprocess_sequence, self.hidden_size)

    def forward(self, inp, seq_len, lex_num=0, pos_s=None, pos_e=None):
        """Run the input through all layers and a final Layer_Process.

        When the fusion embedding is shared (lattice + relative position),
        rel_pos_embedding is computed once here and handed to every layer;
        otherwise each layer computes its own (or none at all).
        """
        output = inp
        if self.relative_position and self.four_pos_fusion_shared and self.lattice:
            rel_pos_embedding = self.four_pos_fusion_embedding(pos_s, pos_e)
        else:
            rel_pos_embedding = None
        for i in range(self.num_layers):
            now_layer = getattr(self, 'layer_{}'.format(i))
            output = now_layer(output, seq_len, lex_num=lex_num, pos_s=pos_s, pos_e=pos_e,
                               rel_pos_embedding=rel_pos_embedding)
        output = self.layer_preprocess(output)
        return output
| 41.824052 | 131 | 0.57582 | 6,991 | 51,820 | 3.945644 | 0.037334 | 0.039588 | 0.043395 | 0.038501 | 0.88526 | 0.842916 | 0.820874 | 0.796114 | 0.785419 | 0.762253 | 0 | 0.012402 | 0.315342 | 51,820 | 1,238 | 132 | 41.857835 | 0.765072 | 0.112254 | 0 | 0.772093 | 0 | 0 | 0.026185 | 0.001586 | 0 | 0 | 0 | 0.000808 | 0.016279 | 1 | 0.02907 | false | 0 | 0.009302 | 0 | 0.067442 | 0.081395 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5d37e1c16b891b60c3e3550d925fd3850b2251f0 | 561,672 | py | Python | generation/words_pos_phonemes_9802.py | KyleMaclean/Poetry-Generator | d69e30636403194668e22f87d6aecef24e1aba36 | [
"MIT"
] | null | null | null | generation/words_pos_phonemes_9802.py | KyleMaclean/Poetry-Generator | d69e30636403194668e22f87d6aecef24e1aba36 | [
"MIT"
] | null | null | null | generation/words_pos_phonemes_9802.py | KyleMaclean/Poetry-Generator | d69e30636403194668e22f87d6aecef24e1aba36 | [
"MIT"
] | null | null | null | dictionary = {'a': ('DT', ['AH0']), 'ab': ('NN', ['AE1', 'B']), 'aback': ('NN', ['AH0', 'B', 'AE1', 'K']), 'abacus': ('NN', ['AE1', 'B', 'AH0', 'K', 'AH0', 'S']), 'abalone': ('NN', ['AE2', 'B', 'AH0', 'L', 'OW1', 'N', 'IY0']), 'abandoned': ('VBN', ['AH0', 'B', 'AE1', 'N', 'D', 'AH0', 'N', 'D']), 'abandoning': ('VBG', ['AH0', 'B', 'AE1', 'N', 'D', 'AH0', 'N', 'IH0', 'NG']), 'abandon': ('NN', ['AH0', 'B', 'AE1', 'N', 'D', 'AH0', 'N']), 'abandonment': ('NN', ['AH0', 'B', 'AE1', 'N', 'D', 'AH0', 'N', 'M', 'AH0', 'N', 'T']), 'abashed': ('VBN', ['AH0', 'B', 'AE1', 'SH', 'T']), 'abash': ('NN', ['AH0', 'B', 'AE1', 'SH']), 'abated': ('VBN', ['AH0', 'B', 'EY1', 'T', 'IH0', 'D']), 'abating': ('VBG', ['AH0', 'B', 'EY1', 'T', 'IH0', 'NG']), 'abate': ('NN', ['AH0', 'B', 'EY1', 'T']), 'abatement': ('NN', ['AH0', 'B', 'EY1', 'T', 'M', 'AH0', 'N', 'T']), 'abba': ('NN', ['AE1', 'B', 'AH0']), 'abbe': ('NN', ['AE1', 'B', 'IY0']), 'abbey': ('NN', ['AE1', 'B', 'IY0']), 'abbot': ('NN', ['AE1', 'B', 'AH0', 'T']), 'abbreviated': ('VBN', ['AH0', 'B', 'R', 'IY1', 'V', 'IY0', 'EY2', 'T', 'AH0', 'D']), 'abbreviating': ('VBG', ['AH0', 'B', 'R', 'IY1', 'V', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'abbreviate': ('NN', ['AH0', 'B', 'R', 'IY1', 'V', 'IY0', 'EY2', 'T']), 'abbreviation': ('NN', ['AH0', 'B', 'R', 'IY2', 'V', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'abdicated': ('VBN', ['AE1', 'B', 'D', 'AH0', 'K', 'EY2', 'T', 'AH0', 'D']), 'abdicating': ('VBG', ['AE1', 'B', 'D', 'IH0', 'K', 'EY2', 'T', 'IH0', 'NG']), 'abdicate': ('NN', ['AE1', 'B', 'D', 'AH0', 'K', 'EY2', 'T']), 'abdication': ('NN', ['AE2', 'B', 'D', 'IH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'abdomen': ('NNS', ['AE0', 'B', 'D', 'OW1', 'M', 'AH0', 'N']), 'abdominal': ('JJ', ['AE0', 'B', 'D', 'AA1', 'M', 'AH0', 'N', 'AH0', 'L']), 'abducted': ('VBN', ['AE0', 'B', 'D', 'AH1', 'K', 'T', 'IH0', 'D']), 'abducting': ('VBG', ['AE0', 'B', 'D', 'AH1', 'K', 'T', 'IH0', 'NG']), 'abduct': ('NN', ['AE0', 'B', 'D', 'AH1', 'K', 'T']), 
'abduction': ('NN', ['AE0', 'B', 'D', 'AH1', 'K', 'SH', 'AH0', 'N']), 'abductor': ('NN', ['AE0', 'B', 'D', 'AH1', 'K', 'T', 'ER0']), 'abed': ('NNS', ['AH0', 'B', 'EH1', 'D']), 'abele': ('NN', ['AH0', 'B', 'IY1', 'L']), 'aberrant': ('NN', ['AE0', 'B', 'EH1', 'R', 'AH0', 'N', 'T']), 'aberration': ('NN', ['AE2', 'B', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'aberrational': ('JJ', ['AE2', 'B', 'ER0', 'EY1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'abetted': ('VBN', ['AH0', 'B', 'EH1', 'T', 'IH0', 'D']), 'abetting': ('VBG', ['AH0', 'B', 'EH1', 'T', 'IH0', 'NG']), 'abet': ('NN', ['AH0', 'B', 'EH1', 'T']), 'abeyance': ('NN', ['AH0', 'B', 'EY1', 'AH0', 'N', 'S']), 'abhorred': ('VBN', ['AH0', 'B', 'HH', 'AO1', 'R', 'D']), 'abhor': ('NN', ['AE0', 'B', 'HH', 'AO1', 'R']), 'abhorrence': ('NN', ['AH0', 'B', 'HH', 'AO1', 'R', 'AH0', 'N', 'S']), 'abhorrent': ('NN', ['AE0', 'B', 'HH', 'AO1', 'R', 'AH0', 'N', 'T']), 'abode': ('NN', ['AH0', 'B', 'OW1', 'D']), 'abid': ('NN', ['EY1', 'B', 'IH0', 'D']), 'abiding': ('VBG', ['AH0', 'B', 'AY1', 'D', 'IH0', 'NG']), 'abide': ('NN', ['AH0', 'B', 'AY1', 'D']), 'abigail': ('NN', ['AE1', 'B', 'AH0', 'G', 'EY2', 'L']), 'abilities': ('NNS', ['AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'ability': ('NN', ['AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'abject': ('NN', ['AE1', 'B', 'JH', 'EH0', 'K', 'T']), 'ablaze': ('NN', ['AH0', 'B', 'L', 'EY1', 'Z']), 'able': ('JJ', ['EY1', 'B', 'AH0', 'L']), 'able-bodied': ('JJ', ['EY1', 'B', 'AH0', 'L', 'B', 'AA1', 'D', 'IY0', 'D']), 'abler': ('NN', ['EY1', 'B', 'AH0', 'L', 'ER0']), 'abloom': ('NN', ['AH0', 'B', 'L', 'UW1', 'M']), 'ably': ('RB', ['EY1', 'B', 'L', 'IY0']), 'abnormal': ('JJ', ['AE0', 'B', 'N', 'AO1', 'R', 'M', 'AH0', 'L']), 'abnormalities': ('NNS', ['AE2', 'B', 'N', 'AO0', 'R', 'M', 'AE1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'abnormality': ('NN', ['AE2', 'B', 'N', 'AO0', 'R', 'M', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'abnormally': ('RB', ['AE0', 'B', 'N', 'AO1', 'R', 'M', 'AH0', 'L', 'IY0']), 'aboard': ('NN', 
['AH0', 'B', 'AO1', 'R', 'D']), 'abolished': ('VBN', ['AH0', 'B', 'AA1', 'L', 'IH0', 'SH', 'T']), 'abolishing': ('VBG', ['AH0', 'B', 'AA1', 'L', 'IH0', 'SH', 'IH0', 'NG']), 'abolish': ('NN', ['AH0', 'B', 'AA1', 'L', 'IH0', 'SH']), 'abolition': ('NN', ['AE2', 'B', 'AH0', 'L', 'IH1', 'SH', 'AH0', 'N']), 'abolitionism': ('NN', ['AE2', 'B', 'AH0', 'L', 'IH1', 'SH', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'abolitionist': ('NN', ['AE2', 'B', 'AH0', 'L', 'IH1', 'SH', 'AH0', 'N', 'AH0', 'S', 'T']), 'abominable': ('JJ', ['AH0', 'B', 'AA1', 'M', 'AH0', 'N', 'AH0', 'B', 'AH0', 'L']), 'abomination': ('NN', ['AH0', 'B', 'AA2', 'M', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'aboriginal': ('JJ', ['AE2', 'B', 'ER0', 'IH1', 'JH', 'AH0', 'N', 'AH0', 'L']), 'aborigines': ('NNS', ['AE2', 'B', 'ER0', 'IH1', 'JH', 'AH0', 'N', 'IY0', 'Z']), 'abort': ('NN', ['AH0', 'B', 'AO1', 'R', 'T']), 'aborted': ('JJ', ['AH0', 'B', 'AO1', 'R', 'T', 'IH0', 'D']), 'abortifacient': ('NN', ['AH0', 'B', 'AO2', 'R', 'T', 'AH0', 'F', 'EY1', 'SH', 'AH0', 'N', 'T']), 'abortion': ('NN', ['AH0', 'B', 'AO1', 'R', 'SH', 'AH0', 'N']), 'abortionist': ('NN', ['AH0', 'B', 'AO1', 'R', 'SH', 'AH0', 'N', 'IH0', 'S', 'T']), 'abortive': ('JJ', ['AH0', 'B', 'AO1', 'R', 'T', 'IH0', 'V']), 'abounded': ('VBD', ['AH0', 'B', 'AW1', 'N', 'D', 'IH0', 'D']), 'abounding': ('VBG', ['AH0', 'B', 'AW1', 'N', 'D', 'IH0', 'NG']), 'abound': ('NN', ['AH0', 'B', 'AW1', 'N', 'D']), 'about': ('IN', ['AH0', 'B', 'AW1', 'T']), 'above': ('IN', ['AH0', 'B', 'AH1', 'V']), 'aboveboard': ('NN', ['AH0', 'B', 'AH1', 'V', 'B', 'AO2', 'R', 'D']), 'abracadabra': ('NN', ['AE2', 'B', 'R', 'AH0', 'K', 'AH0', 'D', 'AE1', 'B', 'R', 'AH0']), 'abrasion': ('NN', ['AH0', 'B', 'R', 'EY1', 'ZH', 'AH0', 'N']), 'abrasive': ('JJ', ['AH0', 'B', 'R', 'EY1', 'S', 'IH0', 'V']), 'abraxas': ('NN', ['AH0', 'B', 'R', 'AE1', 'K', 'S', 'AH0', 'Z']), 'abreast': ('NN', ['AH0', 'B', 'R', 'EH1', 'S', 'T']), 'abridged': ('VBN', ['AH0', 'B', 'R', 'IH1', 'JH', 'D']), 'abridging': ('VBG', 
['AH0', 'B', 'R', 'IH1', 'JH', 'IH0', 'NG']), 'abridge': ('NN', ['AH0', 'B', 'R', 'IH1', 'JH']), 'abroad': ('RB', ['AH0', 'B', 'R', 'AO1', 'D']), 'abrogate': ('NN', ['AE1', 'B', 'R', 'AH0', 'G', 'EY2', 'T']), 'abrogated': ('VBN', ['AE1', 'B', 'R', 'AH0', 'G', 'EY2', 'T', 'IH0', 'D']), 'abrogating': ('VBG', ['AE1', 'B', 'R', 'AH0', 'G', 'EY2', 'T', 'IH0', 'NG']), 'abrogation': ('NN', ['AE2', 'B', 'R', 'AH0', 'G', 'EY1', 'SH', 'AH0', 'N']), 'abrupt': ('NN', ['AH0', 'B', 'R', 'AH1', 'P', 'T']), 'abruptly': ('RB', ['AH0', 'B', 'R', 'AH1', 'P', 'T', 'L', 'IY0']), 'abruptness': ('NN', ['AH0', 'B', 'R', 'AH1', 'P', 'T', 'N', 'AH0', 'S']), 'abscess': ('NN', ['AE1', 'B', 'S', 'EH2', 'S']), 'absconded': ('VBD', ['AE0', 'B', 'S', 'K', 'AA1', 'N', 'D', 'AH0', 'D']), 'absconding': ('VBG', ['AE0', 'B', 'S', 'K', 'AA1', 'N', 'D', 'IH0', 'NG']), 'abscond': ('NN', ['AE0', 'B', 'S', 'K', 'AA1', 'N', 'D']), 'absence': ('NN', ['AE1', 'B', 'S', 'AH0', 'N', 'S']), 'absent': ('NN', ['AE1', 'B', 'S', 'AH0', 'N', 'T']), 'absentee': ('NN', ['AE2', 'B', 'S', 'AH0', 'N', 'T', 'IY1']), 'absenteeism': ('NN', ['AE2', 'B', 'S', 'AH0', 'N', 'T', 'IY1', 'IH0', 'Z', 'AH0', 'M']), 'absinthe': ('NN', ['AE1', 'B', 'S', 'IH0', 'N', 'TH']), 'absolute': ('NN', ['AE1', 'B', 'S', 'AH0', 'L', 'UW2', 'T']), 'absolutely': ('RB', ['AE2', 'B', 'S', 'AH0', 'L', 'UW1', 'T', 'L', 'IY0']), 'absoluteness': ('NN', ['AE1', 'B', 'S', 'AH0', 'L', 'UW2', 'T', 'N', 'AH0', 'S']), 'absolution': ('NN', ['AE2', 'B', 'S', 'AH0', 'L', 'UW1', 'SH', 'AH0', 'N']), 'absolutism': ('NN', ['AE1', 'B', 'S', 'AH0', 'L', 'UW2', 'T', 'IH2', 'Z', 'AH0', 'M']), 'absolutist': ('NN', ['AE0', 'B', 'S', 'IH0', 'L', 'UW1', 'T', 'IH0', 'S', 'T']), 'absolved': ('VBN', ['AH0', 'B', 'Z', 'AA1', 'L', 'V', 'D']), 'absolving': ('VBG', ['AH0', 'B', 'Z', 'AA1', 'L', 'V', 'IH0', 'NG']), 'absolve': ('NN', ['AH0', 'B', 'Z', 'AA1', 'L', 'V']), 'absorbed': ('NNS', ['AH0', 'B', 'Z', 'AO1', 'R', 'B', 'D']), 'absorbing': ('VBG', ['AH0', 'B', 'Z', 'AO1', 'R', 'B', 
'IH0', 'NG']), 'absorb': ('NN', ['AH0', 'B', 'Z', 'AO1', 'R', 'B']), 'absorbency': ('NN', ['AH0', 'B', 'Z', 'AO1', 'R', 'B', 'AH0', 'N', 'S', 'IY0']), 'absorbent': ('NN', ['AH0', 'B', 'Z', 'AO1', 'R', 'B', 'AH0', 'N', 'T']), 'absorber': ('NN', ['AH0', 'B', 'Z', 'AO1', 'R', 'B', 'ER0']), 'absorption': ('NN', ['AH0', 'B', 'Z', 'AO1', 'R', 'P', 'SH', 'AH0', 'N']), 'abstained': ('VBN', ['AH0', 'B', 'S', 'T', 'EY1', 'N', 'D']), 'abstaining': ('VBG', ['AH0', 'B', 'S', 'T', 'EY1', 'N', 'IH0', 'NG']), 'abstain': ('NN', ['AH0', 'B', 'S', 'T', 'EY1', 'N']), 'abstention': ('NN', ['AH0', 'B', 'S', 'T', 'EH1', 'N', 'CH', 'AH0', 'N']), 'abstinence': ('NN', ['AE1', 'B', 'S', 'T', 'AH0', 'N', 'AH0', 'N', 'S']), 'abstinent': ('NN', ['AE1', 'B', 'S', 'T', 'AH0', 'N', 'AH0', 'N', 'T']), 'abstract': ('NN', ['AE0', 'B', 'S', 'T', 'R', 'AE1', 'K', 'T']), 'abstracted': ('VBN', ['AE1', 'B', 'S', 'T', 'R', 'AE2', 'K', 'T', 'IH0', 'D']), 'abstraction': ('NN', ['AE0', 'B', 'S', 'T', 'R', 'AE1', 'K', 'SH', 'AH0', 'N']), 'abstruse': ('NN', ['AH0', 'B', 'S', 'T', 'R', 'UW1', 'S']), 'absurd': ('NN', ['AH0', 'B', 'S', 'ER1', 'D']), 'absurdities': ('NNS', ['AH0', 'B', 'S', 'ER1', 'D', 'AH0', 'T', 'IY0', 'Z']), 'absurdity': ('NN', ['AH0', 'B', 'S', 'ER1', 'D', 'AH0', 'T', 'IY0']), 'absurdly': ('RB', ['AH0', 'B', 'S', 'ER1', 'D', 'L', 'IY0']), 'abundance': ('NN', ['AH0', 'B', 'AH1', 'N', 'D', 'AH0', 'N', 'S']), 'abundant': ('NN', ['AH0', 'B', 'AH1', 'N', 'D', 'AH0', 'N', 'T']), 'abundantly': ('RB', ['AH0', 'B', 'AH1', 'N', 'D', 'AH0', 'N', 'T', 'L', 'IY0']), 'abused': ('VBN', ['AH0', 'B', 'Y', 'UW1', 'Z', 'D']), 'abusing': ('VBG', ['AH0', 'B', 'Y', 'UW1', 'Z', 'IH0', 'NG']), 'abuse': ('NN', ['AH0', 'B', 'Y', 'UW1', 'S']), 'abuser': ('NN', ['AH0', 'B', 'Y', 'UW1', 'Z', 'ER0']), 'abusive': ('JJ', ['AH0', 'B', 'Y', 'UW1', 'S', 'IH0', 'V']), 'abutted': ('VBN', ['AH0', 'B', 'AH1', 'T', 'AH0', 'D']), 'abutting': ('VBG', ['AH0', 'B', 'AH1', 'T', 'IH0', 'NG']), 'abut': ('NN', ['AH0', 'B', 'AH1', 'T']), 
'abuzz': ('NN', ['AH0', 'B', 'AH1', 'Z']), 'abysmal': ('NN', ['AH0', 'B', 'IH1', 'Z', 'M', 'AH0', 'L']), 'abysmally': ('RB', ['AH0', 'B', 'IH1', 'Z', 'M', 'AH0', 'L', 'IY0']), 'abyss': ('NN', ['AH0', 'B', 'IH1', 'S']), 'abyssinian': ('JJ', ['AE0', 'B', 'S', 'IH1', 'N', 'IY2', 'AH0', 'N']), 'acacia': ('NN', ['AH0', 'K', 'EY1', 'SH', 'AH0']), 'academe': ('NN', ['AE1', 'K', 'AH0', 'D', 'IY2', 'M']), 'academic': ('JJ', ['AE2', 'K', 'AH0', 'D', 'EH1', 'M', 'IH0', 'K']), 'academically': ('RB', ['AE2', 'K', 'AH0', 'D', 'EH1', 'M', 'IH0', 'K', 'L', 'IY0']), 'academician': ('JJ', ['AE2', 'K', 'AH0', 'D', 'AH0', 'M', 'IH1', 'SH', 'AH0', 'N']), 'academies': ('NNS', ['AH0', 'K', 'AE1', 'D', 'AH0', 'M', 'IY0', 'Z']), 'academy': ('NN', ['AH0', 'K', 'AE1', 'D', 'AH0', 'M', 'IY0']), 'acantha': ('NN', ['AA0', 'K', 'AA1', 'N', 'DH', 'AH0']), 'acceded': ('VBD', ['AE0', 'K', 'S', 'IY1', 'D', 'IH0', 'D']), 'acceding': ('VBG', ['AE0', 'K', 'S', 'IY1', 'D', 'IH0', 'NG']), 'accede': ('NN', ['AE0', 'K', 'S', 'IY1', 'D']), 'accelerated': ('VBN', ['AE0', 'K', 'S', 'EH1', 'L', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'accelerating': ('VBG', ['AE0', 'K', 'S', 'EH1', 'L', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'accelerate': ('NN', ['AE0', 'K', 'S', 'EH1', 'L', 'ER0', 'EY2', 'T']), 'acceleration': ('NN', ['AE2', 'K', 'S', 'EH2', 'L', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'accelerator': ('NN', ['AE0', 'K', 'S', 'EH1', 'L', 'ER0', 'EY2', 'T', 'ER0']), 'accelerometer': ('NN', ['AE0', 'K', 'S', 'EH2', 'L', 'ER0', 'AA1', 'M', 'AH0', 'T', 'ER0']), 'accent': ('NN', ['AH0', 'K', 'S', 'EH1', 'N', 'T']), 'accented': ('VBN', ['AE1', 'K', 'S', 'EH0', 'N', 'T', 'IH0', 'D']), 'accenting': ('VBG', ['AE1', 'K', 'S', 'EH0', 'N', 'T', 'IH0', 'NG']), 'accentuated': ('VBN', ['AE0', 'K', 'S', 'EH1', 'N', 'CH', 'AH0', 'W', 'EY2', 'T', 'IH0', 'D']), 'accentuating': ('VBG', ['AE0', 'K', 'S', 'EH1', 'N', 'CH', 'AH0', 'W', 'EY2', 'T', 'IH0', 'NG']), 'accentuate': ('NN', ['AE0', 'K', 'S', 'EH1', 'N', 'CH', 'UW0', 'EY0', 'T']), 'accepted': 
('JJ', ['AE0', 'K', 'S', 'EH1', 'P', 'T', 'IH0', 'D']), 'accepting': ('VBG', ['AE0', 'K', 'S', 'EH1', 'P', 'T', 'IH0', 'NG']), 'accept': ('NN', ['AE0', 'K', 'S', 'EH1', 'P', 'T']), 'acceptability': ('NN', ['AH0', 'K', 'S', 'EH2', 'P', 'T', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'acceptable': ('JJ', ['AE0', 'K', 'S', 'EH1', 'P', 'T', 'AH0', 'B', 'AH0', 'L']), 'acceptably': ('RB', ['AE0', 'K', 'S', 'EH1', 'P', 'T', 'AH0', 'B', 'L', 'IY0']), 'acceptance': ('NN', ['AE0', 'K', 'S', 'EH1', 'P', 'T', 'AH0', 'N', 'S']), 'access': ('NN', ['AE1', 'K', 'S', 'EH2', 'S']), 'accessibility': ('NN', ['AE2', 'K', 'S', 'EH0', 'S', 'AH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'accessible': ('JJ', ['AE0', 'K', 'S', 'EH1', 'S', 'AH0', 'B', 'AH0', 'L']), 'accession': ('NN', ['AH0', 'K', 'S', 'EH1', 'SH', 'AH0', 'N']), 'accessory': ('NN', ['AE0', 'K', 'S', 'EH1', 'S', 'ER0', 'IY0']), 'accessories': ('NNS', ['AE0', 'K', 'S', 'EH1', 'S', 'ER0', 'IY0', 'Z']), 'accident': ('NN', ['AE1', 'K', 'S', 'AH0', 'D', 'AH0', 'N', 'T']), 'accidental': ('NN', ['AE2', 'K', 'S', 'AH0', 'D', 'EH1', 'N', 'T', 'AH0', 'L']), 'accidentally': ('RB', ['AE2', 'K', 'S', 'AH0', 'D', 'EH1', 'N', 'T', 'AH0', 'L', 'IY0']), 'acclaim': ('NN', ['AH0', 'K', 'L', 'EY1', 'M']), 'acclamation': ('NN', ['AE2', 'K', 'L', 'AH0', 'M', 'EY1', 'SH', 'AH0', 'N']), 'acclimated': ('VBN', ['AE1', 'K', 'L', 'AH0', 'M', 'EY2', 'T', 'IH0', 'D']), 'acclimate': ('NN', ['AE1', 'K', 'L', 'AH0', 'M', 'EY2', 'T']), 'acclimation': ('NN', ['AE2', 'K', 'L', 'AH0', 'M', 'EY1', 'SH', 'AH0', 'N']), 'accolade': ('NN', ['AE1', 'K', 'AH0', 'L', 'EY2', 'D']), 'accommodated': ('VBN', ['AH0', 'K', 'AA1', 'M', 'AH0', 'D', 'EY2', 'T', 'AH0', 'D']), 'accommodating': ('VBG', ['AH0', 'K', 'AA1', 'M', 'AH0', 'D', 'EY2', 'T', 'IH0', 'NG']), 'accommodate': ('NN', ['AH0', 'K', 'AA1', 'M', 'AH0', 'D', 'EY2', 'T']), 'accommodation': ('NN', ['AH0', 'K', 'AA2', 'M', 'AH0', 'D', 'EY1', 'SH', 'AH0', 'N']), 'accompaniment': ('NN', ['AH0', 'K', 'AH1', 'M', 'P', 'N', 
'IH0', 'M', 'AH0', 'N', 'T']), 'accompanist': ('NN', ['AH0', 'K', 'AH1', 'M', 'P', 'AH0', 'N', 'AH0', 'S', 'T']), 'accompanied': ('VBN', ['AH0', 'K', 'AH1', 'M', 'P', 'AH0', 'N', 'IY0', 'D']), 'accompanying': ('VBG', ['AH0', 'K', 'AH1', 'M', 'P', 'AH0', 'N', 'IY0', 'IH0', 'NG']), 'accompany': ('NN', ['AH0', 'K', 'AH1', 'M', 'P', 'AH0', 'N', 'IY0']), 'accomplice': ('NN', ['AH0', 'K', 'AA1', 'M', 'P', 'L', 'AH0', 'S']), 'accomplished': ('VBN', ['AH0', 'K', 'AA1', 'M', 'P', 'L', 'IH0', 'SH', 'T']), 'accomplishing': ('VBG', ['AH0', 'K', 'AA1', 'M', 'P', 'L', 'IH0', 'SH', 'IH0', 'NG']), 'accomplish': ('NN', ['AH0', 'K', 'AA1', 'M', 'P', 'L', 'IH0', 'SH']), 'accomplishment': ('NN', ['AH0', 'K', 'AA1', 'M', 'P', 'L', 'IH0', 'SH', 'M', 'AH0', 'N', 'T']), 'accord': ('NN', ['AH0', 'K', 'AO1', 'R', 'D']), 'accorded': ('VBN', ['AH0', 'K', 'AO1', 'R', 'D', 'IH0', 'D']), 'according': ('VBG', ['AH0', 'K', 'AO1', 'R', 'D', 'IH0', 'NG']), 'accordance': ('NN', ['AH0', 'K', 'AO1', 'R', 'D', 'AH0', 'N', 'S']), 'accordingly': ('RB', ['AH0', 'K', 'AO1', 'R', 'D', 'IH0', 'NG', 'L', 'IY0']), 'accordion': ('NN', ['AH0', 'K', 'AO1', 'R', 'D', 'IY0', 'AH0', 'N']), 'accosted': ('VBN', ['AH0', 'K', 'AA1', 'S', 'T', 'AH0', 'D']), 'accosting': ('VBG', ['AH0', 'K', 'AA1', 'S', 'T', 'IH0', 'NG']), 'accost': ('NN', ['AH0', 'K', 'AO1', 'S', 'T']), 'account': ('NN', ['AH0', 'K', 'AW1', 'N', 'T']), 'accounted': ('VBD', ['AH0', 'K', 'AW1', 'N', 'T', 'AH0', 'D']), 'accounting': ('NN', ['AH0', 'K', 'AW1', 'N', 'T', 'IH0', 'NG']), 'accountability': ('NN', ['AH0', 'K', 'AW1', 'N', 'T', 'AH0', 'B', 'IH0', 'L', 'IH0', 'T', 'IY0']), 'accountable': ('JJ', ['AH0', 'K', 'AW1', 'N', 'T', 'AH0', 'B', 'AH0', 'L']), 'accountancy': ('NN', ['AH0', 'K', 'AW1', 'N', 'T', 'AH0', 'N', 'S', 'IY0']), 'accountant': ('NN', ['AH0', 'K', 'AW1', 'N', 'T', 'AH0', 'N', 'T']), 'accouterments': ('NNS', ['AH0', 'K', 'UW1', 'T', 'ER0', 'M', 'AH0', 'N', 'T', 'S']), 'accoutrements': ('NNS', ['AH0', 'K', 'UW1', 'T', 'ER0', 'M', 'AH0', 
'N', 'T', 'S']), 'accredited': ('VBN', ['AH0', 'K', 'R', 'EH1', 'D', 'IH0', 'T', 'IH0', 'D']), 'accrediting': ('VBG', ['AH0', 'K', 'R', 'EH1', 'D', 'AH0', 'T', 'IH0', 'NG']), 'accredit': ('NN', ['AH0', 'K', 'R', 'EH2', 'D', 'AH0', 'T']), 'accreditation': ('NN', ['AH0', 'K', 'R', 'EH2', 'D', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'accretion': ('NN', ['AH0', 'K', 'R', 'IY1', 'SH', 'AH0', 'N']), 'accrual': ('JJ', ['AH0', 'K', 'R', 'UW1', 'AH0', 'L']), 'accrued': ('VBN', ['AH0', 'K', 'R', 'UW1', 'D']), 'accruing': ('VBG', ['AH0', 'K', 'R', 'UW1', 'IH0', 'NG']), 'accrue': ('NN', ['AH0', 'K', 'R', 'UW1']), 'accumulated': ('VBN', ['AH0', 'K', 'Y', 'UW1', 'M', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'D']), 'accumulating': ('VBG', ['AH0', 'K', 'Y', 'UW1', 'M', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'accumulate': ('NN', ['AH0', 'K', 'Y', 'UW1', 'M', 'Y', 'AH0', 'L', 'EY2', 'T']), 'accumulation': ('NN', ['AH0', 'K', 'Y', 'UW2', 'M', 'Y', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'accumulative': ('JJ', ['AH0', 'K', 'Y', 'UW1', 'M', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'V']), 'accumulator': ('NN', ['AH0', 'K', 'Y', 'UW1', 'M', 'Y', 'AH0', 'L', 'EY2', 'T', 'ER0']), 'accuracy': ('NN', ['AE1', 'K', 'Y', 'ER0', 'AH0', 'S', 'IY0']), 'accurate': ('NN', ['AE1', 'K', 'Y', 'ER0', 'AH0', 'T']), 'accurately': ('RB', ['AE1', 'K', 'Y', 'ER0', 'AH0', 'T', 'L', 'IY0']), 'accusation': ('NN', ['AE2', 'K', 'Y', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'accusative': ('JJ', ['AH0', 'K', 'Y', 'UW1', 'Z', 'AH0', 'T', 'IH0', 'V']), 'accusatory': ('NN', ['AH0', 'K', 'Y', 'UW1', 'Z', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'accuse': ('NN', ['AH0', 'K', 'Y', 'UW1', 'Z']), 'accused': ('VBN', ['AH0', 'K', 'Y', 'UW1', 'Z', 'D']), 'accusing': ('VBG', ['AH0', 'K', 'Y', 'UW1', 'Z', 'IH0', 'NG']), 'accuser': ('NN', ['AH0', 'K', 'Y', 'UW1', 'Z', 'ER0']), 'accusingly': ('RB', ['AH0', 'K', 'Y', 'UW1', 'Z', 'IH0', 'NG', 'L', 'IY0']), 'accustomed': ('VBN', ['AH0', 'K', 'AH1', 'S', 'T', 'AH0', 'M', 'D']), 'accustom': ('NN', 
['AH0', 'K', 'AH1', 'S', 'T', 'AH0', 'M']), 'aces': ('NNS', ['EY1', 'S', 'IH0', 'Z']), 'ace': ('NN', ['EY1', 'S']), 'acerbic': ('NN', ['AH0', 'S', 'EH1', 'R', 'B', 'IH0', 'K']), 'acetate': ('NN', ['AE1', 'S', 'AH0', 'T', 'EY2', 'T']), 'acetic': ('JJ', ['AH0', 'S', 'EH1', 'T', 'IH0', 'K']), 'acetone': ('NN', ['AE1', 'S', 'AH0', 'T', 'OW2', 'N']), 'acetylene': ('NN', ['AH0', 'S', 'EH1', 'T', 'AH0', 'L', 'IY2', 'N']), 'ache': ('NN', ['EY1', 'K']), 'achaean': ('NN', ['AH0', 'CH', 'IY1', 'AH0', 'N']), 'ached': ('VBN', ['EY1', 'K', 'T']), 'aching': ('VBG', ['EY1', 'K', 'IH0', 'NG']), 'achievable': ('JJ', ['AH0', 'CH', 'IY1', 'V', 'AH0', 'B', 'AH0', 'L']), 'achieved': ('VBN', ['AH0', 'CH', 'IY1', 'V', 'D']), 'achieving': ('VBG', ['AH0', 'CH', 'IY1', 'V', 'IH0', 'NG']), 'achieve': ('NN', ['AH0', 'CH', 'IY1', 'V']), 'achievement': ('NN', ['AH0', 'CH', 'IY1', 'V', 'M', 'AH0', 'N', 'T']), 'achiever': ('NN', ['AH0', 'CH', 'IY1', 'V', 'ER0']), 'acid': ('NN', ['AE1', 'S', 'AH0', 'D']), 'acidic': ('NN', ['AH0', 'S', 'IH1', 'D', 'IH0', 'K']), 'acidification': ('NN', ['AH0', 'S', 'IH2', 'D', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'acidified': ('VBN', ['AH0', 'S', 'IH1', 'D', 'AH0', 'F', 'AY2', 'D']), 'acidify': ('NN', ['AH0', 'S', 'IH1', 'D', 'AH0', 'F', 'AY2']), 'acidity': ('NN', ['AH0', 'S', 'IH1', 'D', 'AH0', 'T', 'IY0']), 'acidly': ('RB', ['AE1', 'S', 'AH0', 'D', 'L', 'IY0']), 'acknowledged': ('VBD', ['AE0', 'K', 'N', 'AA1', 'L', 'IH0', 'JH', 'D']), 'acknowledging': ('VBG', ['AE0', 'K', 'N', 'AA1', 'L', 'IH0', 'JH', 'IH0', 'NG']), 'acknowledge': ('NN', ['AE0', 'K', 'N', 'AA1', 'L', 'IH0', 'JH']), 'acknowledgment': ('NN', ['AE0', 'K', 'N', 'AA1', 'L', 'IH0', 'JH', 'M', 'AH0', 'N', 'T']), 'acme': ('NN', ['AE1', 'K', 'M', 'IY0']), 'acne': ('NN', ['AE1', 'K', 'N', 'IY0']), 'acolyte': ('NN', ['AE1', 'K', 'AH0', 'L', 'AY2', 'T']), 'acorn': ('NN', ['EY1', 'K', 'AO0', 'R', 'N']), 'acoustic': ('JJ', ['AH0', 'K', 'UW1', 'S', 'T', 'IH0', 'K']), 'acoustical': ('JJ', ['AH0', 
'K', 'UW1', 'S', 'T', 'IH0', 'K', 'AH0', 'L']), 'acoustically': ('RB', ['AH0', 'K', 'UW1', 'S', 'T', 'IH0', 'K', 'L', 'IY0']), 'acoustics': ('NNS', ['AH0', 'K', 'UW1', 'S', 'T', 'IH0', 'K', 'S']), 'acquaint': ('NN', ['AH0', 'K', 'W', 'EY1', 'N', 'T']), 'acquainted': ('VBN', ['AH0', 'K', 'W', 'EY1', 'N', 'T', 'IH0', 'D']), 'acquaintance': ('NN', ['AH0', 'K', 'W', 'EY1', 'N', 'T', 'AH0', 'N', 'S']), 'acquaintanceship': ('NN', ['AH0', 'K', 'W', 'EY1', 'N', 'T', 'AH0', 'N', 'S', 'SH', 'IH0', 'P']), 'acquiesced': ('VBN', ['AE2', 'K', 'W', 'IY0', 'EH1', 'S', 'T']), 'acquiescing': ('VBG', ['AE2', 'K', 'W', 'IY0', 'EH1', 'S', 'IH0', 'NG']), 'acquiesce': ('NN', ['AE2', 'K', 'W', 'IY0', 'EH1', 'S']), 'acquiescence': ('NN', ['AE2', 'K', 'W', 'IY0', 'EH1', 'S', 'AH0', 'N', 'S']), 'acquired': ('VBN', ['AH0', 'K', 'W', 'AY1', 'ER0', 'D']), 'acquiring': ('VBG', ['AH0', 'K', 'W', 'AY1', 'R', 'IH0', 'NG']), 'acquire': ('VB', ['AH0', 'K', 'W', 'AY1', 'ER0']), 'acquirer': ('NN', ['AH0', 'K', 'W', 'AY1', 'ER0', 'ER0']), 'acquisition': ('NN', ['AE2', 'K', 'W', 'AH0', 'Z', 'IH1', 'SH', 'AH0', 'N']), 'acquisitive': ('JJ', ['AH0', 'K', 'W', 'IH1', 'Z', 'AH0', 'T', 'IH0', 'V']), 'acquit': ('NN', ['AH0', 'K', 'W', 'IH1', 'T']), 'acquitted': ('VBN', ['AH0', 'K', 'W', 'IH1', 'T', 'AH0', 'D']), 'acquitting': ('VBG', ['AH0', 'K', 'W', 'IH1', 'T', 'IH0', 'NG']), 'acquittal': ('NN', ['AH0', 'K', 'W', 'IH1', 'T', 'AH0', 'L']), 'acre': ('NN', ['EY1', 'K', 'ER0']), 'acreage': ('NN', ['EY1', 'K', 'ER0', 'IH0', 'JH']), 'acrid': ('NN', ['AE1', 'K', 'R', 'IH0', 'D']), 'acrimonious': ('JJ', ['AE2', 'K', 'R', 'AH0', 'M', 'OW1', 'N', 'IY0', 'AH0', 'S']), 'acrimony': ('NN', ['AE1', 'K', 'R', 'IH0', 'M', 'OW2', 'N', 'IY0']), 'acrobat': ('NN', ['AE1', 'K', 'R', 'AH0', 'B', 'AE2', 'T']), 'acrobatic': ('JJ', ['AE2', 'K', 'R', 'AH0', 'B', 'AE1', 'T', 'IH0', 'K']), 'acrolein': ('NN', ['AE1', 'K', 'R', 'OW0', 'L', 'IY2', 'N']), 'acropolis': ('NNS', ['AH0', 'K', 'R', 'AA1', 'P', 'AH0', 'L', 'AH0', 'S']), 'across': 
('IN', ['AH0', 'K', 'R', 'AO1', 'S']), 'acrylic': ('NN', ['AH0', 'K', 'R', 'IH1', 'L', 'IH0', 'K']), 'act': ('NN', ['AE1', 'K', 'T']), 'acted': ('VBN', ['AE1', 'K', 'T', 'AH0', 'D']), 'acting': ('VBG', ['AE1', 'K', 'T', 'IH0', 'NG']), 'action': ('NN', ['AE1', 'K', 'SH', 'AH0', 'N']), 'actionable': ('JJ', ['AE1', 'K', 'SH', 'AH0', 'N', 'AH0', 'B', 'AH0', 'L']), 'activate': ('NN', ['AE1', 'K', 'T', 'AH0', 'V', 'EY2', 'T']), 'active': ('JJ', ['AE1', 'K', 'T', 'IH0', 'V']), 'actively': ('RB', ['AE1', 'K', 'T', 'IH0', 'V', 'L', 'IY0']), 'activities': ('NNS', ['AE0', 'K', 'T', 'IH1', 'V', 'AH0', 'T', 'IY0', 'Z']), 'activity': ('NN', ['AE0', 'K', 'T', 'IH1', 'V', 'AH0', 'T', 'IY0']), 'acton': ('NN', ['AE1', 'K', 'T', 'AH0', 'N']), 'actor': ('NN', ['AE1', 'K', 'T', 'ER0']), 'actress': ('NN', ['AE1', 'K', 'T', 'R', 'AH0', 'S']), 'actual': ('JJ', ['AE1', 'K', 'CH', 'AH0', 'W', 'AH0', 'L']), 'actuality': ('NN', ['AE2', 'K', 'CH', 'AH0', 'W', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'actualize': ('VB', ['AE1', 'K', 'CH', 'AH0', 'W', 'AH0', 'L', 'AY2', 'Z']), 'actually': ('RB', ['AE1', 'K', 'CH', 'AH0', 'W', 'AH0', 'L', 'IY0']), 'actuarial': ('JJ', ['AE2', 'K', 'CH', 'AH0', 'W', 'EH1', 'R', 'IY0', 'AH0', 'L']), 'actuaries': ('NNS', ['AE1', 'K', 'CH', 'AH0', 'W', 'EH2', 'R', 'IY0', 'Z']), 'actuary': ('JJ', ['AE1', 'K', 'CH', 'AH0', 'W', 'EH2', 'R', 'IY0']), 'actuate': ('NN', ['AE1', 'K', 'CH', 'UW0', 'W', 'EY2', 'T']), 'actuator': ('NN', ['AE1', 'K', 'T', 'Y', 'UW0', 'EY2', 'T', 'ER0']), 'acuity': ('NN', ['AH0', 'K', 'Y', 'UW1', 'AH0', 'T', 'IY0']), 'acumen': ('NNS', ['AH0', 'K', 'Y', 'UW1', 'M', 'AH0', 'N']), 'acupuncture': ('NN', ['AE1', 'K', 'Y', 'UW0', 'P', 'AH2', 'NG', 'K', 'CH', 'ER0']), 'acute': ('NN', ['AH0', 'K', 'Y', 'UW1', 'T']), 'acutely': ('RB', ['AH0', 'K', 'Y', 'UW1', 'T', 'L', 'IY0']), 'acuteness': ('NN', ['AH0', 'K', 'Y', 'UW1', 'T', 'N', 'AH0', 'S']), 'adage': ('NN', ['AE1', 'D', 'AH0', 'JH']), 'adagio': ('NN', ['AH0', 'D', 'AA1', 'ZH', 'IY0', 'OW2']), 'adam': ('NN', 
['AE1', 'D', 'AH0', 'M']), 'adamant': ('NN', ['AE1', 'D', 'AH0', 'M', 'AH0', 'N', 'T']), 'adapt': ('NN', ['AH0', 'D', 'AE1', 'P', 'T']), 'adapted': ('VBN', ['AH0', 'D', 'AE1', 'P', 'T', 'AH0', 'D']), 'adapting': ('VBG', ['AH0', 'D', 'AE1', 'P', 'T', 'IH0', 'NG']), 'adaptability': ('NN', ['AH0', 'D', 'AE2', 'P', 'T', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'adaptable': ('JJ', ['AH0', 'D', 'AE1', 'P', 'T', 'AH0', 'B', 'AH0', 'L']), 'adaptation': ('NN', ['AE2', 'D', 'AH0', 'P', 'T', 'EY1', 'SH', 'AH0', 'N']), 'adapter': ('NN', ['AH0', 'D', 'AE1', 'P', 'T', 'ER0']), 'adaptive': ('JJ', ['AH0', 'D', 'AE1', 'P', 'T', 'IH0', 'V']), 'adar': ('NN', ['AH0', 'D', 'AA1', 'R']), 'added': ('VBD', ['AE1', 'D', 'AH0', 'D']), 'adding': ('VBG', ['AE1', 'D', 'IH0', 'NG']), 'add': ('VB', ['AE1', 'D']), 'addendum': ('NN', ['AH0', 'D', 'EH1', 'D', 'AH0', 'M']), 'adder': ('NN', ['AE1', 'D', 'ER0']), 'addict': ('NN', ['AH0', 'D', 'IH1', 'K', 'T']), 'addicted': ('VBN', ['AH0', 'D', 'IH1', 'K', 'T', 'AH0', 'D']), 'addicting': ('VBG', ['AH0', 'D', 'IH1', 'K', 'T', 'IH0', 'NG']), 'addiction': ('NN', ['AH0', 'D', 'IH1', 'K', 'SH', 'AH0', 'N']), 'addition': ('NN', ['AH0', 'D', 'IH1', 'SH', 'AH0', 'N']), 'additional': ('JJ', ['AH0', 'D', 'IH1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'additionally': ('RB', ['AH0', 'D', 'IH1', 'SH', 'AH0', 'N', 'AH0', 'L', 'IY0']), 'additive': ('JJ', ['AE1', 'D', 'AH0', 'T', 'IH0', 'V']), 'addle': ('NN', ['AE1', 'D', 'AH0', 'L']), 'addled': ('VBN', ['AE1', 'D', 'AH0', 'L', 'D']), 'addressed': ('VBN', ['AH0', 'D', 'R', 'EH1', 'S', 'T']), 'addressing': ('VBG', ['AH0', 'D', 'R', 'EH1', 'S', 'IH0', 'NG']), 'address': ('NN', ['AE1', 'D', 'R', 'EH2', 'S']), 'addressee': ('NN', ['AE2', 'D', 'R', 'EH0', 'S', 'IY1']), 'adduct': ('NN', ['AE1', 'D', 'AH0', 'K', 'T']), 'adelphia': ('NN', ['AH0', 'D', 'EH1', 'L', 'F', 'IY0', 'AH0']), 'adenoid': ('NN', ['AE1', 'D', 'AH0', 'N', 'OY2', 'D']), 'adept': ('NN', ['AH0', 'D', 'EH1', 'P', 'T']), 'adequacy': ('NN', ['AE1', 'D', 'AH0', 'K', 
'W', 'AH0', 'S', 'IY0']), 'adequate': ('NN', ['AE1', 'D', 'AH0', 'K', 'W', 'AH0', 'T']), 'adequately': ('RB', ['AE1', 'D', 'AH0', 'K', 'W', 'AH0', 'T', 'L', 'IY0']), 'adhered': ('VBN', ['AE0', 'D', 'HH', 'IH1', 'R', 'D']), 'adhering': ('VBG', ['AH0', 'D', 'HH', 'IH1', 'R', 'IH0', 'NG']), 'adhere': ('RB', ['AH0', 'D', 'HH', 'IH1', 'R']), 'adherence': ('NN', ['AH0', 'D', 'HH', 'IH1', 'R', 'AH0', 'N', 'S']), 'adherent': ('NN', ['AH0', 'D', 'HH', 'IH1', 'R', 'AH0', 'N', 'T']), 'adhesion': ('NN', ['AE0', 'D', 'HH', 'IY1', 'ZH', 'AH0', 'N']), 'adhesive': ('JJ', ['AE0', 'D', 'HH', 'IY1', 'S', 'IH0', 'V']), 'adieu': ('NN', ['AH0', 'D', 'UW1']), 'adipose': ('NN', ['AE1', 'D', 'AH0', 'P', 'OW2', 'S']), 'adjacent': ('NN', ['AH0', 'JH', 'EY1', 'S', 'AH0', 'N', 'T']), 'adjective': ('JJ', ['AE1', 'JH', 'IH0', 'K', 'T', 'IH0', 'V']), 'adjoining': ('VBG', ['AH0', 'JH', 'OY1', 'N', 'IH0', 'NG']), 'adjoin': ('NN', ['AH0', 'JH', 'OY1', 'N']), 'adjourned': ('VBN', ['AH0', 'JH', 'ER1', 'N', 'D']), 'adjourning': ('VBG', ['AH0', 'JH', 'ER1', 'N', 'IH0', 'NG']), 'adjourn': ('NN', ['AH0', 'JH', 'ER1', 'N']), 'adjournment': ('NN', ['AH0', 'JH', 'ER1', 'N', 'M', 'AH0', 'N', 'T']), 'adjudged': ('VBN', ['AH0', 'JH', 'AH1', 'JH', 'D']), 'adjudge': ('NN', ['AH0', 'JH', 'AH1', 'JH']), 'adjudicated': ('VBN', ['AH0', 'JH', 'UW1', 'D', 'AH0', 'K', 'EY2', 'T', 'IH0', 'D']), 'adjudicating': ('VBG', ['AH0', 'JH', 'UW1', 'D', 'IH0', 'K', 'EY2', 'T', 'IH0', 'NG']), 'adjudicate': ('NN', ['AH0', 'JH', 'UW1', 'D', 'IH0', 'K', 'EY2', 'T']), 'adjudication': ('NN', ['AH0', 'JH', 'UW2', 'D', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'adjunct': ('NN', ['AE1', 'JH', 'AH2', 'NG', 'K', 'T']), 'adjusted': ('VBN', ['AH0', 'JH', 'AH1', 'S', 'T', 'AH0', 'D']), 'adjusting': ('VBG', ['AH0', 'JH', 'AH1', 'S', 'T', 'IH0', 'NG']), 'adjust': ('NN', ['AH0', 'JH', 'AH1', 'S', 'T']), 'adjustable': ('JJ', ['AH0', 'JH', 'AH1', 'S', 'T', 'AH0', 'B', 'AH0', 'L']), 'adjuster': ('NN', ['AH0', 'JH', 'AH1', 'S', 'T', 'ER0']), 
'adjustment': ('NN', ['AH0', 'JH', 'AH1', 'S', 'T', 'M', 'AH0', 'N', 'T']), 'adjutant': ('NN', ['AE1', 'JH', 'AH0', 'T', 'AH0', 'N', 'T']), 'administered': ('VBN', ['AH0', 'D', 'M', 'IH1', 'N', 'AH0', 'S', 'T', 'ER0', 'D']), 'administering': ('VBG', ['AE0', 'D', 'M', 'IH1', 'N', 'IH0', 'S', 'T', 'ER0', 'IH0', 'NG']), 'administer': ('NN', ['AH0', 'D', 'M', 'IH1', 'N', 'AH0', 'S', 'T', 'ER0']), 'administrate': ('NN', ['AE0', 'D', 'M', 'IH1', 'N', 'IH0', 'S', 'T', 'R', 'EY2', 'T']), 'administration': ('NN', ['AE0', 'D', 'M', 'IH2', 'N', 'IH0', 'S', 'T', 'R', 'EY1', 'SH', 'AH0', 'N']), 'administrative': ('JJ', ['AH0', 'D', 'M', 'IH1', 'N', 'AH0', 'S', 'T', 'R', 'EY2', 'T', 'IH0', 'V']), 'administrator': ('NN', ['AH0', 'D', 'M', 'IH1', 'N', 'AH0', 'S', 'T', 'R', 'EY2', 'T', 'ER0']), 'admirable': ('JJ', ['AE1', 'D', 'M', 'ER0', 'AH0', 'B', 'AH0', 'L']), 'admirably': ('RB', ['AE1', 'D', 'M', 'ER0', 'AH0', 'B', 'L', 'IY0']), 'admiral': ('JJ', ['AE1', 'D', 'M', 'ER0', 'AH0', 'L']), 'admiralty': ('NN', ['AE1', 'D', 'M', 'ER0', 'AH0', 'L', 'T', 'IY0']), 'admiration': ('NN', ['AE2', 'D', 'M', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'admired': ('VBN', ['AH0', 'D', 'M', 'AY1', 'ER0', 'D']), 'admiring': ('VBG', ['AE0', 'D', 'M', 'AY1', 'R', 'IH0', 'NG']), 'admire': ('NN', ['AE0', 'D', 'M', 'AY1', 'R']), 'admirer': ('NN', ['AE0', 'D', 'M', 'AY1', 'R', 'ER0']), 'admissibility': ('NN', ['AH0', 'D', 'M', 'IH2', 'S', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'admissible': ('JJ', ['AH0', 'D', 'M', 'IH1', 'S', 'AH0', 'B', 'AH0', 'L']), 'admission': ('NN', ['AE0', 'D', 'M', 'IH1', 'SH', 'AH0', 'N']), 'admitted': ('VBN', ['AH0', 'D', 'M', 'IH1', 'T', 'AH0', 'D']), 'admitting': ('VBG', ['AE0', 'D', 'M', 'IH1', 'T', 'IH0', 'NG']), 'admit': ('NN', ['AH0', 'D', 'M', 'IH1', 'T']), 'admittance': ('NN', ['AH0', 'D', 'M', 'IH1', 'T', 'AH0', 'N', 'S']), 'admittedly': ('RB', ['AE0', 'D', 'M', 'IH1', 'T', 'IH0', 'D', 'L', 'IY0']), 'admonished': ('VBN', ['AH0', 'D', 'M', 'AA1', 'N', 'IH0', 'SH', 'T']), 
'admonishing': ('VBG', ['AE0', 'D', 'M', 'AA1', 'N', 'IH0', 'SH', 'IH0', 'NG']), 'admonish': ('JJ', ['AE0', 'D', 'M', 'AA1', 'N', 'IH0', 'SH']), 'admonishment': ('NN', ['AE0', 'D', 'M', 'AA1', 'N', 'IH0', 'SH', 'M', 'EH0', 'N', 'T']), 'admonition': ('NN', ['AE2', 'D', 'M', 'AH0', 'N', 'IH1', 'SH', 'AH0', 'N']), 'ado': ('NN', ['AH0', 'D', 'UW1']), 'adobe': ('NN', ['AH0', 'D', 'OW1', 'B', 'IY0']), 'adolescence': ('NN', ['AE2', 'D', 'AH0', 'L', 'EH1', 'S', 'AH0', 'N', 'S']), 'adolescent': ('NN', ['AE2', 'D', 'AH0', 'L', 'EH1', 'S', 'AH0', 'N', 'T']), 'adonis': ('NN', ['AH0', 'D', 'AA1', 'N', 'AH0', 'S']), 'adopted': ('VBN', ['AH0', 'D', 'AA1', 'P', 'T', 'AH0', 'D']), 'adopting': ('VBG', ['AH0', 'D', 'AA1', 'P', 'T', 'IH0', 'NG']), 'adopt': ('NN', ['AH0', 'D', 'AA1', 'P', 'T']), 'adoptable': ('JJ', ['AH0', 'D', 'AA1', 'P', 'T', 'AH0', 'B', 'AH0', 'L']), 'adopter': ('NN', ['AH0', 'D', 'AA1', 'P', 'T', 'ER0']), 'adoption': ('NN', ['AH0', 'D', 'AA1', 'P', 'SH', 'AH0', 'N']), 'adoptive': ('JJ', ['AH0', 'D', 'AA1', 'P', 'T', 'IH0', 'V']), 'adorable': ('JJ', ['AH0', 'D', 'AO1', 'R', 'AH0', 'B', 'AH0', 'L']), 'adoration': ('NN', ['AE2', 'D', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'adoring': ('VBG', ['AH0', 'D', 'AO1', 'R', 'IH0', 'NG']), 'adore': ('NN', ['AH0', 'D', 'AO1', 'R']), 'adorned': ('VBN', ['AH0', 'D', 'AO1', 'R', 'N', 'D']), 'adorning': ('VBG', ['AH0', 'D', 'AO1', 'R', 'N', 'IH0', 'NG']), 'adorn': ('NN', ['AH0', 'D', 'AO1', 'R', 'N']), 'adornment': ('NN', ['AH0', 'D', 'AO1', 'R', 'N', 'M', 'AH0', 'N', 'T']), 'adrenal': ('JJ', ['AH0', 'D', 'R', 'IY1', 'N', 'AH0', 'L']), 'adrian': ('JJ', ['EY1', 'D', 'R', 'IY0', 'AH0', 'N']), 'adriatic': ('JJ', ['EY2', 'D', 'R', 'IY0', 'AE1', 'T', 'IH0', 'K']), 'adrift': ('NN', ['AH0', 'D', 'R', 'IH1', 'F', 'T']), 'adroit': ('NNS', ['AH0', 'D', 'R', 'OY1', 'T']), 'adroitly': ('RB', ['AH0', 'D', 'R', 'OY1', 'T', 'L', 'IY0']), 'adulate': ('NN', ['AE1', 'JH', 'AH0', 'L', 'EY2', 'T']), 'adulation': ('NN', ['AE2', 'JH', 'AH0', 'L', 'EY1', 'SH', 
'AH0', 'N']), 'adult': ('NN', ['AH0', 'D', 'AH1', 'L', 'T']), 'adulterated': ('VBN', ['AH0', 'D', 'AH1', 'L', 'T', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'adulterate': ('NN', ['AH0', 'D', 'AH1', 'L', 'T', 'ER0', 'EY2', 'T']), 'adulteration': ('NN', ['AH0', 'D', 'AH2', 'L', 'T', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'adulterer': ('NN', ['AH0', 'D', 'AH1', 'L', 'T', 'ER0', 'ER0']), 'adulterous': ('JJ', ['AH0', 'D', 'AH1', 'L', 'T', 'ER0', 'AH0', 'S']), 'adultery': ('NN', ['AH0', 'D', 'AH1', 'L', 'T', 'ER0', 'IY0']), 'advanced': ('JJ', ['AH0', 'D', 'V', 'AE1', 'N', 'S', 'T']), 'advancing': ('VBG', ['AH0', 'D', 'V', 'AE1', 'N', 'S', 'IH0', 'NG']), 'advance': ('NN', ['AH0', 'D', 'V', 'AE1', 'N', 'S']), 'advancement': ('NN', ['AH0', 'D', 'V', 'AE1', 'N', 'S', 'M', 'AH0', 'N', 'T']), 'advancer': ('NN', ['AH0', 'D', 'V', 'AE1', 'N', 'S', 'ER0']), 'advantage': ('NN', ['AE0', 'D', 'V', 'AE1', 'N', 'T', 'IH0', 'JH']), 'advantaged': ('VBN', ['AE0', 'D', 'V', 'AE1', 'N', 'T', 'IH0', 'JH', 'D']), 'advantageous': ('JJ', ['AE2', 'D', 'V', 'AH0', 'N', 'T', 'EY1', 'JH', 'AH0', 'S']), 'advent': ('NN', ['AE1', 'D', 'V', 'EH2', 'N', 'T']), 'adventist': ('NN', ['AE1', 'D', 'V', 'EH2', 'N', 'T', 'IH0', 'S', 'T']), 'adventure': ('NN', ['AE0', 'D', 'V', 'EH1', 'N', 'CH', 'ER0']), 'adventurer': ('NN', ['AE0', 'D', 'V', 'EH1', 'N', 'CH', 'ER0', 'ER0']), 'adventuresome': ('NN', ['AE0', 'D', 'V', 'EH1', 'N', 'CH', 'ER0', 'S', 'AH0', 'M']), 'adventurous': ('JJ', ['AE0', 'D', 'V', 'EH1', 'N', 'CH', 'ER0', 'AH0', 'S']), 'adverb': ('NN', ['AE1', 'D', 'V', 'ER0', 'B']), 'adverbial': ('JJ', ['AE0', 'D', 'V', 'ER1', 'B', 'IY0', 'AH0', 'L']), 'adversaries': ('NNS', ['AE1', 'D', 'V', 'ER0', 'S', 'EH2', 'R', 'IY0', 'Z']), 'adversary': ('NN', ['AE1', 'D', 'V', 'ER0', 'S', 'EH2', 'R', 'IY0']), 'adverse': ('NN', ['AE0', 'D', 'V', 'ER1', 'S']), 'adversely': ('RB', ['AE0', 'D', 'V', 'ER1', 'S', 'L', 'IY0']), 'adversity': ('NN', ['AE0', 'D', 'V', 'ER1', 'S', 'IH0', 'T', 'IY0']), 'advertised': ('JJ', ['AE1', 'D', 'V', 
'ER0', 'T', 'AY2', 'Z', 'D']), 'advertising': ('NN', ['AE1', 'D', 'V', 'ER0', 'T', 'AY2', 'Z', 'IH0', 'NG']), 'advertise': ('NN', ['AE1', 'D', 'V', 'ER0', 'T', 'AY2', 'Z']), 'advertisement': ('NN', ['AH0', 'D', 'V', 'ER1', 'T', 'AH0', 'Z', 'M', 'AH0', 'N', 'T']), 'advertiser': ('NN', ['AE1', 'D', 'V', 'ER0', 'T', 'AY2', 'Z', 'ER0']), 'advice': ('NN', ['AE0', 'D', 'V', 'AY1', 'S']), 'advisability': ('NN', ['AE2', 'D', 'V', 'AY2', 'Z', 'AH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'advisable': ('JJ', ['AH0', 'D', 'V', 'AY1', 'Z', 'AH0', 'B', 'AH0', 'L']), 'advised': ('VBN', ['AE0', 'D', 'V', 'AY1', 'Z', 'D']), 'advising': ('VBG', ['AE0', 'D', 'V', 'AY1', 'Z', 'IH0', 'NG']), 'advise': ('NN', ['AE0', 'D', 'V', 'AY1', 'Z']), 'advisedly': ('RB', ['AE0', 'D', 'V', 'AY1', 'Z', 'AH0', 'D', 'L', 'IY0']), 'advisement': ('NN', ['AH0', 'D', 'V', 'AY1', 'Z', 'M', 'AH0', 'N', 'T']), 'adviser': ('NN', ['AE0', 'D', 'V', 'AY1', 'Z', 'ER0']), 'advisory': ('NN', ['AE0', 'D', 'V', 'AY1', 'Z', 'ER0', 'IY0']), 'advocacy': ('NN', ['AE1', 'D', 'V', 'AH0', 'K', 'AH0', 'S', 'IY0']), 'advocate': ('NN', ['AE1', 'D', 'V', 'AH0', 'K', 'AH0', 'T']), 'advocated': ('VBN', ['AE1', 'D', 'V', 'AH0', 'K', 'EY2', 'T', 'AH0', 'D']), 'advocating': ('VBG', ['AE1', 'D', 'V', 'AH0', 'K', 'EY2', 'T', 'IH0', 'NG']), 'advocation': ('NN', ['AE2', 'D', 'V', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'adz': ('NN', ['AE1', 'D', 'Z']), 'ae': ('NN', ['EY1']), 'aegean': ('NN', ['IH0', 'JH', 'IY1', 'AH0', 'N']), 'aegis': ('NN', ['IY1', 'JH', 'AH0', 'S']), 'aeneid': ('NN', ['AH0', 'N', 'IY1', 'IH0', 'D']), 'aerial': ('JJ', ['EH1', 'R', 'IY0', 'AH0', 'L']), 'aerie': ('NN', ['EH1', 'R', 'IY0']), 'aerodynamic': ('JJ', ['EH2', 'R', 'OW0', 'D', 'AY0', 'N', 'AE1', 'M', 'IH0', 'K']), 'aerodynamics': ('NNS', ['EH2', 'R', 'OW0', 'D', 'AY0', 'N', 'AE1', 'M', 'IH0', 'K', 'S']), 'aeronautic': ('JJ', ['EH2', 'R', 'OW0', 'N', 'AA1', 'T', 'AH0', 'K']), 'aeronautical': ('JJ', ['EH2', 'R', 'OW0', 'N', 'AA1', 'T', 'AH0', 'K', 'AH0', 'L']), 
'aeronautics': ('NNS', ['EH2', 'R', 'AH0', 'N', 'AO1', 'T', 'IH0', 'K', 'S']), 'aerostat': ('NN', ['EH1', 'R', 'OW0', 'S', 'T', 'AE2', 'T']), 'aesthete': ('NN', ['EH1', 'S', 'TH', 'IY0', 'T']), 'aesthetic': ('JJ', ['EH0', 'S', 'TH', 'EH1', 'T', 'IH0', 'K']), 'aesthetics': ('NNS', ['EH0', 'S', 'TH', 'EH1', 'T', 'IH0', 'K', 'S']), 'esthetics': ('NNS', ['EH0', 'S', 'TH', 'EH1', 'T', 'IH0', 'K', 'S']), 'afar': ('NN', ['AH0', 'F', 'AA1', 'R']), 'affable': ('JJ', ['AE1', 'F', 'AH0', 'B', 'AH0', 'L']), 'affably': ('RB', ['AE1', 'F', 'AH0', 'B', 'L', 'IY0']), 'affair': ('NN', ['AH0', 'F', 'EH1', 'R']), 'affected': ('JJ', ['AH0', 'F', 'EH1', 'K', 'T', 'AH0', 'D']), 'affecting': ('VBG', ['AH0', 'F', 'EH1', 'K', 'T', 'IH0', 'NG']), 'affect': ('NN', ['AH0', 'F', 'EH1', 'K', 'T']), 'affectation': ('NN', ['AE2', 'F', 'EH0', 'K', 'T', 'EY1', 'SH', 'AH0', 'N']), 'affection': ('NN', ['AH0', 'F', 'EH1', 'K', 'SH', 'AH0', 'N']), 'affectionate': ('NN', ['AH0', 'F', 'EH1', 'K', 'SH', 'AH0', 'N', 'AH0', 'T']), 'affectionately': ('RB', ['AH0', 'F', 'EH1', 'K', 'SH', 'AH0', 'N', 'AH0', 'T', 'L', 'IY0']), 'affective': ('JJ', ['AH0', 'F', 'EH1', 'K', 'T', 'IH0', 'V']), 'affectively': ('RB', ['AH0', 'F', 'EH1', 'K', 'T', 'IH0', 'V', 'L', 'IY0']), 'afferent': ('NN', ['AE1', 'F', 'ER0', 'AH0', 'N', 'T']), 'affiant': ('NN', ['AE1', 'F', 'IY0', 'AH0', 'N', 'T']), 'affidavit': ('NN', ['AE2', 'F', 'AH0', 'D', 'EY1', 'V', 'AH0', 'T']), 'affiliated': ('VBN', ['AH0', 'F', 'IH1', 'L', 'IY0', 'EY2', 'T', 'AH0', 'D']), 'affiliating': ('VBG', ['AH0', 'F', 'IH1', 'L', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'affiliate': ('NN', ['AH0', 'F', 'IH1', 'L', 'IY0', 'EY2', 'T']), 'affiliation': ('NN', ['AH0', 'F', 'IH2', 'L', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'affine': ('NN', ['AH0', 'F', 'AY1', 'N']), 'affined': ('VBN', ['AH0', 'F', 'AY1', 'N', 'D']), 'affinities': ('NNS', ['AH0', 'F', 'IH1', 'N', 'AH0', 'T', 'IY0', 'Z']), 'affinity': ('NN', ['AH0', 'F', 'IH1', 'N', 'AH0', 'T', 'IY0']), 'affirmed': ('VBN', ['AH0', 
'F', 'ER1', 'M', 'D']), 'affirming': ('VBG', ['AH0', 'F', 'ER1', 'M', 'IH0', 'NG']), 'affirm': ('NN', ['AH0', 'F', 'ER1', 'M']), 'affirmation': ('NN', ['AE2', 'F', 'ER0', 'M', 'EY1', 'SH', 'AH0', 'N']), 'affirmative': ('JJ', ['AH0', 'F', 'ER1', 'M', 'AH0', 'T', 'IH0', 'V']), 'affirmatively': ('RB', ['AH0', 'F', 'ER1', 'M', 'AH0', 'T', 'IH0', 'V', 'L', 'IY0']), 'affixed': ('NN', ['AH0', 'F', 'IH1', 'K', 'S', 'T']), 'affixing': ('VBG', ['AH0', 'F', 'IH1', 'K', 'S', 'IH0', 'NG']), 'affix': ('NN', ['AE1', 'F', 'IH0', 'K', 'S']), 'affixes': ('NNS', ['AE1', 'F', 'IH0', 'K', 'S', 'IH0', 'Z']), 'afflicted': ('VBN', ['AH0', 'F', 'L', 'IH1', 'K', 'T', 'AH0', 'D']), 'afflicting': ('VBG', ['AH0', 'F', 'L', 'IH1', 'K', 'T', 'IH0', 'NG']), 'afflict': ('NN', ['AH0', 'F', 'L', 'IH1', 'K', 'T']), 'affliction': ('NN', ['AH0', 'F', 'L', 'IH1', 'K', 'SH', 'AH0', 'N']), 'affluence': ('NN', ['AE1', 'F', 'L', 'UW0', 'AH0', 'N', 'S']), 'affluent': ('NN', ['AE1', 'F', 'L', 'UW0', 'AH0', 'N', 'T']), 'afforded': ('VBD', ['AH0', 'F', 'AO1', 'R', 'D', 'AH0', 'D']), 'affording': ('VBG', ['AH0', 'F', 'AO1', 'R', 'D', 'IH0', 'NG']), 'afford': ('NN', ['AH0', 'F', 'AO1', 'R', 'D']), 'affordable': ('JJ', ['AH0', 'F', 'AO1', 'R', 'D', 'AH0', 'B', 'AH0', 'L']), 'affronted': ('VBN', ['AH0', 'F', 'R', 'AH1', 'N', 'T', 'IH0', 'D']), 'affronting': ('VBG', ['AH0', 'F', 'R', 'AH1', 'N', 'T', 'IH0', 'NG']), 'affront': ('NN', ['AH0', 'F', 'R', 'AH1', 'N', 'T']), 'afghan': ('NN', ['AE1', 'F', 'G', 'AE2', 'N']), 'afield': ('NN', ['AH0', 'F', 'IY1', 'L', 'D']), 'afire': ('NN', ['AH0', 'F', 'AY1', 'R']), 'aflame': ('NN', ['AH0', 'F', 'L', 'EY1', 'M']), 'afloat': ('NN', ['AH0', 'F', 'L', 'OW1', 'T']), 'aflutter': ('NN', ['AH0', 'F', 'L', 'AH1', 'T', 'ER0']), 'afoot': ('NN', ['AH0', 'F', 'UH1', 'T']), 'aforementioned': ('VBN', ['AH0', 'F', 'AO1', 'R', 'M', 'EH2', 'N', 'SH', 'AH0', 'N', 'D']), 'aforesaid': ('NN', ['AH0', 'F', 'AO1', 'R', 'S', 'EH2', 'D']), 'aforethought': ('NN', ['AH0', 'F', 'AO1', 'R', 'TH', 'AA2', 
'T']), 'afoul': ('NN', ['AH0', 'F', 'AW1', 'L']), 'afraid': ('NN', ['AH0', 'F', 'R', 'EY1', 'D']), 'afresh': ('NN', ['AH0', 'F', 'R', 'EH1', 'SH']), 'african': ('JJ', ['AE1', 'F', 'R', 'AH0', 'K', 'AH0', 'N']), 'africanize': ('VB', ['AE1', 'F', 'R', 'AH0', 'K', 'AH0', 'N', 'AY2', 'Z']), 'aft': ('NN', ['AE1', 'F', 'T']), 'after': ('IN', ['AE1', 'F', 'T', 'ER0']), 'aftermath': ('NN', ['AE1', 'F', 'T', 'ER0', 'M', 'AE2', 'TH']), 'afternoon': ('NN', ['AE2', 'F', 'T', 'ER0', 'N', 'UW1', 'N']), 'aftertaste': ('NN', ['AE1', 'F', 'T', 'ER0', 'T', 'EY2', 'S', 'T']), 'afterthought': ('NN', ['AE1', 'F', 'T', 'ER0', 'TH', 'AA2', 'T']), 'afterwards': ('NNS', ['AE1', 'F', 'T', 'ER0', 'W', 'ER0', 'D', 'Z']), 'afterward': ('RB', ['AE1', 'F', 'T', 'ER0', 'W', 'ER0', 'D']), 'aga': ('NN', ['AA1', 'G', 'AH0']), 'again': ('RB', ['AH0', 'G', 'EH1', 'N']), 'against': ('IN', ['AH0', 'G', 'EH1', 'N', 'S', 'T']), 'agape': ('NN', ['AH0', 'G', 'EY1', 'P']), 'agasp': ('NN', ['AH0', 'G', 'AE1', 'S', 'P']), 'aghast': ('NN', ['AH0', 'G', 'AE1', 'S', 'T']), 'agate': ('NN', ['AE1', 'G', 'AH0', 'T']), 'age': ('NN', ['EY1', 'JH']), 'aged': ('VBN', ['EY1', 'JH', 'D']), 'aging': ('VBG', ['EY1', 'JH', 'IH0', 'NG']), 'ageless': ('NN', ['EY1', 'JH', 'L', 'AH0', 'S']), 'agencies': ('NNS', ['EY1', 'JH', 'AH0', 'N', 'S', 'IY0', 'Z']), 'agency': ('NN', ['EY1', 'JH', 'AH0', 'N', 'S', 'IY0']), 'agenda': ('NN', ['AH0', 'JH', 'EH1', 'N', 'D', 'AH0']), 'agent': ('NN', ['EY1', 'JH', 'AH0', 'N', 'T']), 'ageratum': ('NN', ['AH0', 'JH', 'EH1', 'R', 'AH0', 'T', 'AH0', 'M']), 'agglomerate': ('NN', ['AH0', 'G', 'L', 'AA1', 'M', 'ER0', 'EY2', 'T']), 'agglomeration': ('NN', ['AH0', 'G', 'L', 'AA2', 'M', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'agglutinate': ('NN', ['AH0', 'G', 'L', 'UW1', 'T', 'IH0', 'N', 'EY2', 'T']), 'aggrandizing': ('VBG', ['AE1', 'G', 'R', 'AH0', 'N', 'D', 'AY2', 'Z', 'IH0', 'NG']), 'aggrandize': ('VB', ['AH0', 'G', 'R', 'AE1', 'N', 'D', 'AY2', 'Z']), 'aggrandizement': ('NN', ['AE1', 'G', 'R', 'AH0', 'N', 
'D', 'AY2', 'Z', 'M', 'AH0', 'N', 'T']), 'aggravated': ('VBN', ['AE1', 'G', 'R', 'AH0', 'V', 'EY2', 'T', 'AH0', 'D']), 'aggravating': ('VBG', ['AE1', 'G', 'R', 'AH0', 'V', 'EY2', 'T', 'IH0', 'NG']), 'aggravate': ('NN', ['AE1', 'G', 'R', 'AH0', 'V', 'EY2', 'T']), 'aggravation': ('NN', ['AE2', 'G', 'R', 'AH0', 'V', 'EY1', 'SH', 'AH0', 'N']), 'aggregated': ('VBN', ['AE1', 'G', 'R', 'AH0', 'G', 'EY2', 'T', 'AH0', 'D']), 'aggregate': ('NN', ['AE1', 'G', 'R', 'AH0', 'G', 'AH0', 'T']), 'aggress': ('NN', ['AH0', 'G', 'R', 'EH1', 'S']), 'aggression': ('NN', ['AH0', 'G', 'R', 'EH1', 'SH', 'AH0', 'N']), 'aggressive': ('JJ', ['AH0', 'G', 'R', 'EH1', 'S', 'IH0', 'V']), 'aggressor': ('NN', ['AH0', 'G', 'R', 'EH1', 'S', 'ER0']), 'aggrieved': ('VBN', ['AH0', 'G', 'R', 'IY1', 'V', 'D']), 'aggrieve': ('NN', ['AH0', 'G', 'R', 'IY1', 'V']), 'agile': ('NN', ['AE1', 'JH', 'AH0', 'L']), 'agility': ('NN', ['AH0', 'JH', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'agitated': ('VBN', ['AE1', 'JH', 'AH0', 'T', 'EY2', 'T', 'AH0', 'D']), 'agitating': ('VBG', ['AE1', 'JH', 'AH0', 'T', 'EY2', 'T', 'IH0', 'NG']), 'agitate': ('NN', ['AE1', 'JH', 'AH0', 'T', 'EY2', 'T']), 'agitation': ('NN', ['AE2', 'JH', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'agitator': ('NN', ['AE1', 'JH', 'AH0', 'T', 'EY2', 'T', 'ER0']), 'agleam': ('NN', ['AH0', 'G', 'L', 'IY1', 'M']), 'aglitter': ('NN', ['AH0', 'G', 'L', 'IH1', 'T', 'ER0']), 'aglow': ('NN', ['AH0', 'G', 'L', 'OW1']), 'agnostic': ('JJ', ['AE0', 'G', 'N', 'AA1', 'S', 'T', 'IH0', 'K']), 'ago': ('RB', ['AH0', 'G', 'OW1']), 'agog': ('NN', ['AH0', 'G', 'AA1', 'G']), 'agonist': ('NN', ['AE1', 'G', 'AH0', 'N', 'IH0', 'S', 'T']), 'agonized': ('VBN', ['AE1', 'G', 'AH0', 'N', 'AY2', 'Z', 'D']), 'agonizing': ('VBG', ['AE1', 'G', 'AH0', 'N', 'AY0', 'Z', 'IH0', 'NG']), 'agonize': ('VB', ['AE1', 'G', 'AH0', 'N', 'AY2', 'Z']), 'agonizingly': ('RB', ['AE1', 'G', 'AH0', 'N', 'AY0', 'Z', 'IH0', 'NG', 'L', 'IY0']), 'agonies': ('NNS', ['AE1', 'G', 'AH0', 'N', 'IY0', 'Z']), 'agony': ('NN', 
['AE1', 'G', 'AH0', 'N', 'IY0']), 'agora': ('NNS', ['AE1', 'G', 'ER0', 'AH0']), 'agrarian': ('JJ', ['AH0', 'G', 'R', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'agrarianism': ('NN', ['AH0', 'G', 'R', 'EH1', 'R', 'IY0', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'agree': ('NN', ['AH0', 'G', 'R', 'IY1']), 'agreed': ('VBD', ['AH0', 'G', 'R', 'IY1', 'D']), 'agreeing': ('VBG', ['AH0', 'G', 'R', 'IY1', 'IH0', 'NG']), 'agreeable': ('JJ', ['AH0', 'G', 'R', 'IY1', 'AH0', 'B', 'AH0', 'L']), 'agreement': ('NN', ['AH0', 'G', 'R', 'IY1', 'M', 'AH0', 'N', 'T']), 'agricultural': ('JJ', ['AE2', 'G', 'R', 'AH0', 'K', 'AH1', 'L', 'CH', 'ER0', 'AH0', 'L']), 'agriculturalist': ('NN', ['AE2', 'G', 'R', 'AH0', 'K', 'AH1', 'L', 'CH', 'ER0', 'AH0', 'L', 'AH0', 'S', 'T']), 'agriculture': ('NN', ['AE1', 'G', 'R', 'IH0', 'K', 'AH2', 'L', 'CH', 'ER0']), 'agronomist': ('NN', ['AH0', 'G', 'R', 'AA1', 'N', 'AH0', 'M', 'IH0', 'S', 'T']), 'aground': ('NN', ['AH0', 'G', 'R', 'AW1', 'N', 'D']), 'ah': ('NN', ['AA1']), 'aha': ('NN', ['AA2', 'HH', 'AA1']), 'ahead': ('RB', ['AH0', 'HH', 'EH1', 'D']), 'ahold': ('NN', ['AH0', 'HH', 'OW1', 'L', 'D']), 'ahoy': ('NN', ['AH0', 'HH', 'OY1']), 'ais': ('NN', ['AY1', 'Z']), 'ai': ('NN', ['AY1']), 'aided': ('VBD', ['EY1', 'D', 'AH0', 'D']), 'aiding': ('VBG', ['EY1', 'D', 'IH0', 'NG']), 'aid': ('NN', ['EY1', 'D']), 'ailing': ('VBG', ['EY1', 'L', 'IH0', 'NG']), 'ail': ('NN', ['EY1', 'L']), 'ailment': ('NN', ['EY1', 'L', 'M', 'AH0', 'N', 'T']), 'aimed': ('VBN', ['EY1', 'M', 'D']), 'aiming': ('VBG', ['EY1', 'M', 'IH0', 'NG']), 'aim': ('NN', ['EY1', 'M']), 'aimless': ('NN', ['EY1', 'M', 'L', 'AH0', 'S']), "ain't": ('NN', ['EY1', 'N', 'T']), 'air': ('NN', ['EH1', 'R']), 'aired': ('VBN', ['EH1', 'R', 'D']), 'airing': ('VBG', ['EH1', 'R', 'IH0', 'NG']), 'airless': ('NN', ['EH1', 'R', 'L', 'AH0', 'S']), 'airy': ('NN', ['EH1', 'R', 'IY0']), 'aisle': ('NN', ['AY1', 'L']), 'ajar': ('NN', ['AH0', 'JH', 'AA1', 'R']), 'ake': ('NN', ['EY1', 'K']), 'akimbo': ('NN', ['AH0', 'K', 'IH1', 'M', 'B', 
'OW2']), 'akin': ('NN', ['AH0', 'K', 'IH1', 'N']), 'al': ('NN', ['AE1', 'L']), 'ala': ('NN', ['EY1', 'L', 'AH0']), 'alabaster': ('NN', ['AE1', 'L', 'AH0', 'B', 'AE2', 'S', 'T', 'ER0']), 'alacrity': ('NN', ['AH0', 'L', 'AE1', 'K', 'R', 'AH0', 'T', 'IY0']), 'alan': ('NN', ['AE1', 'L', 'AH0', 'N']), 'aland': ('NN', ['AE1', 'L', 'AH0', 'N', 'D']), 'alar': ('NN', ['EY1', 'L', 'AA2', 'R']), 'alarm': ('NN', ['AH0', 'L', 'AA1', 'R', 'M']), 'alarmed': ('JJ', ['AH0', 'L', 'AA1', 'R', 'M', 'D']), 'alarming': ('VBG', ['AH0', 'L', 'AA1', 'R', 'M', 'IH0', 'NG']), 'alarmist': ('NN', ['AH0', 'L', 'AA1', 'R', 'M', 'AH0', 'S', 'T']), 'alas': ('NNS', ['AH0', 'L', 'AE1', 'S']), 'albacore': ('NN', ['AE1', 'L', 'B', 'AH0', 'K', 'AO2', 'R']), 'alban': ('NN', ['AA1', 'L', 'B', 'AH0', 'N']), 'albanian': ('JJ', ['AE0', 'L', 'B', 'EY1', 'N', 'IY0', 'AH0', 'N']), 'albatross': ('NN', ['AE1', 'L', 'B', 'AH0', 'T', 'R', 'AA2', 'S']), 'albee': ('NN', ['AH0', 'L', 'B', 'IY1']), 'albeit': ('NN', ['AO0', 'L', 'B', 'IY1', 'IH0', 'T']), 'albino': ('NN', ['AE0', 'L', 'B', 'AY1', 'N', 'OW2']), 'albion': ('NN', ['AE1', 'L', 'B', 'IY0', 'AH0', 'N']), 'album': ('NN', ['AE1', 'L', 'B', 'AH0', 'M']), 'albumin': ('NN', ['AE0', 'L', 'B', 'Y', 'UW1', 'M', 'AH0', 'N']), 'alcazar': ('NN', ['AA1', 'L', 'K', 'AH0', 'Z', 'AA2', 'R']), 'alchemically': ('RB', ['AE0', 'L', 'K', 'EH1', 'M', 'AH0', 'K', 'L', 'IY0']), 'alchemist': ('NN', ['AE1', 'L', 'CH', 'AH0', 'M', 'IH0', 'S', 'T']), 'alchemy': ('NN', ['AE1', 'L', 'K', 'AH0', 'M', 'IY0']), 'alco': ('NN', ['AE1', 'L', 'K', 'OW0']), 'alcohol': ('NN', ['AE1', 'L', 'K', 'AH0', 'HH', 'AA2', 'L']), 'alcoholic': ('JJ', ['AE2', 'L', 'K', 'AH0', 'HH', 'AA1', 'L', 'IH0', 'K']), 'alcoholism': ('NN', ['AE1', 'L', 'K', 'AH0', 'HH', 'AO2', 'L', 'IH2', 'Z', 'AH0', 'M']), 'alcove': ('NN', ['AE1', 'L', 'K', 'OW2', 'V']), 'alday': ('NN', ['AE1', 'L', 'D', 'EY0']), 'aldebaran': ('NN', ['AE0', 'L', 'D', 'EH1', 'B', 'ER0', 'AH0', 'N']), 'alder': ('NN', ['AO1', 'L', 'D', 'ER0']), 'aller': 
('NN', ['AO1', 'L', 'ER0']), 'aldermen': ('NNS', ['AO1', 'L', 'D', 'ER0', 'M', 'IH0', 'N']), 'alderman': ('NN', ['AO1', 'L', 'D', 'ER0', 'M', 'AH0', 'N']), 'ale': ('NN', ['EY1', 'L']), 'aleatory': ('NN', ['EY1', 'L', 'IY0', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'alehouse': ('NN', ['EY1', 'L', 'HH', 'AW2', 'S']), 'alert': ('NN', ['AH0', 'L', 'ER1', 'T']), 'alertly': ('RB', ['AH0', 'L', 'ER1', 'T', 'L', 'IY0']), 'alertness': ('NN', ['AH0', 'L', 'ER1', 'T', 'N', 'AH0', 'S']), 'aleutian': ('JJ', ['AH0', 'L', 'UW1', 'SH', 'AH0', 'N']), 'alewives': ('NNS', ['EY1', 'L', 'W', 'AY2', 'V', 'Z']), 'alewife': ('NN', ['EY1', 'L', 'W', 'AY2', 'F']), 'alexanders': ('NNS', ['AE2', 'L', 'IH0', 'G', 'Z', 'AE1', 'N', 'D', 'ER0', 'Z']), 'alexandrine': ('NN', ['AE2', 'L', 'AH0', 'G', 'Z', 'AE1', 'N', 'D', 'R', 'IY0', 'N']), 'alfa': ('NN', ['AE1', 'L', 'F', 'AH0']), 'alfalfa': ('NN', ['AE2', 'L', 'F', 'AE1', 'L', 'F', 'AH0']), 'algae': ('NN', ['AE1', 'L', 'JH', 'IY0']), 'algal': ('NN', ['AE1', 'L', 'G', 'AH0', 'L']), 'algebra': ('NN', ['AE1', 'L', 'JH', 'AH0', 'B', 'R', 'AH0']), 'algebraic': ('NN', ['AE2', 'L', 'JH', 'AH0', 'B', 'R', 'EY1', 'IH0', 'K']), 'algebraically': ('RB', ['AE2', 'L', 'JH', 'AH0', 'B', 'R', 'EY1', 'IH0', 'K', 'L', 'IY0']), 'algerian': ('JJ', ['AE0', 'L', 'JH', 'IH1', 'R', 'IY0', 'AH0', 'N']), 'algol': ('NN', ['AE1', 'L', 'G', 'AA0', 'L']), 'algonquin': ('NN', ['AE0', 'L', 'G', 'AA1', 'NG', 'K', 'W', 'IH0', 'N']), 'algorithm': ('NN', ['AE1', 'L', 'G', 'ER0', 'IH2', 'DH', 'AH0', 'M']), 'alhambra': ('NN', ['AE0', 'L', 'HH', 'AE1', 'M', 'B', 'R', 'AH0']), 'alias': ('NNS', ['EY1', 'L', 'IY0', 'AH0', 'S']), 'aliases': ('NNS', ['EY1', 'L', 'IY0', 'AH0', 'S', 'IH0', 'Z']), 'alibi': ('NN', ['AE1', 'L', 'AH0', 'B', 'AY2']), 'alien': ('NN', ['EY1', 'L', 'IY0', 'AH0', 'N']), 'alienate': ('NN', ['EY1', 'L', 'Y', 'AH0', 'N', 'EY2', 'T']), 'alienated': ('VBN', ['EY1', 'L', 'IY0', 'AH0', 'N', 'EY2', 'T', 'AH0', 'D']), 'alienating': ('VBG', ['EY1', 'L', 'IY0', 'AH0', 'N', 'EY2', 'T', 
'IH0', 'NG']), 'alienation': ('NN', ['EY2', 'L', 'IY0', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'alight': ('NN', ['AH0', 'L', 'AY1', 'T']), 'align': ('NN', ['AH0', 'L', 'AY1', 'N']), 'alignment': ('NN', ['AH0', 'L', 'AY1', 'N', 'M', 'AH0', 'N', 'T']), 'alike': ('RB', ['AH0', 'L', 'AY1', 'K']), 'alimentary': ('JJ', ['AE2', 'L', 'AH0', 'M', 'EH1', 'N', 'T', 'ER0', 'IY0']), 'alimony': ('NN', ['AE1', 'L', 'AH0', 'M', 'OW2', 'N', 'IY0']), 'aline': ('NN', ['AH0', 'L', 'AY1', 'N']), 'alive': ('JJ', ['AH0', 'L', 'AY1', 'V']), 'alkahest': ('NN', ['AE1', 'L', 'K', 'AH0', 'HH', 'EH2', 'S', 'T']), 'alkalies': ('NNS', ['AE1', 'L', 'K', 'AH0', 'L', 'AY2', 'Z']), 'alkali': ('NN', ['AE1', 'L', 'K', 'AH0', 'L', 'AY2']), 'alkaline': ('NN', ['AE1', 'L', 'K', 'AH0', 'L', 'AY2', 'N']), 'alkalinity': ('NN', ['AE2', 'L', 'K', 'AH0', 'L', 'IH1', 'N', 'AH0', 'T', 'IY0']), 'alkaloid': ('NN', ['AE1', 'L', 'K', 'AH0', 'L', 'OY2', 'D']), 'alkaloidal': ('NN', ['AE0', 'L', 'K', 'AH0', 'L', 'OY1', 'D', 'AH0', 'L']), 'alkermes': ('NNS', ['AA0', 'L', 'K', 'ER1', 'M', 'IY0', 'Z']), 'all': ('DT', ['AO1', 'L']), 'allah': ('NN', ['AA1', 'L', 'AH0']), 'allayed': ('NNS', ['AH0', 'L', 'EY1', 'D']), 'allaying': ('VBG', ['AH0', 'L', 'EY1', 'IH0', 'NG']), 'allay': ('NN', ['AH0', 'L', 'EY1']), 'allegation': ('NN', ['AE2', 'L', 'AH0', 'G', 'EY1', 'SH', 'AH0', 'N']), 'alleged': ('VBN', ['AH0', 'L', 'EH1', 'JH', 'D']), 'alleging': ('VBG', ['AH0', 'L', 'EH1', 'JH', 'IH0', 'NG']), 'allege': ('NN', ['AH0', 'L', 'EH1', 'JH']), 'allegiance': ('NN', ['AH0', 'L', 'IY1', 'JH', 'AH0', 'N', 'S']), 'allegorical': ('JJ', ['AE2', 'L', 'AH0', 'G', 'AO1', 'R', 'AH0', 'K', 'AH0', 'L']), 'allegories': ('NNS', ['AE1', 'L', 'AH0', 'G', 'AO2', 'R', 'IY0', 'Z']), 'allegory': ('NN', ['AE1', 'L', 'AH0', 'G', 'AO2', 'R', 'IY0']), 'allegro': ('NN', ['AH0', 'L', 'EH1', 'G', 'R', 'OW2']), 'alleviated': ('VBN', ['AH0', 'L', 'IY1', 'V', 'IY0', 'EY2', 'T', 'AH0', 'D']), 'alleviating': ('VBG', ['AH0', 'L', 'IY1', 'V', 'IY0', 'EY2', 'T', 'IH0', 
'NG']), 'alleviate': ('NN', ['AH0', 'L', 'IY1', 'V', 'IY0', 'EY2', 'T']), 'alleviation': ('NN', ['AH0', 'L', 'IY2', 'V', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'alleys': ('NNS', ['AE1', 'L', 'IY0', 'Z']), 'alley': ('NN', ['AE1', 'L', 'IY0']), 'alleyway': ('RB', ['AE1', 'L', 'IY0', 'W', 'EY2']), 'alliance': ('NN', ['AH0', 'L', 'AY1', 'AH0', 'N', 'S']), 'alliant': ('NN', ['AH0', 'L', 'AY1', 'AH0', 'N', 'T']), 'allis': ('NNS', ['AE1', 'L', 'IH0', 'S']), 'allied': ('JJ', ['AH0', 'L', 'AY1', 'D']), 'alligator': ('NN', ['AE1', 'L', 'AH0', 'G', 'EY2', 'T', 'ER0']), 'alliteration': ('NN', ['AH0', 'L', 'IH1', 'T', 'ER0', 'EY2', 'SH', 'AH0', 'N']), 'alliterative': ('JJ', ['AH0', 'L', 'IH1', 'T', 'ER0', 'AH0', 'T', 'IH0', 'V']), 'allocate': ('NN', ['AE1', 'L', 'AH0', 'K', 'EY2', 'T']), 'allocation': ('NN', ['AE2', 'L', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'allograph': ('NN', ['AE1', 'L', 'AH0', 'G', 'R', 'AE2', 'F']), 'allomorph': ('NN', ['AE1', 'L', 'AH0', 'M', 'AO0', 'R', 'F']), 'allotted': ('VBN', ['AH0', 'L', 'AA1', 'T', 'IH0', 'D']), 'allotting': ('VBG', ['AH0', 'L', 'AA1', 'T', 'IH0', 'NG']), 'allot': ('NN', ['AH0', 'L', 'AA1', 'T']), 'allotment': ('NN', ['AH0', 'L', 'AA1', 'T', 'M', 'AH0', 'N', 'T']), 'allowed': ('VBN', ['AH0', 'L', 'AW1', 'D']), 'allowing': ('VBG', ['AH0', 'L', 'AW1', 'IH0', 'NG']), 'allow': ('VB', ['AH0', 'L', 'AW1']), 'allowable': ('JJ', ['AH0', 'L', 'AW1', 'AH0', 'B', 'AH0', 'L']), 'allowance': ('NN', ['AH0', 'L', 'AW1', 'AH0', 'N', 'S']), 'alloy': ('NN', ['AE1', 'L', 'OY2']), 'allspice': ('NN', ['AO1', 'L', 'S', 'P', 'AY2', 'S']), 'alluded': ('VBD', ['AH0', 'L', 'UW1', 'D', 'AH0', 'D']), 'alluding': ('VBG', ['AH0', 'L', 'UW1', 'D', 'IH0', 'NG']), 'allude': ('NN', ['AH0', 'L', 'UW1', 'D']), 'alluring': ('VBG', ['AH0', 'L', 'UH1', 'R', 'IH0', 'NG']), 'allure': ('NN', ['AH0', 'L', 'UH1', 'R']), 'allusion': ('NN', ['AH0', 'L', 'UW1', 'ZH', 'AH0', 'N']), 'allusive': ('JJ', ['AH0', 'L', 'UW1', 'S', 'IH0', 'V']), 'alluvial': ('JJ', ['AE2', 'L', 'UW1', 'V', 
'IY0', 'AH0', 'L']), 'alluvium': ('NN', ['AH0', 'L', 'UW1', 'V', 'IY0', 'AH0', 'M']), 'allying': ('VBG', ['AE1', 'L', 'AY0', 'IH0', 'NG']), 'ally': ('RB', ['AE1', 'L', 'AY0']), 'allies': ('NNS', ['AE1', 'L', 'AY0', 'Z']), 'alma': ('NN', ['AE1', 'L', 'M', 'AH0']), 'alman': ('NN', ['AE1', 'L', 'M', 'AH0', 'N']), 'almanac': ('NN', ['AO1', 'L', 'M', 'AH0', 'N', 'AE2', 'K']), 'almighty': ('NN', ['AO0', 'L', 'M', 'AY1', 'T', 'IY0']), 'almond': ('NN', ['AA1', 'M', 'AH0', 'N', 'D']), 'almoner': ('NN', ['AA1', 'L', 'M', 'AH0', 'N', 'ER0']), 'almost': ('RB', ['AO1', 'L', 'M', 'OW2', 'S', 'T']), 'alms': ('NNS', ['AA1', 'L', 'M', 'Z']), 'aloe': ('NN', ['AE1', 'L', 'OW2']), 'aloft': ('RB', ['AH0', 'L', 'AO1', 'F', 'T']), 'alone': ('RB', ['AH0', 'L', 'OW1', 'N']), 'along': ('IN', ['AH0', 'L', 'AO1', 'NG']), 'alongside': ('RB', ['AH0', 'L', 'AO1', 'NG', 'S', 'AY1', 'D']), 'aloof': ('NN', ['AH0', 'L', 'UW1', 'F']), 'aloofness': ('NN', ['AH0', 'L', 'UW1', 'F', 'N', 'AH0', 'S']), 'aloud': ('NN', ['AH0', 'L', 'AW1', 'D']), 'alpaca': ('NN', ['AE0', 'L', 'P', 'AE1', 'K', 'AH0']), 'alpha': ('NN', ['AE1', 'L', 'F', 'AH0']), 'alphabet': ('NN', ['AE1', 'L', 'F', 'AH0', 'B', 'EH2', 'T']), 'alphabetic': ('JJ', ['AE2', 'L', 'F', 'AH0', 'B', 'EH1', 'T', 'IH0', 'K']), 'alphabetical': ('JJ', ['AE2', 'L', 'F', 'AH0', 'B', 'EH1', 'T', 'IH0', 'K', 'AH0', 'L']), 'alphabetically': ('RB', ['AE2', 'L', 'F', 'AH0', 'B', 'EH1', 'T', 'IH0', 'K', 'L', 'IY0']), 'alphabetize': ('VB', ['AE1', 'L', 'F', 'AH0', 'B', 'AH0', 'T', 'AY2', 'Z']), 'alphonsine': ('NN', ['AH0', 'L', 'F', 'AA1', 'N', 'S', 'IY0', 'N']), 'alpine': ('NN', ['AE1', 'L', 'P', 'AY2', 'N']), 'already': ('RB', ['AO0', 'L', 'R', 'EH1', 'D', 'IY0']), 'als': ('NNS', ['AE1', 'L', 'Z']), 'alsatian': ('JJ', ['AE0', 'L', 'S', 'EY1', 'SH', 'AH0', 'N']), 'also': ('RB', ['AO1', 'L', 'S', 'OW0']), 'alt': ('NN', ['AA1', 'L', 'T']), 'altaic': ('NN', ['AE0', 'L', 'T', 'EY1', 'IH0', 'K']), 'altar': ('NN', ['AO1', 'L', 'T', 'ER0']), 'altarpiece': ('NN', ['AO1', 
'L', 'T', 'ER0', 'P', 'IY2', 'S']), 'altered': ('VBN', ['AO1', 'L', 'T', 'ER0', 'D']), 'altering': ('VBG', ['AO1', 'L', 'T', 'ER0', 'IH0', 'NG']), 'alter': ('NN', ['AO1', 'L', 'T', 'ER0']), 'alteration': ('NN', ['AO2', 'L', 'T', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'altercation': ('NN', ['AA2', 'L', 'T', 'ER0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'alternate': ('NN', ['AO1', 'L', 'T', 'ER0', 'N', 'AH0', 'T']), 'alternated': ('VBN', ['AO1', 'L', 'T', 'ER0', 'N', 'EY2', 'T', 'AH0', 'D']), 'alternating': ('VBG', ['AO1', 'L', 'T', 'ER0', 'N', 'EY2', 'T', 'IH0', 'NG']), 'alternately': ('RB', ['AO1', 'L', 'T', 'ER0', 'N', 'AH0', 'T', 'L', 'IY0']), 'alternation': ('NN', ['AO1', 'L', 'T', 'ER0', 'N', 'EY2', 'SH', 'AH0', 'N']), 'alternative': ('NN', ['AO0', 'L', 'T', 'ER1', 'N', 'AH0', 'T', 'IH0', 'V']), 'alternatively': ('RB', ['AO0', 'L', 'T', 'ER1', 'N', 'AH0', 'T', 'IH0', 'V', 'L', 'IY0']), 'althea': ('NN', ['AE0', 'L', 'TH', 'IY1', 'AH0']), 'although': ('IN', ['AO2', 'L', 'DH', 'OW1']), 'altimeter': ('NN', ['AE0', 'L', 'T', 'IH1', 'M', 'AH0', 'T', 'ER0']), 'altimetry': ('NN', ['AE0', 'L', 'T', 'IH1', 'M', 'AH0', 'T', 'R', 'IY0']), 'altitude': ('NN', ['AE1', 'L', 'T', 'AH0', 'T', 'UW2', 'D']), 'altos': ('NNS', ['AE1', 'L', 'T', 'OW0', 'Z']), 'alto': ('NN', ['AE1', 'L', 'T', 'OW0']), 'altogether': ('RB', ['AO2', 'L', 'T', 'AH0', 'G', 'EH1', 'DH', 'ER0']), 'altruism': ('NN', ['AE1', 'L', 'T', 'R', 'UW0', 'IH2', 'Z', 'AH0', 'M']), 'altruistic': ('JJ', ['AO2', 'L', 'T', 'R', 'UW0', 'IH1', 'S', 'T', 'IH0', 'K']), 'alum': ('NN', ['AE1', 'L', 'AH0', 'M']), 'alumina': ('NN', ['AH0', 'L', 'UW1', 'M', 'AH0', 'N', 'AH0']), 'aluminium': ('NN', ['AH0', 'L', 'UW1', 'M', 'IH0', 'N', 'AH0', 'M']), 'aluminize': ('VB', ['AH0', 'L', 'UW1', 'M', 'AH0', 'N', 'AY2', 'Z']), 'aluminum': ('NN', ['AH0', 'L', 'UW1', 'M', 'AH0', 'N', 'AH0', 'M']), 'alumna': ('NN', ['AH0', 'L', 'AH1', 'M', 'N', 'AH0']), 'alumni': ('NNS', ['AH0', 'L', 'AH1', 'M', 'N', 'AY2']), 'alumnus': ('NN', ['AH0', 'L', 'AH1', 'M', 'N', 
'AH0', 'S']), 'alveolar': ('NN', ['AE0', 'L', 'V', 'IY1', 'AH0', 'L', 'ER0']), 'alveoli': ('NN', ['AE0', 'L', 'V', 'IY1', 'AH0', 'L', 'AY2']), 'always': ('RB', ['AO1', 'L', 'W', 'EY2', 'Z']), 'am': ('VBP', ['AE1', 'M']), 'amadou': ('NN', ['AE1', 'M', 'AH0', 'D', 'UW2']), 'amalgam': ('NN', ['AH0', 'M', 'AE1', 'L', 'G', 'AH0', 'M']), 'amalgamated': ('VBN', ['AH0', 'M', 'AE1', 'L', 'G', 'AH0', 'M', 'EY2', 'T', 'IH0', 'D']), 'amalgamating': ('VBG', ['AH0', 'M', 'AE1', 'L', 'G', 'AH0', 'M', 'EY2', 'T', 'IH0', 'NG']), 'amalgamate': ('NN', ['AH0', 'M', 'AE1', 'L', 'G', 'AH0', 'M', 'EY2', 'T']), 'amalgamation': ('NN', ['AH0', 'M', 'AE2', 'L', 'G', 'AH0', 'M', 'EY1', 'SH', 'AH0', 'N']), 'amaranth': ('NN', ['AE1', 'M', 'ER0', 'AE2', 'N', 'TH']), 'amaryllis': ('NNS', ['AE2', 'M', 'ER0', 'IH1', 'L', 'AH0', 'S']), 'amassed': ('VBN', ['AH0', 'M', 'AE1', 'S', 'T']), 'amassing': ('VBG', ['AH0', 'M', 'AE1', 'S', 'IH0', 'NG']), 'amass': ('NN', ['AH0', 'M', 'AE1', 'S']), 'amateur': ('NN', ['AE1', 'M', 'AH0', 'T', 'ER2']), 'amateurish': ('JJ', ['AE1', 'M', 'AH0', 'CH', 'ER0', 'IH0', 'SH']), 'amateurism': ('NN', ['AE1', 'M', 'AH0', 'CH', 'ER0', 'IH0', 'Z', 'AH0', 'M']), 'amazed': ('VBN', ['AH0', 'M', 'EY1', 'Z', 'D']), 'amazing': ('VBG', ['AH0', 'M', 'EY1', 'Z', 'IH0', 'NG']), 'amaze': ('NN', ['AH0', 'M', 'EY1', 'Z']), 'amazement': ('NN', ['AH0', 'M', 'EY1', 'Z', 'M', 'AH0', 'N', 'T']), 'amazon': ('NN', ['AE1', 'M', 'AH0', 'Z', 'AA2', 'N']), 'amazonian': ('JJ', ['AE2', 'M', 'AH0', 'Z', 'OW1', 'N', 'IY0', 'AH0', 'N']), 'ambassador': ('NN', ['AE0', 'M', 'B', 'AE1', 'S', 'AH0', 'D', 'ER0']), 'ambassadorial': ('JJ', ['AE0', 'M', 'B', 'AE2', 'S', 'AH0', 'D', 'AO1', 'R', 'IY0', 'AH0', 'L']), 'ambassadorship': ('NN', ['AE0', 'M', 'B', 'AE1', 'S', 'AH0', 'D', 'ER0', 'SH', 'IH2', 'P']), 'ambassadress': ('NN', ['AE0', 'M', 'B', 'AE1', 'S', 'AH0', 'D', 'R', 'AH0', 'S']), 'amber': ('NN', ['AE1', 'M', 'B', 'ER0']), 'b': ('NN', ['B', 'IY1']), 'baas': ('NN', ['B', 'AA1', 'Z']), 'baba': ('NN', ['B', 
'AH1', 'B', 'AH0']), 'babbitt': ('NN', ['B', 'AE1', 'B', 'IH0', 'T']), 'babbled': ('VBN', ['B', 'AE1', 'B', 'AH0', 'L', 'D']), 'babbling': ('NN', ['B', 'AE1', 'B', 'AH0', 'L', 'IH0', 'NG']), 'babble': ('JJ', ['B', 'AE1', 'B', 'AH0', 'L']), 'babbler': ('NN', ['B', 'AE1', 'B', 'L', 'ER0']), 'babe': ('NN', ['B', 'EY1', 'B']), 'babel': ('NN', ['B', 'AE1', 'B', 'AH0', 'L']), 'babish': ('NN', ['B', 'AE1', 'B', 'IH0', 'SH']), 'babu': ('NN', ['B', 'AA0', 'B', 'UW1']), 'baboon': ('NN', ['B', 'AH0', 'B', 'UW1', 'N']), 'babies': ('NNS', ['B', 'EY1', 'B', 'IY0', 'Z']), 'baby': ('NN', ['B', 'EY1', 'B', 'IY0']), 'babyhood': ('NN', ['B', 'EY1', 'B', 'IY0', 'HH', 'UH2', 'D']), 'babyish': ('NN', ['B', 'EY1', 'B', 'IY0', 'IH0', 'SH']), 'babylonian': ('NN', ['B', 'AE2', 'B', 'AH0', 'L', 'OW1', 'N', 'IY0', 'AH0', 'N']), 'baccalaureate': ('NN', ['B', 'AE2', 'K', 'AH0', 'L', 'AO1', 'R', 'IY0', 'AH0', 'T']), 'baccarat': ('NN', ['B', 'AA2', 'K', 'ER0', 'AA1']), 'bacchanal': ('NN', ['B', 'AH0', 'K', 'EY1', 'N', 'AH0', 'L']), 'bacchanalia': ('NN', ['B', 'AE2', 'K', 'AH0', 'N', 'EY1', 'L', 'Y', 'AH0']), 'bacchus': ('NN', ['B', 'AE1', 'K', 'IH0', 'S']), 'bacharach': ('NN', ['B', 'AE1', 'K', 'ER0', 'AE0', 'K']), 'bachelor': ('NN', ['B', 'AE1', 'CH', 'AH0', 'L', 'ER0']), 'bacillus': ('NN', ['B', 'AH0', 'S', 'IH1', 'L', 'AH0', 'S']), 'back': ('RB', ['B', 'AE1', 'K']), 'backed': ('VBD', ['B', 'AE1', 'K', 'T']), 'backing': ('NN', ['B', 'AE1', 'K', 'IH0', 'NG']), 'backbite': ('NN', ['B', 'AE1', 'K', 'B', 'AY2', 'T']), 'backbiting': ('NN', ['B', 'AE1', 'K', 'B', 'AY2', 'T', 'IH0', 'NG']), 'backboard': ('NN', ['B', 'AE1', 'K', 'B', 'AO2', 'D']), 'backbone': ('NN', ['B', 'AE1', 'K', 'B', 'OW2', 'N']), 'backdoor': ('NN', ['B', 'AE1', 'K', 'D', 'AO2', 'R']), 'backer': ('NN', ['B', 'AE1', 'K', 'ER0']), 'backgammon': ('NN', ['B', 'AE1', 'K', 'G', 'AE2', 'M', 'AH0', 'N']), 'background': ('NN', ['B', 'AE1', 'K', 'G', 'R', 'AW2', 'N', 'D']), 'backhand': ('NN', ['B', 'AE1', 'K', 'HH', 'AE2', 'N', 'D']), 
'backhanded': ('VBN', ['B', 'AE1', 'K', 'HH', 'AE2', 'N', 'D', 'AH0', 'D']), 'backlash': ('NN', ['B', 'AE1', 'K', 'L', 'AE2', 'SH']), 'backless': ('NN', ['B', 'AE1', 'K', 'L', 'AH0', 'S']), 'backlog': ('NN', ['B', 'AE1', 'K', 'L', 'AA2', 'G']), 'backs': ('NNS', ['B', 'AE1', 'K', 'S']), 'backsaw': ('NN', ['B', 'AE1', 'K', 'S', 'AA2']), 'backset': ('NN', ['B', 'AE1', 'K', 'S', 'EH2', 'T']), 'backside': ('NN', ['B', 'AE1', 'K', 'S', 'AY2', 'D']), 'backsliding': ('NN', ['B', 'AE1', 'K', 'S', 'L', 'AY2', 'D', 'IH0', 'NG']), 'backslide': ('NN', ['B', 'AE1', 'K', 'S', 'L', 'AY2', 'D']), 'backstairs': ('NNS', ['B', 'AE1', 'K', 'S', 'T', 'EH2', 'R', 'Z']), 'backstitch': ('NN', ['B', 'AE1', 'K', 'S', 'T', 'IH0', 'CH']), 'backward': ('NN', ['B', 'AE1', 'K', 'W', 'ER0', 'D']), 'backwards': ('NNS', ['B', 'AE1', 'K', 'W', 'ER0', 'D', 'Z']), 'backwardation': ('NN', ['B', 'AE2', 'K', 'W', 'ER0', 'D', 'EY1', 'SH', 'AH0', 'N']), 'backwardness': ('NN', ['B', 'AE1', 'K', 'W', 'ER0', 'D', 'N', 'AH0', 'S']), 'backwash': ('NN', ['B', 'AE1', 'K', 'W', 'AA2', 'SH']), 'backwater': ('NN', ['B', 'AE1', 'K', 'W', 'AO2', 'T', 'ER0']), 'backwoods': ('NNS', ['B', 'AE1', 'K', 'W', 'UH1', 'D', 'Z']), 'backwoodsman': ('NN', ['B', 'AE1', 'K', 'W', 'UH1', 'D', 'Z', 'M', 'AH0', 'N']), 'bacon': ('NN', ['B', 'EY1', 'K', 'AH0', 'N']), 'bacteria': ('NNS', ['B', 'AE0', 'K', 'T', 'IH1', 'R', 'IY0', 'AH0']), 'bacterial': ('NN', ['B', 'AE0', 'K', 'T', 'IH1', 'R', 'IY0', 'AH0', 'L']), 'bacteriology': ('NN', ['B', 'AE2', 'K', 'T', 'IH0', 'R', 'IY2', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'bacterium': ('NN', ['B', 'AE0', 'K', 'T', 'IH1', 'R', 'IY0', 'AH0', 'M']), 'bad': ('JJ', ['B', 'AE1', 'D']), 'bade': ('NN', ['B', 'EY1', 'D']), 'badge': ('NN', ['B', 'AE1', 'JH']), 'badger': ('NN', ['B', 'AE1', 'JH', 'ER0']), 'badgered': ('VBN', ['B', 'AE1', 'JH', 'ER0', 'D']), 'badgering': ('NN', ['B', 'AE1', 'JH', 'ER0', 'IH0', 'NG']), 'c': ('NNS', ['S', 'IY1']), 'cab': ('NN', ['K', 'AE1', 'B']), 'cabal': ('NN', ['K', 'AH0', 'B', 
'AA1', 'L']), 'cabaret': ('NN', ['K', 'AE2', 'B', 'ER0', 'EY1']), 'cabbage': ('NN', ['K', 'AE1', 'B', 'AH0', 'JH']), 'cabin': ('NN', ['K', 'AE1', 'B', 'AH0', 'N']), 'cabinet': ('NN', ['K', 'AE1', 'B', 'AH0', 'N', 'AH0', 'T']), 'cable': ('NN', ['K', 'EY1', 'B', 'AH0', 'L']), 'cabled': ('VBN', ['K', 'EY1', 'B', 'AH0', 'L', 'D']), 'cablegram': ('NN', ['K', 'EY1', 'B', 'AH0', 'L', 'G', 'R', 'AE2', 'M']), 'caboodle': ('NN', ['K', 'AH0', 'B', 'UW1', 'D', 'AH0', 'L']), 'caboose': ('NN', ['K', 'AH0', 'B', 'UW1', 'S']), 'cabotage': ('NN', ['K', 'AE1', 'B', 'AH0', 'T', 'IH0', 'JH']), 'cabriolet': ('NN', ['K', 'AE2', 'B', 'R', 'IY0', 'OW0', 'L', 'EY1']), 'cacao': ('NN', ['K', 'AH0', 'K', 'EY1', 'OW0']), 'cache': ('NN', ['K', 'AE1', 'SH']), 'cachet': ('NN', ['K', 'AE1', 'SH', 'EY0']), 'cacique': ('NN', ['K', 'AH0', 'S', 'IY1', 'K']), 'cackling': ('VBG', ['K', 'AE1', 'K', 'AH0', 'L', 'IH0', 'NG']), 'cackle': ('NN', ['K', 'AE1', 'K', 'AH0', 'L']), 'cacophony': ('NN', ['K', 'AE0', 'K', 'AA1', 'F', 'AH0', 'N', 'IY0']), 'cacti': ('NN', ['K', 'AE1', 'K', 'T', 'AY0']), 'cactus': ('NN', ['K', 'AE1', 'K', 'T', 'AH0', 'S']), 'cad': ('NN', ['K', 'AE1', 'D']), 'cadaver': ('NN', ['K', 'AH0', 'D', 'AE1', 'V', 'ER0']), 'caddies': ('NNS', ['K', 'AE1', 'D', 'IY0', 'Z']), 'caddy': ('NN', ['K', 'AE1', 'D', 'IY0']), 'cade': ('NN', ['K', 'EY1', 'D']), 'cadence': ('NN', ['K', 'EY1', 'D', 'AH0', 'N', 'S']), 'cadet': ('NN', ['K', 'AH0', 'D', 'EH1', 'T']), 'cadillac': ('NN', ['K', 'AE1', 'D', 'AH0', 'L', 'AE2', 'K']), 'cadmium': ('NN', ['K', 'AE1', 'D', 'M', 'IY0', 'AH0', 'M']), 'cadre': ('NN', ['K', 'AE1', 'D', 'R', 'IY0']), 'cady': ('NN', ['K', 'EY1', 'D', 'IY0']), 'caesar': ('NN', ['S', 'IY1', 'Z', 'ER0']), 'caesarean': ('NN', ['K', 'EY1', 'S', 'ER0', 'IY2', 'N']), 'cafe': ('NN', ['K', 'AH0', 'F', 'EY1']), 'caffeine': ('NN', ['K', 'AE0', 'F', 'IY1', 'N']), 'caftan': ('NN', ['K', 'AE1', 'F', 'T', 'AE2', 'N']), 'cage': ('NN', ['K', 'EY1', 'JH']), 'caged': ('VBN', ['K', 'EY1', 'JH', 'D']), 'cahoot': 
('NN', ['K', 'AH0', 'HH', 'UW1', 'T']), 'cairn': ('NN', ['K', 'EH1', 'R', 'N']), 'caisson': ('NN', ['K', 'EY1', 'S', 'AH0', 'N']), 'cajoled': ('VBN', ['K', 'AH0', 'JH', 'OW1', 'L', 'D']), 'cajoling': ('VBG', ['K', 'AH0', 'JH', 'OW1', 'L', 'IH0', 'NG']), 'cajole': ('NN', ['K', 'AH0', 'JH', 'OW1', 'L']), 'cake': ('NN', ['K', 'EY1', 'K']), 'caked': ('NNS', ['K', 'EY1', 'K', 'T']), 'cal': ('JJ', ['K', 'AE1', 'L']), 'calamine': ('NN', ['K', 'AE1', 'L', 'AH0', 'M', 'AY2', 'N']), 'calamitous': ('JJ', ['K', 'AH0', 'L', 'AE1', 'M', 'AH0', 'T', 'AH0', 'S']), 'calamities': ('NNS', ['K', 'AH0', 'L', 'AE1', 'M', 'AH0', 'T', 'IY0', 'Z']), 'calamity': ('NN', ['K', 'AH0', 'L', 'AE1', 'M', 'AH0', 'T', 'IY0']), 'calcified': ('VBN', ['K', 'AE1', 'L', 'S', 'AH0', 'F', 'AY2', 'D']), 'calcify': ('NN', ['K', 'AE1', 'L', 'S', 'AH0', 'F', 'AY2']), 'calcite': ('NN', ['K', 'AE1', 'L', 'S', 'AY2', 'T']), 'calcium': ('NN', ['K', 'AE1', 'L', 'S', 'IY0', 'AH0', 'M']), 'calculating': ('VBG', ['K', 'AE1', 'L', 'K', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'calculate': ('NN', ['K', 'AE1', 'L', 'K', 'Y', 'AH0', 'L', 'EY2', 'T']), 'calculated': ('VBN', ['K', 'AE1', 'L', 'K', 'Y', 'AH0', 'L', 'EY2', 'T', 'AH0', 'D']), 'calculation': ('NN', ['K', 'AE2', 'L', 'K', 'Y', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'calculator': ('NN', ['K', 'AE1', 'L', 'K', 'Y', 'AH0', 'L', 'EY2', 'T', 'ER0']), 'calculus': ('NN', ['K', 'AE1', 'L', 'K', 'Y', 'AH0', 'L', 'AH0', 'S']), 'caledonia': ('NN', ['K', 'AE2', 'L', 'AH0', 'D', 'OW1', 'N', 'IY0', 'AH0']), 'caledonian': ('NN', ['K', 'AE2', 'L', 'IH0', 'D', 'OW1', 'N', 'IY0', 'AH0', 'N']), 'calendar': ('NN', ['K', 'AE1', 'L', 'AH0', 'N', 'D', 'ER0']), 'calendula': ('NN', ['K', 'AH0', 'L', 'EH1', 'JH', 'AH0', 'L', 'AH0']), 'calves': ('NNS', ['K', 'AE1', 'V', 'Z']), 'calf': ('NN', ['K', 'AE1', 'F']), 'cali': ('NN', ['K', 'AA1', 'L', 'IY0']), 'caliber': ('NN', ['K', 'AE1', 'L', 'AH0', 'B', 'ER0']), 'calibrate': ('NN', ['K', 'AE1', 'L', 'AH0', 'B', 'R', 'EY2', 'T']), 
'calibration': ('NN', ['K', 'AE2', 'L', 'AH0', 'B', 'R', 'EY1', 'SH', 'AH0', 'N']), 'calico': ('NN', ['K', 'AE1', 'L', 'AH0', 'K', 'OW2']), 'calif': ('NN', ['K', 'AE2', 'L', 'AH0', 'F', 'AO1', 'R', 'N', 'Y', 'AH0']), 'californian': ('NN', ['K', 'AE2', 'L', 'IH0', 'F', 'AO1', 'R', 'N', 'Y', 'AH0', 'N']), 'calipers': ('NNS', ['K', 'AE1', 'L', 'AH0', 'P', 'ER0', 'Z']), 'caliph': ('NN', ['K', 'AE1', 'L', 'AH0', 'F']), 'calisthenics': ('NNS', ['K', 'AE2', 'L', 'AH0', 'S', 'TH', 'EH1', 'N', 'IH0', 'K', 'S']), 'calix': ('NN', ['K', 'AE1', 'L', 'IH0', 'K', 'S']), 'calk': ('NN', ['K', 'AO1', 'K']), 'calkin': ('NN', ['K', 'AE1', 'L', 'K', 'IH0', 'N']), 'called': ('VBN', ['K', 'AO1', 'L', 'D']), 'calling': ('VBG', ['K', 'AO1', 'L', 'IH0', 'NG']), 'call': ('NN', ['K', 'AO1', 'L']), 'calla': ('NN', ['K', 'AE1', 'L', 'AH0']), 'calle': ('NN', ['K', 'EY1', 'L']), 'caller': ('NN', ['K', 'AO1', 'L', 'ER0']), 'calligrapher': ('NN', ['K', 'AH0', 'L', 'IH1', 'G', 'R', 'AH0', 'F', 'ER0']), 'calligraphic': ('JJ', ['K', 'AE2', 'L', 'AH0', 'G', 'R', 'AE1', 'F', 'IH0', 'K']), 'calligraphy': ('NN', ['K', 'AH0', 'L', 'IH1', 'G', 'R', 'AH0', 'F', 'IY0']), 'calliope': ('NN', ['K', 'AH0', 'L', 'AY1', 'AH0', 'P', 'IY2']), 'callous': ('JJ', ['K', 'AE1', 'L', 'AH0', 'S']), 'callow': ('NN', ['K', 'AE1', 'L', 'OW0']), 'callus': ('NN', ['K', 'AE1', 'L', 'AH0', 'S']), 'calm': ('NN', ['K', 'AA1', 'M']), 'calmed': ('VBN', ['K', 'AA1', 'M', 'D']), 'calming': ('VBG', ['K', 'AA1', 'M', 'IH0', 'NG']), 'calmer': ('NN', ['K', 'AA1', 'M', 'ER0']), 'calmly': ('NN', ['K', 'AA1', 'M', 'L', 'IY0']), 'calmness': ('NN', ['K', 'AA1', 'M', 'N', 'AH0', 'S']), 'e': ('NN', ['IY1']), 'each': ('DT', ['IY1', 'CH']), 'eager': ('NN', ['IY1', 'G', 'ER0']), 'eagerly': ('RB', ['IY1', 'G', 'ER0', 'L', 'IY0']), 'eagerness': ('NN', ['IY1', 'G', 'ER0', 'N', 'AH0', 'S']), 'eagle': ('NN', ['IY1', 'G', 'AH0', 'L']), 'ear': ('NN', ['IY1', 'R']), 'eared': ('VBN', ['IH1', 'R', 'D']), 'earl': ('NN', ['ER1', 'L']), 'earldom': ('NN', ['ER1', 
'L', 'D', 'AH0', 'M']), 'earless': ('NN', ['IH1', 'R', 'L', 'AH0', 'S']), 'early': ('RB', ['ER1', 'L', 'IY0']), 'earmark': ('NN', ['IH1', 'R', 'M', 'AA2', 'R', 'K']), 'earmarked': ('VBN', ['IH1', 'R', 'M', 'AA2', 'R', 'K', 'T']), 'earmarking': ('VBG', ['IH1', 'R', 'M', 'AA2', 'R', 'K', 'IH0', 'NG']), 'earn': ('NN', ['ER1', 'N']), 'earned': ('VBN', ['ER1', 'N', 'D']), 'earning': ('VBG', ['ER1', 'N', 'IH0', 'NG']), 'earnest': ('NN', ['ER1', 'N', 'IH0', 'S', 'T']), 'earnestly': ('RB', ['ER1', 'N', 'AH0', 'S', 'T', 'L', 'IY0']), 'earnestness': ('NN', ['ER1', 'N', 'AH0', 'S', 'T', 'N', 'AH0', 'S']), 'earnings': ('NNS', ['ER1', 'N', 'IH0', 'NG', 'Z']), 'earring': ('VBG', ['IH1', 'R', 'IH0', 'NG']), 'earshot': ('NN', ['IH1', 'R', 'SH', 'AA2', 'T']), 'earth': ('NN', ['ER1', 'TH']), 'earthen': ('NN', ['ER1', 'TH', 'AH0', 'N']), 'earthenware': ('NN', ['ER1', 'TH', 'AH0', 'N', 'W', 'EH2', 'R']), 'earthling': ('VBG', ['ER1', 'TH', 'L', 'IH0', 'NG']), 'earthly': ('RB', ['ER1', 'TH', 'L', 'IY0']), 'earthquake': ('NN', ['ER1', 'TH', 'K', 'W', 'EY2', 'K']), 'earthstar': ('NN', ['ER1', 'TH', 'S', 'T', 'AA2', 'R']), 'earthwork': ('NN', ['ER1', 'TH', 'W', 'ER2', 'K']), 'earthworm': ('NN', ['ER1', 'TH', 'W', 'ER2', 'M']), 'earthy': ('NN', ['ER1', 'TH', 'IY0']), 'earwax': ('NN', ['IH1', 'R', 'W', 'AE2', 'K', 'S']), 'ease': ('NN', ['IY1', 'Z']), 'eased': ('VBN', ['IY1', 'Z', 'D']), 'easing': ('VBG', ['IY1', 'Z', 'IH0', 'NG']), 'easel': ('NN', ['IY1', 'Z', 'AH0', 'L']), 'easement': ('NN', ['IY1', 'Z', 'M', 'AH0', 'N', 'T']), 'easily': ('RB', ['IY1', 'Z', 'AH0', 'L', 'IY0']), 'east': ('NN', ['IY1', 'S', 'T']), 'easter': ('NN', ['IY1', 'S', 'T', 'ER0']), 'easterling': ('VBG', ['IY1', 'S', 'T', 'ER0', 'L', 'IH0', 'NG']), 'easterly': ('RB', ['IY1', 'S', 'T', 'ER0', 'L', 'IY0']), 'eastern': ('JJ', ['IY1', 'S', 'T', 'ER0', 'N']), 'eastward': ('NN', ['IY1', 'S', 'T', 'W', 'ER0', 'D']), 'easy': ('JJ', ['IY1', 'Z', 'IY0']), 'ate': ('NN', ['EY1', 'T']), 'eat': ('NN', ['IY1', 'T']), 'eaten': ('VB', 
['IY1', 'T', 'AH0', 'N']), 'eating': ('VBG', ['IY1', 'T', 'IH0', 'NG']), 'eatable': ('JJ', ['IY1', 'T', 'AH0', 'B', 'AH0', 'L']), 'eater': ('NN', ['IY1', 'T', 'ER0']), 'eaves': ('NNS', ['IY1', 'V', 'Z']), 'eavesdrop': ('NN', ['IY1', 'V', 'Z', 'D', 'R', 'AA2', 'P']), 'eavesdropping': ('NN', ['IY1', 'V', 'Z', 'D', 'R', 'AA2', 'P', 'IH0', 'NG']), 'ebb': ('NN', ['EH1', 'B']), 'ebbed': ('NN', ['EH1', 'B', 'AH0', 'D']), 'ebbing': ('VBG', ['EH1', 'B', 'IH0', 'NG']), 'ebony': ('NN', ['EH1', 'B', 'AH0', 'N', 'IY0']), 'ebullience': ('NN', ['IH0', 'B', 'UH1', 'L', 'Y', 'AH0', 'N', 'S']), 'ebullient': ('NN', ['IH0', 'B', 'AH1', 'L', 'Y', 'AH0', 'N', 'T']), 'eccentric': ('JJ', ['IH0', 'K', 'S', 'EH1', 'N', 'T', 'R', 'IH0', 'K']), 'eccentricities': ('NNS', ['EH2', 'K', 'S', 'EH0', 'N', 'T', 'R', 'IH1', 'S', 'IH0', 'T', 'IY0', 'Z']), 'eccentricity': ('NN', ['EH2', 'K', 'S', 'AH0', 'N', 'T', 'R', 'IH1', 'S', 'AH0', 'T', 'IY0']), 'ecclesiastic': ('JJ', ['IH0', 'K', 'L', 'IY2', 'Z', 'IY0', 'AE1', 'S', 'T', 'IH0', 'K']), 'ecclesiastical': ('JJ', ['IH0', 'K', 'L', 'IY2', 'Z', 'IY0', 'AE1', 'S', 'T', 'IH0', 'K', 'AH0', 'L']), 'echelon': ('NN', ['EH1', 'SH', 'AH0', 'L', 'AA2', 'N']), 'echidna': ('NN', ['IH0', 'K', 'IH1', 'D', 'N', 'AH0']), 'echoes': ('NNS', ['EH1', 'K', 'OW0', 'Z']), 'echo': ('NN', ['EH1', 'K', 'OW0']), 'echoed': ('NN', ['EH1', 'K', 'OW0', 'D']), 'echoing': ('VBG', ['EH1', 'K', 'OW0', 'IH0', 'NG']), 'eclectic': ('JJ', ['IH0', 'K', 'L', 'EH1', 'K', 'T', 'IH0', 'K']), 'eclipse': ('NN', ['IH0', 'K', 'L', 'IH1', 'P', 'S']), 'eclipsed': ('VBN', ['IH0', 'K', 'L', 'IH1', 'P', 'S', 'T']), 'f': ('NN', ['EH1', 'F']), 'fabian': ('JJ', ['F', 'EY1', 'B', 'IY0', 'AH0', 'N']), 'fable': ('JJ', ['F', 'EY1', 'B', 'AH0', 'L']), 'fabled': ('VBN', ['F', 'EY1', 'B', 'AH0', 'L', 'D']), 'fabric': ('NN', ['F', 'AE1', 'B', 'R', 'IH0', 'K']), 'fabricant': ('NN', ['F', 'AE1', 'B', 'R', 'IH0', 'K', 'AH0', 'N', 'T']), 'fabricated': ('VBN', ['F', 'AE1', 'B', 'R', 'IH0', 'K', 'EY2', 'T', 'AH0', 'D']), 
'fabricating': ('VBG', ['F', 'AE1', 'B', 'R', 'IH0', 'K', 'EY2', 'T', 'IH0', 'NG']), 'fabricate': ('NN', ['F', 'AE1', 'B', 'R', 'AH0', 'K', 'EY2', 'T']), 'fabrication': ('NN', ['F', 'AE2', 'B', 'R', 'IH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'fabricator': ('NN', ['F', 'AE1', 'B', 'R', 'IH0', 'K', 'EY2', 'T', 'ER0']), 'fabulous': ('JJ', ['F', 'AE1', 'B', 'Y', 'AH0', 'L', 'AH0', 'S']), 'fac': ('NN', ['F', 'AE1', 'K']), 'facade': ('NN', ['F', 'AH0', 'S', 'AA1', 'D']), 'face': ('NN', ['F', 'EY1', 'S']), 'faced': ('VBN', ['F', 'EY1', 'S', 'T']), 'facing': ('VBG', ['F', 'EY1', 'S', 'IH0', 'NG']), 'facet': ('NN', ['F', 'AE1', 'S', 'AH0', 'T']), 'faceted': ('VBN', ['F', 'AE1', 'S', 'AH0', 'T', 'IH0', 'D']), 'facetious': ('JJ', ['F', 'AH0', 'S', 'IY1', 'SH', 'AH0', 'S']), 'facial': ('JJ', ['F', 'EY1', 'SH', 'AH0', 'L']), 'facile': ('NN', ['F', 'AE1', 'S', 'AH0', 'L']), 'facilitated': ('VBN', ['F', 'AH0', 'S', 'IH1', 'L', 'AH0', 'T', 'EY2', 'T', 'IH0', 'D']), 'facilitating': ('VBG', ['F', 'AH0', 'S', 'IH1', 'L', 'AH0', 'T', 'EY2', 'T', 'IH0', 'NG']), 'facilitate': ('NN', ['F', 'AH0', 'S', 'IH1', 'L', 'AH0', 'T', 'EY2', 'T']), 'facilitation': ('NN', ['F', 'AH0', 'S', 'IH2', 'L', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'facilities': ('NNS', ['F', 'AH0', 'S', 'IH1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'facility': ('NN', ['F', 'AH0', 'S', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'facsimiles': ('NNS', ['F', 'AE0', 'K', 'S', 'IH1', 'M', 'AH0', 'L', 'IY0', 'Z']), 'facsimile': ('NN', ['F', 'AE0', 'K', 'S', 'IH1', 'M', 'AH0', 'L', 'IY0']), 'fact': ('NN', ['F', 'AE1', 'K', 'T']), 'faction': ('NN', ['F', 'AE1', 'K', 'SH', 'AH0', 'N']), 'factitious': ('JJ', ['F', 'AE0', 'K', 'T', 'IH1', 'SH', 'AH0', 'S']), 'facto': ('NN', ['F', 'AE1', 'K', 'T', 'OW0']), 'factor': ('NN', ['F', 'AE1', 'K', 'T', 'ER0']), 'factored': ('VBN', ['F', 'AE1', 'K', 'T', 'ER0', 'D']), 'factoring': ('VBG', ['F', 'AE1', 'K', 'T', 'ER0', 'IH0', 'NG']), 'factories': ('NNS', ['F', 'AE1', 'K', 'T', 'ER0', 'IY0', 'Z']), 'factory': ('NN', 
['F', 'AE1', 'K', 'T', 'ER0', 'IY0']), 'factual': ('JJ', ['F', 'AE1', 'K', 'CH', 'UW0', 'AH0', 'L']), 'faculties': ('NNS', ['F', 'AE1', 'K', 'AH0', 'L', 'T', 'IY0', 'Z']), 'faculty': ('NN', ['F', 'AE1', 'K', 'AH0', 'L', 'T', 'IY0']), 'fad': ('NN', ['F', 'AE1', 'D']), 'fade': ('NN', ['F', 'EY1', 'D']), 'faded': ('VBD', ['F', 'EY1', 'D', 'AH0', 'D']), 'fading': ('NN', ['F', 'EY1', 'D', 'IH0', 'NG']), 'fader': ('NN', ['F', 'EY1', 'D', 'ER0']), 'fag': ('NN', ['F', 'AE1', 'G']), 'fahrenheit': ('NN', ['F', 'EH1', 'R', 'AH0', 'N', 'HH', 'AY2', 'T']), 'failed': ('VBD', ['F', 'EY1', 'L', 'D']), 'failing': ('VBG', ['F', 'EY1', 'L', 'IH0', 'NG']), 'fail': ('NN', ['F', 'EY1', 'L']), 'failure': ('NN', ['F', 'EY1', 'L', 'Y', 'ER0']), 'fain': ('NN', ['F', 'EY1', 'N']), 'faint': ('NN', ['F', 'EY1', 'N', 'T']), 'fainted': ('VBN', ['F', 'EY1', 'N', 'T', 'IH0', 'D']), 'fainting': ('NN', ['F', 'EY1', 'N', 'T', 'IH0', 'NG']), 'fainthearted': ('VBN', ['F', 'EY1', 'N', 'T', 'HH', 'AA1', 'R', 'T', 'IH0', 'D']), 'faintly': ('RB', ['F', 'EY1', 'N', 'T', 'L', 'IY0']), 'faintness': ('NN', ['F', 'EY1', 'N', 'T', 'N', 'AH0', 'S']), 'fair': ('NN', ['F', 'EH1', 'R']), 'fairly': ('RB', ['F', 'EH1', 'R', 'L', 'IY0']), 'fairness': ('NN', ['F', 'EH1', 'R', 'N', 'AH0', 'S']), 'fairway': ('NN', ['F', 'EH1', 'R', 'W', 'EY2']), 'fairies': ('NNS', ['F', 'EH1', 'R', 'IY0', 'Z']), 'fairy': ('NN', ['F', 'EH1', 'R', 'IY0']), 'fairyland': ('NN', ['F', 'EH1', 'R', 'IY0', 'L', 'AE2', 'N', 'D']), 'faith': ('NN', ['F', 'EY1', 'TH']), 'faithful': ('NN', ['F', 'EY1', 'TH', 'F', 'AH0', 'L']), 'fake': ('NN', ['F', 'EY1', 'K']), 'falcon': ('NN', ['F', 'AE1', 'L', 'K', 'AH0', 'N']), 'falconer': ('NN', ['F', 'AE1', 'L', 'K', 'AH0', 'N', 'ER0']), 'falconet': ('NN', ['F', 'AE2', 'L', 'K', 'AH0', 'N', 'EH1', 'T']), 'falconry': ('NN', ['F', 'AE1', 'L', 'K', 'AH0', 'N', 'R', 'IY0']), 'falk': ('NN', ['F', 'AO1', 'K']), 'fell': ('VBD', ['F', 'EH1', 'L']), 'fallen': ('VBN', ['F', 'AA1', 'L', 'AH0', 'N']), 'falling': ('VBG', 
['F', 'AA1', 'L', 'IH0', 'NG']), 'fall': ('NN', ['F', 'AO1', 'L']), 'fallacious': ('JJ', ['F', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'S']), 'fallacies': ('NNS', ['F', 'AE1', 'L', 'AH0', 'S', 'IY0', 'Z']), 'fallacy': ('NN', ['F', 'AE1', 'L', 'AH0', 'S', 'IY0']), 'faller': ('NN', ['F', 'AO1', 'L', 'ER0']), 'fallibility': ('NN', ['F', 'AE2', 'L', 'IH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'fallible': ('JJ', ['F', 'AE1', 'L', 'AH0', 'B', 'AH0', 'L']), 'fallopian': ('JJ', ['F', 'AH0', 'L', 'OW1', 'P', 'IY0', 'AH0', 'N']), 'fallow': ('NN', ['F', 'AE1', 'L', 'OW2']), 'false': ('JJ', ['F', 'AO1', 'L', 'S']), 'falsehood': ('NN', ['F', 'AE1', 'L', 'S', 'HH', 'UH2', 'D']), 'falsely': ('RB', ['F', 'AO1', 'L', 'S', 'L', 'IY0']), 'falsettos': ('NNS', ['F', 'AO0', 'L', 'S', 'EH1', 'T', 'OW2', 'Z']), 'falsetto': ('NN', ['F', 'AO0', 'L', 'S', 'EH1', 'T', 'OW2']), 'falsification': ('NN', ['F', 'AE2', 'L', 'S', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'falsified': ('VBN', ['F', 'AO1', 'L', 'S', 'AH0', 'F', 'AY2', 'D']), 'falsifying': ('VBG', ['F', 'AO1', 'L', 'S', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'falsify': ('NN', ['F', 'AO1', 'L', 'S', 'AH0', 'F', 'AY2']), 'falsity': ('NN', ['F', 'AO1', 'L', 'S', 'AH0', 'T', 'IY0']), 'falter': ('NN', ['F', 'AO1', 'L', 'T', 'ER0']), 'faltered': ('VBN', ['F', 'AA1', 'L', 'T', 'ER0', 'D']), 'faltering': ('VBG', ['F', 'AO1', 'L', 'T', 'ER0', 'IH0', 'NG']), 'fame': ('NN', ['F', 'EY1', 'M']), 'famed': ('VBN', ['F', 'EY1', 'M', 'D']), 'familiar': ('JJ', ['F', 'AH0', 'M', 'IH1', 'L', 'Y', 'ER0']), 'familiarity': ('NN', ['F', 'AH0', 'M', 'IH2', 'L', 'Y', 'EH1', 'R', 'AH0', 'T', 'IY0']), 'familiarized': ('VBN', ['F', 'AH0', 'M', 'IH1', 'L', 'Y', 'ER0', 'AY2', 'Z', 'D']), 'familiarize': ('VB', ['F', 'AH0', 'M', 'IH1', 'L', 'Y', 'ER0', 'AY2', 'Z']), 'families': ('NNS', ['F', 'AE1', 'M', 'AH0', 'L', 'IY0', 'Z']), 'family': ('NN', ['F', 'AE1', 'M', 'AH0', 'L', 'IY0']), 'famine': ('NN', ['F', 'AE1', 'M', 'AH0', 'N']), 'famous': ('JJ', ['F', 'EY1', 'M', 
'AH0', 'S']), 'famously': ('RB', ['F', 'EY1', 'M', 'AH0', 'S', 'L', 'IY0']), 'fan': ('NN', ['F', 'AE1', 'N']), 'fanned': ('VBN', ['F', 'AE1', 'N', 'D']), 'fanning': ('VBG', ['F', 'AE1', 'N', 'IH0', 'NG']), 'fanatic': ('JJ', ['F', 'AH0', 'N', 'AE1', 'T', 'IH0', 'K']), 'fanatical': ('JJ', ['F', 'AH0', 'N', 'AE1', 'T', 'IH0', 'K', 'AH0', 'L']), 'fanaticism': ('NN', ['F', 'AH0', 'N', 'AE1', 'T', 'AH0', 'S', 'IH2', 'Z', 'AH0', 'M']), 'fancied': ('VBN', ['F', 'AE1', 'N', 'S', 'IY0', 'D']), 'fancier': ('NN', ['F', 'AE1', 'N', 'S', 'IY0', 'ER0']), 'fanciful': ('JJ', ['F', 'AE1', 'N', 'S', 'IH0', 'F', 'AH0', 'L']), 'fancies': ('NNS', ['F', 'AE1', 'N', 'S', 'IY0', 'Z']), 'fancy': ('NN', ['F', 'AE1', 'N', 'S', 'IY0']), 'fandango': ('NN', ['F', 'AE0', 'N', 'D', 'AE1', 'NG', 'G', 'OW2']), 'fane': ('NN', ['F', 'EY1', 'N']), 'fanfare': ('NN', ['F', 'AE1', 'N', 'F', 'EH2', 'R']), 'fang': ('NN', ['F', 'AE1', 'NG']), 'fangle': ('NN', ['F', 'AE1', 'NG', 'G', 'AH0', 'L']), 'fangled': ('VBN', ['F', 'AE1', 'NG', 'G', 'AH0', 'L', 'D']), 'fanlike': ('NN', ['F', 'AE1', 'N', 'L', 'AY2', 'K']), 'fantail': ('NN', ['F', 'AE1', 'N', 'T', 'EY2', 'L']), 'fantasia': ('NN', ['F', 'AE0', 'N', 'T', 'EY1', 'ZH', 'AH0']), 'fantastic': ('JJ', ['F', 'AE0', 'N', 'T', 'AE1', 'S', 'T', 'IH0', 'K']), 'fantastically': ('RB', ['F', 'AE0', 'N', 'T', 'AE1', 'S', 'T', 'IH0', 'K', 'L', 'IY0']), 'fantasies': ('NNS', ['F', 'AE1', 'N', 'T', 'AH0', 'S', 'IY0', 'Z']), 'fantasy': ('NN', ['F', 'AE1', 'N', 'T', 'AH0', 'S', 'IY0']), 'far': ('RB', ['F', 'AA1', 'R']), 'farce': ('NN', ['F', 'AA1', 'R', 'S']), 'farcical': ('JJ', ['F', 'AA1', 'R', 'S', 'AH0', 'K', 'AH0', 'L']), 'fared': ('VBN', ['F', 'EH1', 'R', 'D']), 'faring': ('VBG', ['F', 'EH1', 'R', 'IY0', 'NG']), 'fare': ('NN', ['F', 'EH1', 'R']), 'farewell': ('NN', ['F', 'EH2', 'R', 'W', 'EH1', 'L']), 'farfetched': ('VBN', ['F', 'AA1', 'R', 'F', 'EH1', 'CH', 'T']), 'farina': ('NN', ['F', 'ER0', 'IY1', 'N', 'AH0']), 'farm': ('NN', ['F', 'AA1', 'R', 'M']), 'farmed': 
('VBN', ['F', 'AA1', 'R', 'M', 'D']), 'farming': ('VBG', ['F', 'AA1', 'R', 'M', 'IH0', 'NG']), 'farmer': ('NN', ['F', 'AA1', 'R', 'M', 'ER0']), 'farmhouse': ('NN', ['F', 'AA1', 'R', 'M', 'HH', 'AW2', 'S']), 'farmstead': ('NN', ['F', 'AA1', 'R', 'M', 'S', 'T', 'EH2', 'D']), 'farmyard': ('NN', ['F', 'AA1', 'R', 'M', 'Y', 'AA2', 'R', 'D']), 'faro': ('NN', ['F', 'EH1', 'R', 'OW0']), 'farrand': ('NN', ['F', 'AE1', 'R', 'AH0', 'N', 'D']), 'farrier': ('NN', ['F', 'EH1', 'R', 'IY0', 'ER0']), 'farrow': ('NN', ['F', 'EH1', 'R', 'OW2']), 'farrowing': ('VBG', ['F', 'AE1', 'R', 'OW2', 'IH0', 'NG']), 'farry': ('NN', ['F', 'AE1', 'R', 'IY0']), 'farsighted': ('VBN', ['F', 'AA1', 'R', 'S', 'AY1', 'T', 'AH0', 'D']), 'farsightedness': ('NN', ['F', 'AA2', 'R', 'S', 'AY1', 'T', 'IH0', 'D', 'N', 'AH0', 'S']), 'farther': ('NN', ['F', 'AA1', 'R', 'DH', 'ER0']), 'farthest': ('NN', ['F', 'AA1', 'R', 'DH', 'AH0', 'S', 'T']), 'farthing': ('VBG', ['F', 'AA1', 'R', 'DH', 'IH0', 'NG']), 'farthingale': ('NN', ['F', 'AA1', 'R', 'DH', 'IH0', 'NG', 'G', 'EY2', 'L']), 'fascinated': ('VBN', ['F', 'AE1', 'S', 'AH0', 'N', 'EY2', 'T', 'AH0', 'D']), 'fascinating': ('VBG', ['F', 'AE1', 'S', 'AH0', 'N', 'EY2', 'T', 'IH0', 'NG']), 'fascinate': ('NN', ['F', 'AE1', 'S', 'AH0', 'N', 'EY2', 'T']), 'fascination': ('NN', ['F', 'AE2', 'S', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'fashion': ('NN', ['F', 'AE1', 'SH', 'AH0', 'N']), 'fashioned': ('VBN', ['F', 'AE1', 'SH', 'AH0', 'N', 'D']), 'fashioning': ('VBG', ['F', 'AE1', 'SH', 'AH0', 'N', 'IH0', 'NG']), 'fashionable': ('JJ', ['F', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'B', 'AH0', 'L']), 'fashionably': ('RB', ['F', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'B', 'L', 'IY0']), 'fasted': ('VBN', ['F', 'AE1', 'S', 'T', 'IH0', 'D']), 'fasting': ('VBG', ['F', 'AE1', 'S', 'T', 'IH0', 'NG']), 'fast': ('NN', ['F', 'AE1', 'S', 'T']), 'fastened': ('VBN', ['F', 'AE1', 'S', 'AH0', 'N', 'D']), 'fastening': ('VBG', ['F', 'AE1', 'S', 'AH0', 'N', 'IH0', 'NG']), 'fasten': ('NNS', ['F', 'AE1', 'S', 
'AH0', 'N']), 'fastener': ('NN', ['F', 'AE1', 'S', 'AH0', 'N', 'ER0']), 'faster': ('NN', ['F', 'AE1', 'S', 'T', 'ER0']), 'fastidious': ('JJ', ['F', 'AE0', 'S', 'T', 'IH1', 'D', 'IY0', 'AH0', 'S']), 'fastness': ('NN', ['F', 'AE1', 'S', 'T', 'N', 'AH0', 'S']), 'fat': ('NN', ['F', 'AE1', 'T']), 'fatal': ('NN', ['F', 'EY1', 'T', 'AH0', 'L']), 'fatalism': ('NN', ['F', 'EY1', 'T', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'fatalist': ('NN', ['F', 'EY1', 'T', 'AH0', 'L', 'IH0', 'S', 'T']), 'fatalistic': ('JJ', ['F', 'EY0', 'T', 'AH0', 'L', 'IH1', 'S', 'T', 'IH0', 'K']), 'fatalities': ('NNS', ['F', 'AH0', 'T', 'AE1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'fatality': ('NN', ['F', 'AH0', 'T', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'fatally': ('RB', ['F', 'EY1', 'T', 'AH0', 'L', 'IY0']), 'fate': ('NN', ['F', 'EY1', 'T']), 'fated': ('VBN', ['F', 'EY1', 'T', 'IH0', 'D']), 'fateful': ('NN', ['F', 'EY1', 'T', 'F', 'AH0', 'L']), 'father': ('NN', ['F', 'AA1', 'DH', 'ER0']), 'fathered': ('VBN', ['F', 'AA1', 'DH', 'ER0', 'D']), 'fathering': ('VBG', ['F', 'AA1', 'DH', 'ER0', 'IH0', 'NG']), 'fatherhood': ('NN', ['F', 'AA1', 'DH', 'ER0', 'HH', 'UH2', 'D']), 'fatherland': ('NN', ['F', 'AA1', 'DH', 'ER0', 'L', 'AE2', 'N', 'D']), 'fatherless': ('NN', ['F', 'AA1', 'DH', 'ER0', 'L', 'AH0', 'S']), 'fatherly': ('RB', ['F', 'AA1', 'DH', 'ER0', 'L', 'IY0']), 'fathom': ('NN', ['F', 'AE1', 'DH', 'AH0', 'M']), 'fathomable': ('JJ', ['F', 'AE1', 'DH', 'AH0', 'M', 'AH0', 'B', 'AH0', 'L']), 'fatigue': ('NN', ['F', 'AH0', 'T', 'IY1', 'G']), 'fatigued': ('VBN', ['F', 'AH0', 'T', 'IY1', 'G', 'D']), 'fatiguing': ('VBG', ['F', 'AH0', 'T', 'IY1', 'G', 'IH0', 'NG']), 'fattened': ('VBN', ['F', 'AE1', 'T', 'AH0', 'N', 'D']), 'fatten': ('NNS', ['F', 'AE1', 'T', 'AH0', 'N']), 'fatty': ('NN', ['F', 'AE1', 'T', 'IY0']), 'fatuous': ('JJ', ['F', 'AE1', 'CH', 'AH0', 'W', 'AH0', 'S']), 'faucet': ('NN', ['F', 'AO1', 'S', 'AH0', 'T']), 'faulcon': ('NN', ['F', 'AO1', 'L', 'K', 'AH0', 'N']), 'fault': ('NN', ['F', 'AO1', 'L', 'T']), 
'faulted': ('VBN', ['F', 'AO1', 'L', 'T', 'IH0', 'D']), 'faulting': ('VBG', ['F', 'AO1', 'L', 'T', 'IH0', 'NG']), 'faulty': ('NN', ['F', 'AO1', 'L', 'T', 'IY0']), 'fauna': ('NN', ['F', 'AO1', 'N', 'AH0']), 'faunal': ('JJ', ['F', 'AA1', 'N', 'AH0', 'L']), 'faux': ('NN', ['F', 'AO1', 'K', 'S']), 'favor': ('NN', ['F', 'EY1', 'V', 'ER0']), 'favored': ('VBN', ['F', 'EY1', 'V', 'ER0', 'D']), 'favoring': ('VBG', ['F', 'EY1', 'V', 'ER0', 'IH0', 'NG']), 'favorable': ('JJ', ['F', 'EY1', 'V', 'ER0', 'AH0', 'B', 'AH0', 'L']), 'favorite': ('NN', ['F', 'EY1', 'V', 'ER0', 'IH0', 'T']), 'favoritism': ('NN', ['F', 'EY1', 'V', 'ER0', 'IH0', 'T', 'IH2', 'Z', 'AH0', 'M']), 'fawn': ('NN', ['F', 'AO1', 'N']), 'fawning': ('VBG', ['F', 'AO1', 'N', 'IH0', 'NG']), 'faxed': ('NN', ['F', 'AE1', 'K', 'S', 'T']), 'fay': ('NN', ['F', 'EY1']), 'fayed': ('NNS', ['F', 'EY1', 'D']), 'faze': ('NN', ['F', 'EY1', 'Z']), 'fealty': ('NN', ['F', 'IY1', 'AH0', 'L', 'T', 'IY0']), 'fear': ('NN', ['F', 'IH1', 'R']), 'feared': ('VBN', ['F', 'IH1', 'R', 'D']), 'fearing': ('VBG', ['F', 'IH1', 'R', 'IH0', 'NG']), 'fearful': ('NN', ['F', 'IH1', 'R', 'F', 'AH0', 'L']), 'fearless': ('NN', ['F', 'IH1', 'R', 'L', 'AH0', 'S']), 'fearsome': ('NN', ['F', 'IH1', 'R', 'S', 'AH0', 'M']), 'feasibility': ('NN', ['F', 'IY2', 'Z', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'feasible': ('JJ', ['F', 'IY1', 'Z', 'AH0', 'B', 'AH0', 'L']), 'feast': ('NN', ['F', 'IY1', 'S', 'T']), 'feasted': ('VBN', ['F', 'IY1', 'S', 'T', 'IH0', 'D']), 'feasting': ('VBG', ['F', 'IY1', 'S', 'T', 'IH0', 'NG']), 'feaster': ('NN', ['F', 'IY1', 'S', 'T', 'ER0']), 'feat': ('NN', ['F', 'IY1', 'T']), 'feather': ('NN', ['F', 'EH1', 'DH', 'ER0']), 'feathered': ('VBN', ['F', 'EH1', 'DH', 'ER0', 'D']), 'feathering': ('VBG', ['F', 'EH1', 'DH', 'ER0', 'IH0', 'NG']), 'featherless': ('NN', ['F', 'EH1', 'DH', 'ER0', 'L', 'AH0', 'S']), 'featherly': ('RB', ['F', 'EH1', 'DH', 'ER0', 'L', 'IY0']), 'feathery': ('NN', ['F', 'EH1', 'DH', 'ER0', 'IY0']), 'feature': ('NN', 
['F', 'IY1', 'CH', 'ER0']), 'featured': ('VBN', ['F', 'IY1', 'CH', 'ER0', 'D']), 'featureless': ('NN', ['F', 'IY1', 'CH', 'ER0', 'L', 'AH0', 'S']), 'february': ('JJ', ['F', 'EH1', 'B', 'Y', 'AH0', 'W', 'EH2', 'R', 'IY0']), 'fecal': ('JJ', ['F', 'IY1', 'K', 'AH0', 'L']), 'feces': ('NNS', ['F', 'IY1', 'S', 'IY2', 'Z']), 'feckless': ('NN', ['F', 'EH1', 'K', 'L', 'IH0', 'S']), 'fecundity': ('NN', ['F', 'AH0', 'K', 'AH1', 'N', 'D', 'IH0', 'T', 'IY0']), 'fed': ('NN', ['F', 'EH1', 'D']), 'federal': ('JJ', ['F', 'EH1', 'D', 'ER0', 'AH0', 'L']), 'federalism': ('NN', ['F', 'EH1', 'D', 'ER0', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'federalist': ('NN', ['F', 'EH1', 'D', 'ER0', 'AH0', 'L', 'IH0', 'S', 'T']), 'federalized': ('VBN', ['F', 'EH1', 'D', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'D']), 'federalizing': ('VBG', ['F', 'EH1', 'D', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'federalize': ('VB', ['F', 'EH1', 'D', 'ER0', 'AH0', 'L', 'AY2', 'Z']), 'federate': ('NN', ['F', 'EH1', 'D', 'ER0', 'EY2', 'T']), 'federation': ('NN', ['F', 'EH2', 'D', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'federative': ('JJ', ['F', 'EH1', 'D', 'ER0', 'AH0', 'T', 'IH0', 'V']), 'fee': ('NN', ['F', 'IY1']), 'feed': ('NN', ['F', 'IY1', 'D']), 'feeble': ('JJ', ['F', 'IY1', 'B', 'AH0', 'L']), 'feebly': ('RB', ['F', 'IY1', 'B', 'L', 'IY0']), 'feeding': ('VBG', ['F', 'IY1', 'D', 'IH0', 'NG']), 'feeder': ('NN', ['F', 'IY1', 'D', 'ER0']), 'felt': ('NN', ['F', 'EH1', 'L', 'T']), 'feeling': ('VBG', ['F', 'IY1', 'L', 'IH0', 'NG']), 'feel': ('NN', ['F', 'IY1', 'L']), 'feeler': ('NN', ['F', 'IY1', 'L', 'ER0']), 'feese': ('JJ', ['F', 'IY1', 'Z']), 'feet': ('NNS', ['F', 'IY1', 'T']), 'fehling': ('VBG', ['F', 'EH1', 'L', 'IH0', 'NG']), 'feigned': ('VBN', ['F', 'EY1', 'N', 'D']), 'feigning': ('VBG', ['F', 'EY1', 'N', 'IH0', 'NG']), 'feign': ('NN', ['F', 'EY1', 'N']), 'feint': ('NN', ['F', 'EY1', 'N', 'T']), 'feldspar': ('NN', ['F', 'EH1', 'L', 'D', 'S', 'P', 'AA2', 'R']), 'felicitate': ('NN', ['F', 'AH0', 'L', 'IH1', 'S', 'IH0', 
'T', 'EY2', 'T']), 'felicitous': ('JJ', ['F', 'IH0', 'L', 'IH1', 'S', 'AH0', 'T', 'AH0', 'S']), 'felicity': ('NN', ['F', 'IH0', 'L', 'IH1', 'S', 'AH0', 'T', 'IY0']), 'feline': ('NN', ['F', 'IY1', 'L', 'AY2', 'N']), 'felis': ('NN', ['F', 'EH1', 'L', 'IH0', 'S']), 'felled': ('VBN', ['F', 'EH1', 'L', 'D']), 'felling': ('VBG', ['F', 'EH1', 'L', 'IH0', 'NG']), 'feller': ('NN', ['F', 'EH1', 'L', 'ER0']), 'fellow': ('NN', ['F', 'EH1', 'L', 'OW0']), 'fellowship': ('NN', ['F', 'EH1', 'L', 'OW0', 'SH', 'IH2', 'P']), 'felon': ('NN', ['F', 'EH1', 'L', 'AH0', 'N']), 'felonious': ('JJ', ['F', 'EH0', 'L', 'OW1', 'N', 'IY0', 'AH0', 'S']), 'felonies': ('NNS', ['F', 'EH1', 'L', 'AH0', 'N', 'IY0', 'Z']), 'felony': ('NN', ['F', 'EH1', 'L', 'AH0', 'N', 'IY0']), 'felter': ('NN', ['F', 'EH1', 'L', 'T', 'ER0']), 'felucca': ('NN', ['F', 'IH0', 'L', 'AH1', 'K', 'AH0']), 'female': ('NN', ['F', 'IY1', 'M', 'EY2', 'L']), 'feminine': ('NN', ['F', 'EH1', 'M', 'AH0', 'N', 'AH0', 'N']), 'femininity': ('NN', ['F', 'EH2', 'M', 'AH0', 'N', 'IH1', 'N', 'AH0', 'T', 'IY0']), 'femme': ('NN', ['F', 'EH1', 'M']), 'femoral': ('JJ', ['F', 'EH1', 'M', 'ER0', 'AH0', 'L']), 'femur': ('NN', ['F', 'IY1', 'M', 'ER0']), 'fen': ('NN', ['F', 'EH1', 'N']), 'fence': ('NN', ['F', 'EH1', 'N', 'S']), 'fencing': ('VBG', ['F', 'EH1', 'N', 'S', 'IH0', 'NG']), 'fenceless': ('NN', ['F', 'EH1', 'N', 'S', 'L', 'AH0', 'S']), 'fend': ('NN', ['F', 'EH1', 'N', 'D']), 'fended': ('VBN', ['F', 'EH1', 'N', 'D', 'IH0', 'D']), 'fending': ('VBG', ['F', 'EH1', 'N', 'D', 'IH0', 'NG']), 'fender': ('NN', ['F', 'EH1', 'N', 'D', 'ER0']), 'fenian': ('JJ', ['F', 'IY1', 'N', 'IY0', 'AH0', 'N']), 'fennel': ('NN', ['F', 'EH1', 'N', 'AH0', 'L']), 'fenugreek': ('NN', ['F', 'EH1', 'N', 'UW0', 'G', 'R', 'IY2', 'K']), 'fer': ('NN', ['F', 'ER1']), 'feral': ('JJ', ['F', 'EH1', 'R', 'AH0', 'L']), 'feria': ('NNS', ['F', 'EH1', 'R', 'IY0', 'AH0']), 'ferm': ('NN', ['F', 'ER1', 'M']), 'ferment': ('NN', ['F', 'ER0', 'M', 'EH1', 'N', 'T']), 'fermented': ('VBN', 
['F', 'ER0', 'M', 'EH1', 'N', 'T', 'AH0', 'D']), 'fermenting': ('VBG', ['F', 'ER0', 'M', 'EH1', 'N', 'T', 'IH0', 'NG']), 'fermentation': ('NN', ['F', 'ER2', 'M', 'AH0', 'N', 'T', 'EY1', 'SH', 'AH0', 'N']), 'fern': ('NN', ['F', 'ER1', 'N']), 'fernery': ('NN', ['F', 'ER1', 'N', 'ER0', 'IY0']), 'ferocious': ('JJ', ['F', 'ER0', 'OW1', 'SH', 'AH0', 'S']), 'ferocity': ('NN', ['F', 'ER0', 'AA1', 'S', 'AH0', 'T', 'IY0']), 'ferrara': ('NN', ['F', 'ER0', 'AA1', 'R', 'AH0']), 'ferre': ('NN', ['F', 'EH1', 'R']), 'ferrer': ('NN', ['F', 'EH1', 'R', 'ER0']), 'ferret': ('NN', ['F', 'EH1', 'R', 'AH0', 'T']), 'ferreted': ('VBN', ['F', 'EH1', 'R', 'AH0', 'T', 'AH0', 'D']), 'ferreting': ('VBG', ['F', 'EH1', 'R', 'AH0', 'T', 'IH0', 'NG']), 'ferrier': ('NN', ['F', 'EH1', 'R', 'IY0', 'ER0']), 'ferrotype': ('NN', ['F', 'EH1', 'R', 'AH0', 'T', 'AY2', 'P']), 'ferrous': ('JJ', ['F', 'EH1', 'R', 'AH0', 'S']), 'ferried': ('VBN', ['F', 'EH1', 'R', 'IY0', 'D']), 'ferrying': ('VBG', ['F', 'EH1', 'R', 'IY0', 'IH0', 'NG']), 'ferry': ('NN', ['F', 'EH1', 'R', 'IY0']), 'ferries': ('NNS', ['F', 'EH1', 'R', 'IY0', 'Z']), 'ferryboat': ('NN', ['F', 'EH1', 'R', 'IY0', 'B', 'OW2', 'T']), 'ferryman': ('NN', ['F', 'EH1', 'R', 'IY0', 'M', 'AH0', 'N']), 'fertile': ('NN', ['F', 'ER1', 'T', 'AH0', 'L']), 'fertility': ('NN', ['F', 'ER0', 'T', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'fertilization': ('NN', ['F', 'ER2', 'T', 'AH0', 'L', 'IH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'fertilized': ('VBN', ['F', 'ER1', 'T', 'AH0', 'L', 'AY2', 'Z', 'D']), 'fertilizing': ('VBG', ['F', 'ER1', 'T', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'fertilize': ('VB', ['F', 'ER1', 'T', 'AH0', 'L', 'AY2', 'Z']), 'fertilizer': ('NN', ['F', 'ER1', 'T', 'AH0', 'L', 'AY2', 'Z', 'ER0']), 'fervent': ('NN', ['F', 'ER1', 'V', 'AH0', 'N', 'T']), 'fervid': ('NN', ['F', 'ER1', 'V', 'AH0', 'D']), 'fervor': ('NN', ['F', 'ER1', 'V', 'ER0']), 'fescue': ('NN', ['F', 'EH1', 'S', 'K', 'Y', 'UW2']), 'fess': ('NN', ['F', 'EH1', 'S']), 'fest': ('NN', ['F', 'EH1', 'S', 
'T']), 'festered': ('VBN', ['F', 'EH1', 'S', 'T', 'ER0', 'D']), 'festering': ('VBG', ['F', 'EH1', 'S', 'T', 'ER0', 'IH0', 'NG']), 'fester': ('NN', ['F', 'EH1', 'S', 'T', 'ER0']), 'festival': ('NN', ['F', 'EH1', 'S', 'T', 'AH0', 'V', 'AH0', 'L']), 'festive': ('JJ', ['F', 'EH1', 'S', 'T', 'IH0', 'V']), 'festivities': ('NNS', ['F', 'EH0', 'S', 'T', 'IH1', 'V', 'AH0', 'T', 'IY0', 'Z']), 'festivity': ('NN', ['F', 'EH0', 'S', 'T', 'IH1', 'V', 'AH0', 'T', 'IY0']), 'festoon': ('NN', ['F', 'EH2', 'S', 'T', 'UW1', 'N']), 'festooned': ('VBN', ['F', 'EH2', 'S', 'T', 'UW1', 'N', 'D']), 'fetal': ('NN', ['F', 'IY1', 'T', 'AH0', 'L']), 'fetched': ('VBN', ['F', 'EH1', 'CH', 'T']), 'fetching': ('VBG', ['F', 'EH1', 'CH', 'IH0', 'NG']), 'fetch': ('NN', ['F', 'EH1', 'CH']), 'fete': ('NN', ['F', 'EY1', 'T']), 'feted': ('VBN', ['F', 'EY1', 'T', 'IH0', 'D']), 'fetish': ('JJ', ['F', 'EH1', 'T', 'IH0', 'SH']), 'fetishism': ('NN', ['F', 'EH1', 'T', 'IH0', 'SH', 'IH2', 'Z', 'AH0', 'M']), 'fetishist': ('NN', ['F', 'EH1', 'T', 'IH0', 'SH', 'IH0', 'S', 'T']), 'fetid': ('NN', ['F', 'EH1', 'T', 'AH0', 'D']), 'fette': ('NN', ['F', 'EH1', 'T']), 'fetters': ('NNS', ['F', 'EH1', 'T', 'ER0', 'Z']), 'fetter': ('NN', ['F', 'EH1', 'T', 'ER0']), 'fettered': ('VBN', ['F', 'EH1', 'T', 'ER0', 'D']), 'fetuses': ('NNS', ['F', 'IY1', 'T', 'AH0', 'S', 'IH0', 'Z']), 'fetus': ('NN', ['F', 'IY1', 'T', 'AH0', 'S']), 'feud': ('NN', ['F', 'Y', 'UW1', 'D']), 'feudal': ('NN', ['F', 'Y', 'UW1', 'D', 'AH0', 'L']), 'feudalism': ('NN', ['F', 'Y', 'UW1', 'D', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'fever': ('NN', ['F', 'IY1', 'V', 'ER0']), 'fevered': ('VBN', ['F', 'IY1', 'V', 'ER0', 'D']), 'feverfew': ('NN', ['F', 'IY1', 'V', 'ER0', 'F', 'Y', 'UW2']), 'feverish': ('JJ', ['F', 'IY1', 'V', 'ER0', 'IH0', 'SH']), 'few': ('JJ', ['F', 'Y', 'UW1']), 'fey': ('NN', ['F', 'EY1']), 'fez': ('NN', ['F', 'EH1', 'Z']), 'fiance': ('NN', ['F', 'IY0', 'AA1', 'N', 'S', 'EY2']), 'fiancee': ('NN', ['F', 'IY0', 'AE1', 'N', 'S', 'IY0']), 'fiasco': 
('NN', ['F', 'IY0', 'AE1', 'S', 'K', 'OW0']), 'fiat': ('NN', ['F', 'AY1', 'AE0', 'T']), 'fiber': ('NN', ['F', 'AY1', 'B', 'ER0']), 'fibre': ('NN', ['F', 'AY1', 'B', 'ER0']), 'fibrillation': ('NN', ['F', 'IH2', 'B', 'R', 'IH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'fibrin': ('NN', ['F', 'AY1', 'B', 'R', 'AH0', 'N']), 'fibroid': ('NN', ['F', 'AY1', 'B', 'R', 'OY0', 'D']), 'fibrous': ('JJ', ['F', 'AY1', 'B', 'R', 'AH0', 'S']), 'fibula': ('NN', ['F', 'IH1', 'B', 'Y', 'AH0', 'L', 'AH0']), 'fickle': ('NN', ['F', 'IH1', 'K', 'AH0', 'L']), 'fickleness': ('NN', ['F', 'IH1', 'K', 'AH0', 'L', 'N', 'AH0', 'S']), 'fico': ('NN', ['F', 'IY1', 'K', 'OW0']), 'fiction': ('NN', ['F', 'IH1', 'K', 'SH', 'AH0', 'N']), 'fictional': ('JJ', ['F', 'IH1', 'K', 'SH', 'AH0', 'N', 'AH0', 'L']), 'fictitious': ('JJ', ['F', 'IH0', 'K', 'T', 'IH1', 'SH', 'AH0', 'S']), 'fidalgo': ('NN', ['F', 'IY0', 'D', 'AA1', 'L', 'G', 'OW0']), 'fiddle': ('NN', ['F', 'IH1', 'D', 'AH0', 'L']), 'fiddled': ('VBN', ['F', 'IH1', 'D', 'AH0', 'L', 'D']), 'fiddling': ('VBG', ['F', 'IH1', 'D', 'L', 'IH0', 'NG']), 'fiddler': ('NN', ['F', 'IH1', 'D', 'AH0', 'L', 'ER0']), 'fidelity': ('NN', ['F', 'AH0', 'D', 'EH1', 'L', 'AH0', 'T', 'IY0']), 'fides': ('NNS', ['F', 'AY1', 'D', 'Z']), 'fidget': ('NN', ['F', 'IH1', 'JH', 'IH0', 'T']), 'fiduciary': ('JJ', ['F', 'AH0', 'D', 'UW1', 'SH', 'IY0', 'EH2', 'R', 'IY0']), 'fief': ('NN', ['F', 'IY1', 'F']), 'field': ('NN', ['F', 'IY1', 'L', 'D']), 'fielded': ('VBN', ['F', 'IY1', 'L', 'D', 'IH0', 'D']), 'fielding': ('NN', ['F', 'IY1', 'L', 'D', 'IH0', 'NG']), 'fielden': ('NN', ['F', 'IY1', 'L', 'D', 'AH0', 'N']), 'fielder': ('NN', ['F', 'IY1', 'L', 'D', 'ER0']), 'fieldwork': ('NN', ['F', 'IY1', 'L', 'D', 'W', 'ER2', 'K']), 'fiend': ('NN', ['F', 'IY1', 'N', 'D']), 'fiendish': ('JJ', ['F', 'IY1', 'N', 'D', 'IH0', 'SH']), 'fierce': ('NN', ['F', 'IH1', 'R', 'S']), 'fiery': ('NN', ['F', 'AY1', 'ER0', 'IY0']), 'fife': ('NN', ['F', 'AY1', 'F']), 'fifer': ('NN', ['F', 'AY1', 'F', 'ER0']), 'fifteen': 
('NN', ['F', 'IH0', 'F', 'T', 'IY1', 'N']), 'fifteenth': ('NN', ['F', 'IH0', 'F', 'T', 'IY1', 'N', 'TH']), 'fifth': ('NN', ['F', 'IH1', 'F', 'TH']), 'fiftieth': ('NNS', ['F', 'IH1', 'F', 'T', 'IY0', 'IH0', 'TH']), 'fifty': ('NN', ['F', 'IH1', 'F', 'T', 'IY0']), 'fifties': ('NNS', ['F', 'IH1', 'F', 'T', 'IY0', 'Z']), 'fig': ('NN', ['F', 'IH1', 'G']), 'figaro': ('NN', ['F', 'IH1', 'G', 'ER0', 'OW2']), 'fought': ('NN', ['F', 'AO1', 'T']), 'fighting': ('VBG', ['F', 'AY1', 'T', 'IH0', 'NG']), 'fight': ('NN', ['F', 'AY1', 'T']), 'fighter': ('NN', ['F', 'AY1', 'T', 'ER0']), 'figment': ('NN', ['F', 'IH1', 'G', 'M', 'IH0', 'N', 'T']), 'figuration': ('NN', ['F', 'IH2', 'G', 'Y', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'figurative': ('JJ', ['F', 'IH1', 'G', 'Y', 'ER0', 'AH0', 'T', 'IH0', 'V']), 'figure': ('NN', ['F', 'IH1', 'G', 'Y', 'ER0']), 'figured': ('VBN', ['F', 'IH1', 'G', 'Y', 'ER0', 'D']), 'figuring': ('VBG', ['F', 'IH1', 'G', 'Y', 'ER0', 'IH0', 'NG']), 'figurehead': ('NN', ['F', 'IH1', 'G', 'Y', 'ER0', 'HH', 'EH2', 'D']), 'figurine': ('NN', ['F', 'IH2', 'G', 'Y', 'ER0', 'IY1', 'N']), 'fijian': ('JJ', ['F', 'IY1', 'JH', 'IY0', 'AH0', 'N']), 'fike': ('NN', ['F', 'AY1', 'K']), 'fil': ('NN', ['F', 'IH1', 'L']), 'filament': ('NN', ['F', 'IH1', 'L', 'AH0', 'M', 'AH0', 'N', 'T']), 'filbert': ('NN', ['F', 'IH1', 'L', 'B', 'ER0', 'T']), 'filched': ('VBN', ['F', 'IH1', 'L', 'CH', 'T']), 'filch': ('NN', ['F', 'IH1', 'L', 'CH']), 'file': ('NN', ['F', 'AY1', 'L']), 'filed': ('VBN', ['F', 'AY1', 'L', 'D']), 'filing': ('NN', ['F', 'AY1', 'L', 'IH0', 'NG']), 'filer': ('NN', ['F', 'AY1', 'L', 'ER0']), 'filial': ('JJ', ['F', 'IH1', 'L', 'IY0', 'AH0', 'L']), 'filibuster': ('NN', ['F', 'IH1', 'L', 'AH0', 'B', 'AH2', 'S', 'T', 'ER0']), 'filibustering': ('VBG', ['F', 'IH1', 'L', 'AH0', 'B', 'AH2', 'S', 'T', 'ER0', 'IH0', 'NG']), 'filigree': ('NN', ['F', 'IH1', 'L', 'AH0', 'G', 'R', 'IY2']), 'fill': ('NN', ['F', 'IH1', 'L']), 'filled': ('VBN', ['F', 'IH1', 'L', 'D']), 'filling': ('VBG', ['F', 
'IH1', 'L', 'IH0', 'NG']), 'filler': ('NN', ['F', 'IH1', 'L', 'ER0']), 'fillet': ('NN', ['F', 'AH0', 'L', 'EY1']), 'filleted': ('VBN', ['F', 'IH1', 'L', 'AH0', 'T', 'IH0', 'D']), 'fillibuster': ('NN', ['F', 'IH1', 'L', 'AH0', 'B', 'AH2', 'S', 'T', 'ER0']), 'fillip': ('NN', ['F', 'IH1', 'L', 'AH0', 'P']), 'fillies': ('NNS', ['F', 'IH1', 'L', 'IY0', 'Z']), 'filly': ('RB', ['F', 'IH1', 'L', 'IY0']), 'film': ('NN', ['F', 'IH1', 'L', 'M']), 'filter': ('NN', ['F', 'IH1', 'L', 'T', 'ER0']), 'filtered': ('VBN', ['F', 'IH1', 'L', 'T', 'ER0', 'D']), 'filtering': ('VBG', ['F', 'IH1', 'L', 'T', 'ER0', 'IH0', 'NG']), 'filth': ('NN', ['F', 'IH1', 'L', 'TH']), 'filthy': ('NN', ['F', 'IH1', 'L', 'TH', 'IY0']), 'filtration': ('NN', ['F', 'IH0', 'L', 'T', 'R', 'EY1', 'SH', 'AH0', 'N']), 'finning': ('VBG', ['F', 'IH1', 'N', 'IH0', 'NG']), 'fin': ('NN', ['F', 'IH1', 'N']), 'final': ('JJ', ['F', 'AY1', 'N', 'AH0', 'L']), 'finale': ('NN', ['F', 'AH0', 'N', 'AE1', 'L', 'IY0']), 'finality': ('NN', ['F', 'AY0', 'N', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'finally': ('RB', ['F', 'AY1', 'N', 'AH0', 'L', 'IY0']), 'finance': ('NN', ['F', 'AH0', 'N', 'AE1', 'N', 'S']), 'financial': ('JJ', ['F', 'AH0', 'N', 'AE1', 'N', 'SH', 'AH0', 'L']), 'financially': ('RB', ['F', 'AH0', 'N', 'AE1', 'N', 'SH', 'AH0', 'L', 'IY0']), 'financier': ('NN', ['F', 'IH2', 'N', 'AH0', 'N', 'S', 'IH1', 'R']), 'fishes': ('NNS', ['F', 'IH1', 'SH', 'AH0', 'Z']), 'finch': ('NN', ['F', 'IH1', 'N', 'CH']), 'found': ('NN', ['F', 'AW1', 'N', 'D']), 'finding': ('VBG', ['F', 'AY1', 'N', 'D', 'IH0', 'NG']), 'find': ('VB', ['F', 'AY1', 'N', 'D']), 'finder': ('NN', ['F', 'AY1', 'N', 'D', 'ER0']), 'fine': ('NN', ['F', 'AY1', 'N']), 'fined': ('VBN', ['F', 'AY1', 'N', 'D']), 'fining': ('VBG', ['F', 'AY1', 'N', 'IH0', 'NG']), 'finely': ('RB', ['F', 'AY1', 'N', 'L', 'IY0']), 'finer': ('NN', ['F', 'AY1', 'N', 'ER0']), 'finery': ('NN', ['F', 'AY1', 'N', 'ER0', 'IY0']), 'finesse': ('NN', ['F', 'IH0', 'N', 'EH1', 'S']), 'finessed': ('VBN', ['F', 
'IH0', 'N', 'EH1', 'S', 'T']), 'finger': ('NN', ['F', 'IH1', 'NG', 'G', 'ER0']), 'fingered': ('VBN', ['F', 'IH1', 'NG', 'G', 'ER0', 'D']), 'fingering': ('VBG', ['F', 'IH1', 'NG', 'G', 'ER0', 'IH0', 'NG']), 'finicky': ('NN', ['F', 'IH1', 'N', 'AH0', 'K', 'IY0']), 'finished': ('VBN', ['F', 'IH1', 'N', 'IH0', 'SH', 'T']), 'finishing': ('VBG', ['F', 'IH1', 'N', 'IH0', 'SH', 'IH0', 'NG']), 'finish': ('NN', ['F', 'IH1', 'N', 'IH0', 'SH']), 'finisher': ('NN', ['F', 'IH1', 'N', 'IH0', 'SH', 'ER0']), 'finite': ('NN', ['F', 'AY1', 'N', 'AY2', 'T']), 'finn': ('NN', ['F', 'IH1', 'N']), 'finnish': ('JJ', ['F', 'IH1', 'N', 'IH0', 'SH']), 'finns': ('NN', ['F', 'IH1', 'N', 'Z']), 'fipple': ('NN', ['F', 'IH1', 'P', 'AH0', 'L']), 'fir': ('NN', ['F', 'ER1']), 'fire': ('NN', ['F', 'AY1', 'ER0']), 'fired': ('VBN', ['F', 'AY1', 'ER0', 'D']), 'firearm': ('NN', ['F', 'AY1', 'ER0', 'AA2', 'R', 'M']), 'fireball': ('NN', ['F', 'AY1', 'ER0', 'B', 'AO2', 'L']), 'firebird': ('NN', ['F', 'AY1', 'ER0', 'B', 'ER2', 'D']), 'firebrand': ('NN', ['F', 'AY1', 'ER0', 'B', 'R', 'AE2', 'N', 'D']), 'firecracker': ('NN', ['F', 'AY1', 'ER0', 'K', 'R', 'AE2', 'K', 'ER0']), 'fireflies': ('NNS', ['F', 'AY1', 'ER0', 'F', 'L', 'AY2', 'Z']), 'firefly': ('NN', ['F', 'AY1', 'ER0', 'F', 'L', 'AY2']), 'firemen': ('NNS', ['F', 'AY1', 'R', 'M', 'AH0', 'N']), 'fireman': ('NN', ['F', 'AY1', 'R', 'M', 'AH0', 'N']), 'fireplace': ('NN', ['F', 'AY1', 'ER0', 'P', 'L', 'EY2', 'S']), 'fireproof': ('NN', ['F', 'AY1', 'ER0', 'P', 'R', 'UW2', 'F']), 'fireside': ('NN', ['F', 'AY1', 'ER0', 'S', 'AY2', 'D']), 'firestone': ('NN', ['F', 'AY1', 'R', 'S', 'T', 'OW2', 'N']), 'fireweed': ('NN', ['F', 'AY1', 'ER0', 'W', 'IY2', 'D']), 'firewood': ('NN', ['F', 'AY1', 'ER0', 'W', 'UH2', 'D']), 'firework': ('NN', ['F', 'AY1', 'R', 'W', 'ER2', 'K']), 'firing': ('VBG', ['F', 'AY1', 'R', 'IH0', 'NG']), 'firm': ('NN', ['F', 'ER1', 'M']), 'firmans': ('NNS', ['F', 'ER1', 'M', 'AH0', 'N', 'Z']), 'firman': ('NN', ['F', 'ER1', 'M', 'AH0', 'N']), 
'firmly': ('RB', ['F', 'ER1', 'M', 'L', 'IY0']), 'firmness': ('NN', ['F', 'ER1', 'M', 'N', 'AH0', 'S']), 'firms': ('NNS', ['F', 'ER1', 'M', 'Z']), 'first': ('RB', ['F', 'ER1', 'S', 'T']), 'firstborn': ('NN', ['F', 'ER1', 'S', 'T', 'B', 'AO1', 'R', 'N']), 'firstly': ('RB', ['F', 'ER1', 'S', 'T', 'L', 'IY0']), 'firth': ('NN', ['F', 'ER1', 'TH']), 'fisc': ('NN', ['F', 'IH1', 'S', 'K']), 'fiscal': ('JJ', ['F', 'IH1', 'S', 'K', 'AH0', 'L']), 'fish': ('NN', ['F', 'IH1', 'SH']), 'fished': ('VBN', ['F', 'IH1', 'SH', 'T']), 'fishing': ('NN', ['F', 'IH1', 'SH', 'IH0', 'NG']), 'fisher': ('NN', ['F', 'IH1', 'SH', 'ER0']), 'fishermen': ('NNS', ['F', 'IH1', 'SH', 'ER0', 'M', 'IH0', 'N']), 'fisherman': ('NN', ['F', 'IH1', 'SH', 'ER0', 'M', 'AE2', 'N']), 'fisheries': ('NNS', ['F', 'IH1', 'SH', 'ER0', 'IY0', 'Z']), 'fishery': ('NN', ['F', 'IH1', 'SH', 'ER0', 'IY0']), 'fishmonger': ('NN', ['F', 'IH1', 'SH', 'M', 'AA2', 'NG', 'G', 'ER0']), 'fishy': ('NN', ['F', 'IH1', 'SH', 'IY0']), 'fisk': ('NN', ['F', 'IH1', 'S', 'K']), 'fissile': ('NN', ['F', 'IH1', 'S', 'AH0', 'L']), 'fission': ('NN', ['F', 'IH1', 'SH', 'AH0', 'N']), 'fissure': ('NN', ['F', 'IH1', 'SH', 'ER0']), 'fist': ('NN', ['F', 'IH1', 'S', 'T']), 'fisted': ('VBN', ['F', 'IH1', 'S', 'T', 'IH0', 'D']), 'fisticuff': ('NN', ['F', 'IH1', 'S', 'T', 'IH0', 'K', 'AH2', 'F']), 'fit': ('NN', ['F', 'IH1', 'T']), 'fitted': ('VBN', ['F', 'IH1', 'T', 'AH0', 'D']), 'fitting': ('NN', ['F', 'IH1', 'T', 'IH0', 'NG']), 'fitch': ('NN', ['F', 'IH1', 'CH']), 'fitful': ('NN', ['F', 'IH1', 'T', 'F', 'AH0', 'L']), 'fitness': ('NN', ['F', 'IH1', 'T', 'N', 'AH0', 'S']), 'fitt': ('NN', ['F', 'IH1', 'T']), 'fitter': ('NN', ['F', 'IH1', 'T', 'ER0']), 'fitz': ('NN', ['F', 'IH1', 'T', 'S']), 'five': ('CD', ['F', 'AY1', 'V']), 'fivefold': ('NN', ['F', 'AY1', 'V', 'F', 'OW2', 'L', 'D']), 'fives': ('NNS', ['F', 'AY1', 'V', 'Z']), 'fix': ('NN', ['F', 'IH1', 'K', 'S']), 'fixed': ('VBN', ['F', 'IH1', 'K', 'S', 'T']), 'fixing': ('VBG', ['F', 'IH1', 'K', 'S', 
'IH0', 'NG']), 'fixable': ('JJ', ['F', 'IH1', 'K', 'S', 'AH0', 'B', 'AH0', 'L']), 'fixation': ('NN', ['F', 'IH0', 'K', 'S', 'EY1', 'SH', 'AH0', 'N']), 'fixative': ('JJ', ['F', 'IH1', 'K', 'S', 'AH0', 'T', 'IH0', 'V']), 'fixture': ('NN', ['F', 'IH1', 'K', 'S', 'CH', 'ER0']), 'fizz': ('NN', ['F', 'IH1', 'Z']), 'fizzled': ('VBN', ['F', 'IH1', 'Z', 'AH0', 'L', 'D']), 'fizzling': ('VBG', ['F', 'IH1', 'Z', 'AH0', 'L', 'IH0', 'NG']), 'fizzle': ('NN', ['F', 'IH1', 'Z', 'AH0', 'L']), 'fjord': ('NN', ['F', 'Y', 'AO1', 'R', 'D']), 'flabbergast': ('NN', ['F', 'L', 'AE1', 'B', 'ER0', 'G', 'AE2', 'S', 'T']), 'flabby': ('NN', ['F', 'L', 'AE1', 'B', 'IY0']), 'flaccid': ('NN', ['F', 'L', 'AE1', 'K', 'S', 'IH0', 'D']), 'flagged': ('VBN', ['F', 'L', 'AE1', 'G', 'D']), 'flagging': ('VBG', ['F', 'L', 'AE1', 'G', 'IH0', 'NG']), 'flag': ('NN', ['F', 'L', 'AE1', 'G']), 'flagellated': ('VBN', ['F', 'L', 'AE1', 'JH', 'AH0', 'L', 'EY2', 'T', 'AH0', 'D']), 'flagellate': ('NN', ['F', 'L', 'AE1', 'JH', 'AH0', 'L', 'EY2', 'T']), 'flagella': ('NN', ['F', 'L', 'AH0', 'JH', 'EH1', 'L', 'AH0']), 'flagellum': ('NN', ['F', 'L', 'AH0', 'JH', 'EH1', 'L', 'AH0', 'M']), 'flageolet': ('NN', ['F', 'L', 'AE2', 'JH', 'AH0', 'L', 'EH1', 'T']), 'flagmen': ('NNS', ['F', 'L', 'AE1', 'G', 'M', 'EH2', 'N']), 'flagman': ('NN', ['F', 'L', 'AE1', 'G', 'M', 'AE2', 'N']), 'flagrant': ('NN', ['F', 'L', 'EY1', 'G', 'R', 'AH0', 'N', 'T']), 'flagrantly': ('RB', ['F', 'L', 'EY1', 'G', 'R', 'AH0', 'N', 'T', 'L', 'IY0']), 'flagship': ('NN', ['F', 'L', 'AE1', 'G', 'SH', 'IH2', 'P']), 'flagstaff': ('NN', ['F', 'L', 'AE1', 'G', 'S', 'T', 'AE2', 'F']), 'flagstone': ('NN', ['F', 'L', 'AE1', 'G', 'S', 'T', 'OW2', 'N']), 'flail': ('NN', ['F', 'L', 'EY1', 'L']), 'flake': ('NN', ['F', 'L', 'EY1', 'K']), 'flaking': ('VBG', ['F', 'L', 'EY1', 'K', 'IH0', 'NG']), 'flaky': ('NN', ['F', 'L', 'EY1', 'K', 'IY0']), 'flam': ('NN', ['F', 'L', 'AE1', 'M']), 'flamboyant': ('NN', ['F', 'L', 'AE0', 'M', 'B', 'OY1', 'AH0', 'N', 'T']), 'flame': ('NN', 
['F', 'L', 'EY1', 'M']), 'flamed': ('VBN', ['F', 'L', 'EY1', 'M', 'D']), 'flaming': ('VBG', ['F', 'L', 'EY1', 'M', 'IH0', 'NG']), 'flamingo': ('NN', ['F', 'L', 'AH0', 'M', 'IH1', 'NG', 'G', 'OW0']), 'flammability': ('NN', ['F', 'L', 'AE2', 'M', 'AH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'flammable': ('JJ', ['F', 'L', 'AE1', 'M', 'AH0', 'B', 'AH0', 'L']), 'flange': ('NN', ['F', 'L', 'AE1', 'N', 'JH']), 'flank': ('NN', ['F', 'L', 'AE1', 'NG', 'K']), 'flanked': ('VBN', ['F', 'L', 'AE1', 'NG', 'K', 'T']), 'flanking': ('VBG', ['F', 'L', 'AE1', 'NG', 'K', 'IH0', 'NG']), 'flannel': ('NNS', ['F', 'L', 'AE1', 'N', 'AH0', 'L']), 'flanneled': ('VBN', ['F', 'L', 'AE1', 'N', 'AH0', 'L', 'D']), 'flap': ('NN', ['F', 'L', 'AE1', 'P']), 'flapped': ('VBD', ['F', 'L', 'AE1', 'P', 'T']), 'flapping': ('VBG', ['F', 'L', 'AE1', 'P', 'IH0', 'NG']), 'flapjack': ('NN', ['F', 'L', 'AE1', 'P', 'JH', 'AE2', 'K']), 'flapper': ('NN', ['F', 'L', 'AE1', 'P', 'ER0']), 'flared': ('VBN', ['F', 'L', 'EH1', 'R', 'D']), 'flaring': ('VBG', ['F', 'L', 'EH1', 'R', 'IH0', 'NG']), 'flare': ('NN', ['F', 'L', 'EH1', 'R']), 'flaringly': ('RB', ['F', 'L', 'EH1', 'R', 'IH0', 'NG', 'L', 'IY0']), 'flashed': ('VBN', ['F', 'L', 'AE1', 'SH', 'T']), 'flashing': ('VBG', ['F', 'L', 'AE1', 'SH', 'IH0', 'NG']), 'flash': ('NN', ['F', 'L', 'AE1', 'SH']), 'flashes': ('NNS', ['F', 'L', 'AE1', 'SH', 'IH0', 'Z']), 'flasher': ('NN', ['F', 'L', 'AE1', 'SH', 'ER0']), 'flashy': ('NN', ['F', 'L', 'AE1', 'SH', 'IY0']), 'flask': ('NN', ['F', 'L', 'AE1', 'S', 'K']), 'flat': ('JJ', ['F', 'L', 'AE1', 'T']), 'flatboat': ('NN', ['F', 'L', 'AE1', 'T', 'B', 'OW2', 'T']), 'flatfish': ('JJ', ['F', 'L', 'AE1', 'T', 'F', 'IH2', 'SH']), 'flathead': ('NN', ['F', 'L', 'AE1', 'T', 'HH', 'EH2', 'D']), 'flatly': ('RB', ['F', 'L', 'AE1', 'T', 'L', 'IY0']), 'flatness': ('NN', ['F', 'L', 'AE1', 'T', 'N', 'AH0', 'S']), 'flattened': ('VBN', ['F', 'L', 'AE1', 'T', 'AH0', 'N', 'D']), 'flattening': ('VBG', ['F', 'L', 'AE1', 'T', 'AH0', 'N', 'IH0', 'NG']), 
'flatten': ('NNS', ['F', 'L', 'AE1', 'T', 'AH0', 'N']), 'flatter': ('NN', ['F', 'L', 'AE1', 'T', 'ER0']), 'flattered': ('VBN', ['F', 'L', 'AE1', 'T', 'ER0', 'D']), 'flattering': ('VBG', ['F', 'L', 'AE1', 'T', 'ER0', 'IH0', 'NG']), 'flattery': ('NN', ['F', 'L', 'AE1', 'T', 'ER0', 'IY0']), 'flattish': ('JJ', ['F', 'L', 'AE1', 'T', 'IH0', 'SH']), 'flatulent': ('NN', ['F', 'L', 'AE1', 'CH', 'AH0', 'L', 'AH0', 'N', 'T']), 'flatware': ('NN', ['F', 'L', 'AE1', 'T', 'W', 'EH2', 'R']), 'flatworm': ('NN', ['F', 'L', 'AE1', 'T', 'W', 'ER0', 'M']), 'flaunted': ('VBN', ['F', 'L', 'AO1', 'N', 'T', 'IH0', 'D']), 'flaunting': ('VBG', ['F', 'L', 'AO1', 'N', 'T', 'IH0', 'NG']), 'flaunt': ('NN', ['F', 'L', 'AO1', 'N', 'T']), 'flavin': ('NN', ['F', 'L', 'EY1', 'V', 'IH0', 'N']), 'flavor': ('NN', ['F', 'L', 'EY1', 'V', 'ER0']), 'flavored': ('VBN', ['F', 'L', 'EY1', 'V', 'ER0', 'D']), 'flavoring': ('VBG', ['F', 'L', 'EY1', 'V', 'ER0', 'IH0', 'NG']), 'flaw': ('NN', ['F', 'L', 'AO1']), 'flawed': ('VBN', ['F', 'L', 'AO1', 'D']), 'flawless': ('NN', ['F', 'L', 'AO1', 'L', 'AH0', 'S']), 'flawn': ('NN', ['F', 'L', 'AO1', 'N']), 'flax': ('NN', ['F', 'L', 'AE1', 'K', 'S']), 'flaxseed': ('NN', ['F', 'L', 'AE1', 'K', 'S', 'IY2', 'D']), 'flayed': ('NNS', ['F', 'L', 'EY1', 'D']), 'flay': ('NN', ['F', 'L', 'EY1']), 'flea': ('NN', ['F', 'L', 'IY1']), 'fleck': ('NN', ['F', 'L', 'EH1', 'K']), 'fled': ('VBN', ['F', 'L', 'EH1', 'D']), 'fledge': ('NN', ['F', 'L', 'EH1', 'JH']), 'fledged': ('VBN', ['F', 'L', 'EH1', 'JH', 'D']), 'fledging': ('VBG', ['F', 'L', 'EH1', 'JH', 'IH0', 'NG']), 'fleeing': ('VBG', ['F', 'L', 'IY1', 'IH0', 'NG']), 'flee': ('NN', ['F', 'L', 'IY1']), 'fleece': ('NN', ['F', 'L', 'IY1', 'S']), 'fleeced': ('VBN', ['F', 'L', 'IY1', 'S', 'T']), 'fleecing': ('VBG', ['F', 'L', 'IY1', 'S', 'IH0', 'NG']), 'fleecy': ('NN', ['F', 'L', 'IY1', 'S', 'IY0']), 'fleer': ('NN', ['F', 'L', 'IH1', 'R']), 'fleeting': ('NN', ['F', 'L', 'IY1', 'T', 'IH0', 'NG']), 'fleet': ('NN', ['F', 'L', 'IY1', 'T']), 
'fleetingly': ('RB', ['F', 'L', 'IY1', 'T', 'IH0', 'NG', 'L', 'IY0']), 'fleming': ('VBG', ['F', 'L', 'EH1', 'M', 'IH0', 'NG']), 'flemish': ('JJ', ['F', 'L', 'EH1', 'M', 'IH0', 'SH']), 'flesh': ('NN', ['F', 'L', 'EH1', 'SH']), 'fleshed': ('VBN', ['F', 'L', 'EH1', 'SH', 'T']), 'flesher': ('NN', ['F', 'L', 'EH1', 'SH', 'ER0']), 'fleshy': ('NN', ['F', 'L', 'EH1', 'SH', 'IY0']), 'fletcher': ('NN', ['F', 'L', 'EH1', 'CH', 'ER0']), 'fleury': ('NN', ['F', 'L', 'UH1', 'R', 'IY0']), 'flew': ('NN', ['F', 'L', 'UW1']), 'flexed': ('NN', ['F', 'L', 'EH1', 'K', 'S', 'T']), 'flexing': ('VBG', ['F', 'L', 'EH1', 'K', 'S', 'IH0', 'NG']), 'flex': ('NN', ['F', 'L', 'EH1', 'K', 'S']), 'flexibility': ('NN', ['F', 'L', 'EH2', 'K', 'S', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'flexible': ('JJ', ['F', 'L', 'EH1', 'K', 'S', 'AH0', 'B', 'AH0', 'L']), 'flexion': ('NN', ['F', 'L', 'EH1', 'K', 'SH', 'AH0', 'N']), 'flicking': ('VBG', ['F', 'L', 'IH1', 'K', 'IH0', 'NG']), 'flick': ('NN', ['F', 'L', 'IH1', 'K']), 'flickered': ('VBN', ['F', 'L', 'IH1', 'K', 'ER0', 'D']), 'flickering': ('VBG', ['F', 'L', 'IH1', 'K', 'ER0', 'IH0', 'NG']), 'flicker': ('NN', ['F', 'L', 'IH1', 'K', 'ER0']), 'flier': ('NN', ['F', 'L', 'AY1', 'ER0']), 'flight': ('NN', ['F', 'L', 'AY1', 'T']), 'flighty': ('NN', ['F', 'L', 'AY1', 'T', 'IY0']), 'flimflam': ('NN', ['F', 'L', 'IH1', 'M', 'F', 'L', 'AE2', 'M']), 'flimsy': ('NN', ['F', 'L', 'IH1', 'M', 'Z', 'IY0']), 'flinched': ('VBN', ['F', 'L', 'IH1', 'N', 'CH', 'T']), 'flinching': ('VBG', ['F', 'L', 'IH1', 'N', 'CH', 'IH0', 'NG']), 'flinch': ('NN', ['F', 'L', 'IH1', 'N', 'CH']), 'flinders': ('NNS', ['F', 'L', 'IH1', 'N', 'D', 'ER0', 'Z']), 'flung': ('NN', ['F', 'L', 'AH1', 'NG']), 'flinging': ('VBG', ['F', 'L', 'IH1', 'NG', 'IH0', 'NG']), 'fling': ('VBG', ['F', 'L', 'IH1', 'NG']), 'flint': ('NN', ['F', 'L', 'IH1', 'N', 'T']), 'flintlock': ('NN', ['F', 'L', 'IH1', 'N', 'T', 'L', 'AA2', 'K']), 'flinty': ('NN', ['F', 'L', 'IH1', 'N', 'T', 'IY0']), 'flip': ('NN', ['F', 'L', 
'IH1', 'P']), 'flipped': ('VBD', ['F', 'L', 'IH1', 'P', 'T']), 'flipping': ('VBG', ['F', 'L', 'IH1', 'P', 'IH0', 'NG']), 'flippant': ('NN', ['F', 'L', 'IH1', 'P', 'AH0', 'N', 'T']), 'flipper': ('NN', ['F', 'L', 'IH1', 'P', 'ER0']), 'flirted': ('VBN', ['F', 'L', 'ER1', 'T', 'IH0', 'D']), 'flirting': ('VBG', ['F', 'L', 'ER1', 'T', 'IH0', 'NG']), 'flirt': ('NN', ['F', 'L', 'ER1', 'T']), 'flirtation': ('NN', ['F', 'L', 'ER0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'flitting': ('VBG', ['F', 'L', 'IH1', 'T', 'IH0', 'NG']), 'flit': ('NN', ['F', 'L', 'IH1', 'T']), 'flo': ('NN', ['F', 'L', 'OW1']), 'float': ('NN', ['F', 'L', 'OW1', 'T']), 'floated': ('VBN', ['F', 'L', 'OW1', 'T', 'AH0', 'D']), 'floating': ('VBG', ['F', 'L', 'OW1', 'T', 'IH0', 'NG']), 'floater': ('NN', ['F', 'L', 'OW1', 'T', 'ER0']), 'flock': ('NN', ['F', 'L', 'AA1', 'K']), 'flocked': ('VBN', ['F', 'L', 'AA1', 'K', 'T']), 'flocking': ('VBG', ['F', 'L', 'AA1', 'K', 'IH0', 'NG']), 'floe': ('NN', ['F', 'L', 'OW1']), 'flogging': ('VBG', ['F', 'L', 'AA1', 'G', 'IH0', 'NG']), 'flog': ('NN', ['F', 'L', 'AA1', 'G']), 'flood': ('NN', ['F', 'L', 'AH1', 'D']), 'flooded': ('VBN', ['F', 'L', 'AH1', 'D', 'AH0', 'D']), 'flooding': ('VBG', ['F', 'L', 'AH1', 'D', 'IH0', 'NG']), 'flook': ('NN', ['F', 'L', 'UH1', 'K']), 'floor': ('NN', ['F', 'L', 'AO1', 'R']), 'floored': ('VBN', ['F', 'L', 'AO1', 'R', 'D']), 'flooring': ('VBG', ['F', 'L', 'AO1', 'R', 'IH0', 'NG']), 'flopped': ('VBD', ['F', 'L', 'AA1', 'P', 'T']), 'flopping': ('VBG', ['F', 'L', 'AA1', 'P', 'IH0', 'NG']), 'flop': ('NN', ['F', 'L', 'AA1', 'P']), 'floppy': ('JJ', ['F', 'L', 'AA1', 'P', 'IY0']), 'flora': ('NNS', ['F', 'L', 'AO1', 'R', 'AH0']), 'floral': ('JJ', ['F', 'L', 'AO1', 'R', 'AH0', 'L']), 'florally': ('RB', ['F', 'L', 'AO1', 'R', 'AH0', 'L', 'IY0']), 'floren': ('NNS', ['F', 'L', 'AO1', 'R', 'AH0', 'N']), 'florence': ('NN', ['F', 'L', 'AO1', 'R', 'AH0', 'N', 'S']), 'florentine': ('NN', ['F', 'L', 'AO1', 'R', 'AH0', 'N', 'T', 'IY2', 'N']), 'florid': ('NN', ['F', 
'L', 'AO1', 'R', 'AH0', 'D']), 'florin': ('NN', ['F', 'L', 'AO1', 'R', 'IH0', 'N']), 'florist': ('NN', ['F', 'L', 'AA1', 'R', 'AH0', 'S', 'T']), 'floss': ('NN', ['F', 'L', 'AA1', 'S']), 'flotation': ('NN', ['F', 'L', 'OW0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'flotilla': ('NN', ['F', 'L', 'OW0', 'T', 'IH1', 'L', 'AH0']), 'flotsam': ('NN', ['F', 'L', 'AA1', 'T', 'S', 'AH0', 'M']), 'flounce': ('NN', ['F', 'L', 'AW1', 'N', 'S']), 'flounder': ('NN', ['F', 'L', 'AW1', 'N', 'D', 'ER0']), 'floundered': ('VBN', ['F', 'L', 'AW1', 'N', 'D', 'ER0', 'D']), 'floundering': ('VBG', ['F', 'L', 'AW1', 'N', 'D', 'ER0', 'IH0', 'NG']), 'flour': ('NN', ['F', 'L', 'AW1', 'ER0']), 'flourished': ('VBN', ['F', 'L', 'ER1', 'IH0', 'SH', 'T']), 'flourishing': ('VBG', ['F', 'L', 'ER1', 'IH0', 'SH', 'IH0', 'NG']), 'flourish': ('JJ', ['F', 'L', 'ER1', 'IH0', 'SH']), 'flourishes': ('NNS', ['F', 'L', 'ER1', 'IH0', 'SH', 'AH0', 'Z']), 'flouted': ('VBN', ['F', 'L', 'AW1', 'T', 'IH0', 'D']), 'flouting': ('VBG', ['F', 'L', 'AW1', 'T', 'IH0', 'NG']), 'flout': ('NN', ['F', 'L', 'AW1', 'T']), 'flow': ('NN', ['F', 'L', 'OW1']), 'flowed': ('VBN', ['F', 'L', 'OW1', 'D']), 'flowing': ('VBG', ['F', 'L', 'OW1', 'IH0', 'NG']), 'flower': ('NN', ['F', 'L', 'AW1', 'ER0']), 'flowered': ('VBN', ['F', 'L', 'AW1', 'ER0', 'D']), 'flowering': ('VBG', ['F', 'L', 'AW1', 'ER0', 'IH0', 'NG']), 'flowerpot': ('NN', ['F', 'L', 'AW1', 'ER0', 'P', 'AA2', 'T']), 'flowery': ('NN', ['F', 'L', 'AW1', 'ER0', 'IY0']), 'flown': ('NN', ['F', 'L', 'OW1', 'N']), 'fluctuated': ('VBN', ['F', 'L', 'AH1', 'K', 'CH', 'AH0', 'W', 'EY2', 'T', 'IH0', 'D']), 'fluctuating': ('VBG', ['F', 'L', 'AH1', 'K', 'CH', 'AH0', 'W', 'EY2', 'T', 'IH0', 'NG']), 'fluctuate': ('NN', ['F', 'L', 'AH1', 'K', 'CH', 'AH0', 'W', 'EY2', 'T']), 'fluctuation': ('NN', ['F', 'L', 'AH2', 'K', 'CH', 'UW0', 'EY1', 'SH', 'AH0', 'N']), 'flue': ('NN', ['F', 'L', 'UW1']), 'fluency': ('NN', ['F', 'L', 'UW1', 'AH0', 'N', 'S', 'IY0']), 'fluent': ('NN', ['F', 'L', 'UW1', 'AH0', 'N', 
'T']), 'fluently': ('RB', ['F', 'L', 'UW1', 'AH0', 'N', 'T', 'L', 'IY0']), 'fluff': ('NN', ['F', 'L', 'AH1', 'F']), 'fluffy': ('NN', ['F', 'L', 'AH1', 'F', 'IY0']), 'fluid': ('NN', ['F', 'L', 'UW1', 'AH0', 'D']), 'fluke': ('NN', ['F', 'L', 'UW1', 'K']), 'flume': ('NN', ['F', 'L', 'UW1', 'M']), 'flunked': ('VBN', ['F', 'L', 'AH1', 'NG', 'K', 'T']), 'flunking': ('VBG', ['F', 'L', 'AH1', 'NG', 'K', 'IH0', 'NG']), 'flunk': ('NN', ['F', 'L', 'AH1', 'NG', 'K']), 'fluor': ('NN', ['F', 'L', 'UW1', 'ER0']), 'fluorescent': ('NN', ['F', 'L', 'UH2', 'R', 'EH1', 'S', 'AH0', 'N', 'T']), 'fluoride': ('NN', ['F', 'L', 'UH1', 'R', 'AY2', 'D']), 'fluorine': ('NN', ['F', 'L', 'UH1', 'R', 'IY2', 'N']), 'fluorite': ('NN', ['F', 'L', 'UH1', 'R', 'AY2', 'T']), 'flurried': ('VBN', ['F', 'L', 'ER1', 'IY0', 'D']), 'flurries': ('NNS', ['F', 'L', 'ER1', 'IY0', 'Z']), 'flurry': ('NN', ['F', 'L', 'ER1', 'IY0']), 'flushed': ('VBN', ['F', 'L', 'AH1', 'SH', 'T']), 'flushing': ('VBG', ['F', 'L', 'AH1', 'SH', 'IH0', 'NG']), 'flush': ('NN', ['F', 'L', 'AH1', 'SH']), 'flustered': ('VBN', ['F', 'L', 'AH1', 'S', 'T', 'ER0', 'D']), 'flustering': ('VBG', ['F', 'L', 'AH1', 'S', 'T', 'ER0', 'IH0', 'NG']), 'fluster': ('NN', ['F', 'L', 'AH1', 'S', 'T', 'ER0']), 'flute': ('NN', ['F', 'L', 'UW1', 'T']), 'flutist': ('NN', ['F', 'L', 'UW1', 'T', 'IH0', 'S', 'T']), 'flutter': ('NN', ['F', 'L', 'AH1', 'T', 'ER0']), 'fluty': ('NN', ['F', 'L', 'UW1', 'T', 'IY0']), 'fluvial': ('JJ', ['F', 'L', 'UW1', 'V', 'IY0', 'AH0', 'L']), 'flux': ('NN', ['F', 'L', 'AH1', 'K', 'S']), 'fluxional': ('JJ', ['F', 'L', 'AH1', 'K', 'SH', 'AH0', 'N', 'AH0', 'L']), 'flying': ('VBG', ['F', 'L', 'AY1', 'IH0', 'NG']), 'fly': ('NN', ['F', 'L', 'AY1']), 'flies': ('NNS', ['F', 'L', 'AY1', 'Z']), 'flycatcher': ('NN', ['F', 'L', 'AY1', 'K', 'AE2', 'CH', 'ER0']), 'flyer': ('NN', ['F', 'L', 'AY1', 'ER0']), 'flytrap': ('NN', ['F', 'L', 'AY1', 'T', 'R', 'AE2', 'P']), 'foal': ('NN', ['F', 'OW1', 'L']), 'foaling': ('VBG', ['F', 'OW1', 'L', 'IH0', 
'NG']), 'foam': ('NN', ['F', 'OW1', 'M']), 'foaming': ('VBG', ['F', 'OW1', 'M', 'IH0', 'NG']), 'foamy': ('NN', ['F', 'OW1', 'M', 'IY0']), 'fob': ('NN', ['F', 'AO1', 'B']), 'focal': ('JJ', ['F', 'OW1', 'K', 'AH0', 'L']), 'focuses': ('NNS', ['F', 'OW1', 'K', 'AH0', 'S', 'IH0', 'Z']), 'focus': ('NN', ['F', 'OW1', 'K', 'AH0', 'S']), 'focused': ('VBN', ['F', 'OW1', 'K', 'AH0', 'S', 'T']), 'focusing': ('VBG', ['F', 'OW1', 'K', 'AH0', 'S', 'IH0', 'NG']), 'fodder': ('NN', ['F', 'AA1', 'D', 'ER0']), 'foe': ('NN', ['F', 'OW1']), 'fog': ('NN', ['F', 'AA1', 'G']), 'foggy': ('NN', ['F', 'AA1', 'G', 'IY0']), 'fogy': ('NN', ['F', 'OW1', 'G', 'IY0']), 'foible': ('JJ', ['F', 'OY1', 'B', 'AH0', 'L']), 'foiled': ('VBN', ['F', 'OY1', 'L', 'D']), 'foiling': ('VBG', ['F', 'OY1', 'L', 'IH0', 'NG']), 'foil': ('NN', ['F', 'OY1', 'L']), 'foist': ('NN', ['F', 'OY1', 'S', 'T']), 'foisted': ('VBN', ['F', 'OY1', 'S', 'T', 'IH0', 'D']), 'foister': ('NN', ['F', 'OY1', 'S', 'T', 'ER0']), 'folded': ('VBN', ['F', 'OW1', 'L', 'D', 'AH0', 'D']), 'folding': ('VBG', ['F', 'OW1', 'L', 'D', 'IH0', 'NG']), 'fold': ('NN', ['F', 'OW1', 'L', 'D']), 'folder': ('NN', ['F', 'OW1', 'L', 'D', 'ER0']), 'foliage': ('NN', ['F', 'OW1', 'L', 'IH0', 'JH']), 'foliate': ('NN', ['F', 'OW1', 'L', 'IY0', 'EY2', 'T']), 'foliation': ('NN', ['F', 'OW2', 'L', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'folk': ('NN', ['F', 'OW1', 'K']), 'folks': ('NNS', ['F', 'OW1', 'K', 'S']), 'folklore': ('NN', ['F', 'OW1', 'K', 'L', 'AO2', 'R']), 'follicle': ('NN', ['F', 'AA1', 'L', 'AH0', 'K', 'AH0', 'L']), 'followed': ('VBD', ['F', 'AA1', 'L', 'OW0', 'D']), 'following': ('VBG', ['F', 'AA1', 'L', 'OW0', 'IH0', 'NG']), 'follow': ('VB', ['F', 'AA1', 'L', 'OW0']), 'follower': ('NN', ['F', 'AA1', 'L', 'OW0', 'ER0']), 'follies': ('NNS', ['F', 'AA1', 'L', 'IY0', 'Z']), 'folly': ('RB', ['F', 'AA1', 'L', 'IY0']), 'fomented': ('VBN', ['F', 'OW1', 'M', 'EH0', 'N', 'T', 'IH0', 'D']), 'fomenting': ('VBG', ['F', 'OW1', 'M', 'EH0', 'N', 'T', 'IH0', 'NG']), 
'foment': ('NN', ['F', 'OW1', 'M', 'EH0', 'N', 'T']), 'fond': ('NN', ['F', 'AA1', 'N', 'D']), 'fondled': ('VBN', ['F', 'AO1', 'N', 'D', 'AH0', 'L', 'D']), 'fondling': ('VBG', ['F', 'AA1', 'N', 'D', 'AH0', 'L', 'IH0', 'NG']), 'fondle': ('NN', ['F', 'AO1', 'N', 'D', 'AH0', 'L']), 'fondly': ('RB', ['F', 'AA1', 'N', 'D', 'L', 'IY0']), 'fondness': ('NN', ['F', 'AA1', 'N', 'D', 'N', 'AH0', 'S']), 'fone': ('NN', ['F', 'OW1', 'N']), 'font': ('NN', ['F', 'AA1', 'N', 'T']), 'food': ('NN', ['F', 'UW1', 'D']), 'fool': ('NN', ['F', 'UW1', 'L']), 'fooled': ('VBN', ['F', 'UW1', 'L', 'D']), 'fooling': ('NN', ['F', 'UW1', 'L', 'IH0', 'NG']), 'foolery': ('NN', ['F', 'UW1', 'L', 'ER0', 'IY0']), 'foolhardy': ('NN', ['F', 'UW1', 'L', 'HH', 'AA2', 'R', 'D', 'IY0']), 'foolish': ('JJ', ['F', 'UW1', 'L', 'IH0', 'SH']), 'foolishly': ('RB', ['F', 'UW1', 'L', 'IH0', 'SH', 'L', 'IY0']), 'foolishness': ('NN', ['F', 'UW1', 'L', 'IH0', 'SH', 'N', 'AH0', 'S']), 'foot': ('NN', ['F', 'UH1', 'T']), 'footed': ('VBN', ['F', 'UH1', 'T', 'IH0', 'D']), 'footing': ('NN', ['F', 'UH1', 'T', 'IH0', 'NG']), 'football': ('NN', ['F', 'UH1', 'T', 'B', 'AO2', 'L']), 'footfall': ('NN', ['F', 'UH1', 'T', 'F', 'AO2', 'L']), 'foothill': ('NN', ['F', 'UH1', 'T', 'HH', 'IH2', 'L']), 'foothold': ('NN', ['F', 'UH1', 'T', 'HH', 'OW2', 'L', 'D']), 'footlight': ('NN', ['F', 'UH1', 'T', 'L', 'AY2', 'T']), 'footman': ('NN', ['F', 'UH1', 'T', 'M', 'AH0', 'N']), 'footnote': ('NN', ['F', 'UH1', 'T', 'N', 'OW2', 'T']), 'footpath': ('NN', ['F', 'UH1', 'T', 'P', 'AE2', 'TH']), 'footprint': ('NN', ['F', 'UH1', 'T', 'P', 'R', 'IH2', 'N', 'T']), 'footstep': ('NN', ['F', 'UH1', 'T', 'S', 'T', 'EH2', 'P']), 'fop': ('NN', ['F', 'AO1', 'P']), 'for': ('IN', ['F', 'AO1', 'R']), 'forage': ('NN', ['F', 'AO1', 'R', 'IH0', 'JH']), 'foraging': ('VBG', ['F', 'AO1', 'R', 'IH0', 'JH', 'IH0', 'NG']), 'foramen': ('NNS', ['F', 'ER0', 'EY1', 'M', 'AH0', 'N']), 'foray': ('NN', ['F', 'AO1', 'R', 'EY0']), 'forbade': ('NN', ['F', 'ER0', 'B', 'EY1', 'D']), 
'forbearance': ('NN', ['F', 'AO0', 'R', 'B', 'EH1', 'R', 'AH0', 'N', 'S']), 'forbidden': ('NN', ['F', 'AO1', 'R', 'B', 'IH0', 'D', 'AH0', 'N']), 'forbid': ('NN', ['F', 'ER0', 'B', 'IH1', 'D']), 'forbidding': ('VBG', ['F', 'ER0', 'B', 'IH1', 'D', 'IH0', 'NG']), 'force': ('NN', ['F', 'AO1', 'R', 'S']), 'forced': ('VBN', ['F', 'AO1', 'R', 'S', 'T']), 'forcing': ('VBG', ['F', 'AO1', 'R', 'S', 'IH0', 'NG']), 'forceful': ('NN', ['F', 'AO1', 'R', 'S', 'F', 'AH0', 'L']), 'forceps': ('NNS', ['F', 'AO1', 'R', 'S', 'EH0', 'P', 'S']), 'forcible': ('JJ', ['F', 'AO1', 'R', 'S', 'AH0', 'B', 'AH0', 'L']), 'forcibly': ('RB', ['F', 'AO1', 'R', 'S', 'AH0', 'B', 'L', 'IY0']), 'ford': ('NN', ['F', 'AO1', 'R', 'D']), 'fore': ('NN', ['F', 'AO1', 'R']), 'forearm': ('NN', ['F', 'AO0', 'R', 'AA1', 'R', 'M']), 'forebear': ('NN', ['F', 'AO1', 'R', 'B', 'EH2', 'R']), 'foreboding': ('VBG', ['F', 'AO0', 'R', 'B', 'OW1', 'D', 'IH0', 'NG']), 'forebode': ('NN', ['F', 'AO0', 'R', 'B', 'OW1', 'D']), 'forebrain': ('NN', ['F', 'AO1', 'R', 'B', 'R', 'EY2', 'N']), 'forecast': ('NN', ['F', 'AO1', 'R', 'K', 'AE2', 'S', 'T']), 'forecaster': ('NN', ['F', 'AO1', 'R', 'K', 'AE2', 'S', 'T', 'ER0']), 'foreclosed': ('VBN', ['F', 'AO0', 'R', 'K', 'L', 'OW1', 'Z', 'D']), 'foreclosing': ('VBG', ['F', 'AO0', 'R', 'K', 'L', 'OW1', 'Z', 'IH0', 'NG']), 'foreclose': ('NN', ['F', 'AO0', 'R', 'K', 'L', 'OW1', 'Z']), 'foreclosure': ('NN', ['F', 'AO0', 'R', 'K', 'L', 'OW1', 'ZH', 'ER0']), 'forefather': ('NN', ['F', 'AO1', 'R', 'F', 'AA2', 'DH', 'ER0']), 'forefinger': ('NN', ['F', 'AO1', 'R', 'F', 'IH2', 'NG', 'G', 'ER0']), 'forefoot': ('NN', ['F', 'AO1', 'R', 'F', 'UH2', 'T']), 'foregone': ('NN', ['F', 'AO1', 'R', 'G', 'AO1', 'N']), 'foregoing': ('VBG', ['F', 'AO0', 'R', 'G', 'OW1', 'IH0', 'NG']), 'forego': ('NN', ['F', 'AO0', 'R', 'G', 'OW1']), 'foreground': ('NN', ['F', 'AO1', 'R', 'G', 'R', 'AW2', 'N', 'D']), 'forehand': ('NN', ['F', 'AO1', 'R', 'HH', 'AE2', 'N', 'D']), 'forehead': ('NN', ['F', 'AO1', 'R', 'HH', 'EH0', 
'D']), 'foreign': ('JJ', ['F', 'AO1', 'R', 'AH0', 'N']), 'foreigner': ('NN', ['F', 'AO1', 'R', 'AH0', 'N', 'ER0']), 'foremen': ('NNS', ['F', 'AO1', 'R', 'M', 'AH0', 'N']), 'foreman': ('NN', ['F', 'AO1', 'R', 'M', 'AH0', 'N']), 'foremost': ('NN', ['F', 'AO1', 'R', 'M', 'OW2', 'S', 'T']), 'forensic': ('JJ', ['F', 'ER0', 'EH1', 'N', 'S', 'IH0', 'K']), 'forerunner': ('NN', ['F', 'AO1', 'R', 'AH2', 'N', 'ER0']), 'foresee': ('NN', ['F', 'AO0', 'R', 'S', 'IY1']), 'foreseen': ('NN', ['F', 'AO2', 'R', 'S', 'IY1', 'N']), 'foreshadow': ('NN', ['F', 'AO0', 'R', 'SH', 'AE1', 'D', 'OW0']), 'foresight': ('NN', ['F', 'AO1', 'R', 'S', 'AY2', 'T']), 'foreskin': ('NN', ['F', 'OW1', 'R', 'S', 'K', 'IH2', 'N']), 'forest': ('NN', ['F', 'AO1', 'R', 'AH0', 'S', 'T']), 'forestalled': ('VBN', ['F', 'AO2', 'R', 'S', 'T', 'AA1', 'L', 'D']), 'forestalling': ('VBG', ['F', 'AO2', 'R', 'S', 'T', 'AA1', 'L', 'IH0', 'NG']), 'forestall': ('NN', ['F', 'AO0', 'R', 'S', 'T', 'AO1', 'L']), 'forester': ('NN', ['F', 'AO1', 'R', 'AH0', 'S', 'T', 'ER0']), 'forestry': ('NN', ['F', 'AO1', 'R', 'AH0', 'S', 'T', 'R', 'IY0']), 'foretaste': ('NN', ['F', 'AO0', 'R', 'T', 'EY1', 'S', 'T']), 'foretold': ('NN', ['F', 'AO0', 'R', 'T', 'OW1', 'L', 'D']), 'foretelling': ('VBG', ['F', 'AO0', 'R', 'T', 'EH1', 'L', 'IH0', 'NG']), 'foretell': ('NN', ['F', 'AO0', 'R', 'T', 'EH1', 'L']), 'forethought': ('NN', ['F', 'AO1', 'R', 'TH', 'AO2', 'T']), 'forever': ('RB', ['F', 'ER0', 'EH1', 'V', 'ER0']), 'forewarned': ('VBN', ['F', 'AO0', 'R', 'W', 'AO1', 'R', 'N', 'D']), 'forewarning': ('VBG', ['F', 'AO0', 'R', 'W', 'AO1', 'R', 'N', 'IH0', 'NG']), 'forewarn': ('NN', ['F', 'AO0', 'R', 'W', 'AO1', 'R', 'N']), 'forewomen': ('NNS', ['F', 'AO1', 'R', 'W', 'IH0', 'M', 'EH0', 'N']), 'forewoman': ('NN', ['F', 'AO1', 'R', 'W', 'UW0', 'M', 'AH0', 'N']), 'foreword': ('NN', ['F', 'AO1', 'R', 'W', 'ER2', 'D']), 'forfeit': ('NN', ['F', 'AO1', 'R', 'F', 'IH0', 'T']), 'forfeited': ('VBN', ['F', 'AO1', 'R', 'F', 'IH0', 'T', 'IH0', 'D']), 
'forfeiting': ('VBG', ['F', 'AO1', 'R', 'F', 'AH0', 'T', 'IH0', 'NG']), 'forfeiture': ('NN', ['F', 'AO1', 'R', 'F', 'AH0', 'CH', 'ER0']), 'forgave': ('NN', ['F', 'ER0', 'G', 'EY1', 'V']), 'forge': ('NN', ['F', 'AO1', 'R', 'JH']), 'forged': ('VBN', ['F', 'AO1', 'R', 'JH', 'D']), 'forging': ('VBG', ['F', 'AO1', 'R', 'JH', 'IH0', 'NG']), 'forger': ('NN', ['F', 'AO1', 'R', 'JH', 'ER0']), 'forgeries': ('NNS', ['F', 'AO1', 'R', 'JH', 'ER0', 'IY0', 'Z']), 'forgery': ('NN', ['F', 'AO1', 'R', 'JH', 'ER0', 'IY0']), 'forgot': ('NN', ['F', 'ER0', 'G', 'AA1', 'T']), 'forgotten': ('NNS', ['F', 'ER0', 'G', 'AA1', 'T', 'AH0', 'N']), 'forgetting': ('VBG', ['F', 'ER0', 'G', 'EH1', 'T', 'IH0', 'NG']), 'forget': ('NN', ['F', 'ER0', 'G', 'EH1', 'T']), 'forgetful': ('NN', ['F', 'AO0', 'R', 'G', 'EH1', 'T', 'F', 'AH0', 'L']), 'forgettable': ('JJ', ['F', 'AO0', 'R', 'G', 'EH1', 'T', 'AH0', 'B', 'AH0', 'L']), 'forgivable': ('JJ', ['F', 'AO0', 'R', 'G', 'IH1', 'V', 'AH0', 'B', 'AH0', 'L']), 'forgiven': ('RB', ['F', 'ER0', 'G', 'IH1', 'V', 'AH0', 'N']), 'forgiving': ('VBG', ['F', 'ER0', 'G', 'IH1', 'V', 'IH0', 'NG']), 'forgive': ('JJ', ['F', 'ER0', 'G', 'IH1', 'V']), 'forgiveness': ('NN', ['F', 'ER0', 'G', 'IH1', 'V', 'N', 'AH0', 'S']), 'forgone': ('NN', ['F', 'AO0', 'R', 'G', 'AA1', 'N']), 'forgoing': ('VBG', ['F', 'AO0', 'R', 'G', 'OW1', 'IH0', 'NG']), 'forgo': ('NN', ['F', 'AO0', 'R', 'G', 'OW1']), 'fork': ('NN', ['F', 'AO1', 'R', 'K']), 'forked': ('VBN', ['F', 'AO1', 'R', 'K', 'T']), 'forking': ('VBG', ['F', 'AO1', 'R', 'K', 'IH0', 'NG']), 'forlorn': ('NN', ['F', 'ER0', 'L', 'AO1', 'R', 'N']), 'form': ('NN', ['F', 'AO1', 'R', 'M']), 'formed': ('VBN', ['F', 'AO1', 'R', 'M', 'D']), 'forming': ('VBG', ['F', 'AO1', 'R', 'M', 'IH0', 'NG']), 'formal': ('JJ', ['F', 'AO1', 'R', 'M', 'AH0', 'L']), 'formaldehyde': ('NN', ['F', 'AO0', 'R', 'M', 'AE1', 'L', 'D', 'AH0', 'HH', 'AY2', 'D']), 'formalism': ('NN', ['F', 'AO1', 'R', 'M', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'formalist': ('NN', ['F', 
'AO1', 'R', 'M', 'AH0', 'L', 'AH0', 'S', 'T']), 'formalities': ('NNS', ['F', 'AO0', 'R', 'M', 'AE1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'formality': ('NN', ['F', 'AO0', 'R', 'M', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'formalized': ('VBN', ['F', 'AO1', 'R', 'M', 'AH0', 'L', 'AY2', 'Z', 'D']), 'formalizing': ('VBG', ['F', 'AO1', 'R', 'M', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'formalize': ('VB', ['F', 'AO1', 'R', 'M', 'AH0', 'L', 'AY2', 'Z']), 'formally': ('RB', ['F', 'AO1', 'R', 'M', 'AH0', 'L', 'IY0']), 'formation': ('NN', ['F', 'AO0', 'R', 'M', 'EY1', 'SH', 'AH0', 'N']), 'formative': ('JJ', ['F', 'AO1', 'R', 'M', 'AH0', 'T', 'IH0', 'V']), 'former': ('JJ', ['F', 'AO1', 'R', 'M', 'ER0']), 'formerly': ('RB', ['F', 'AO1', 'R', 'M', 'ER0', 'L', 'IY0']), 'formic': ('JJ', ['F', 'AO1', 'R', 'M', 'IH0', 'K']), 'formica': ('NN', ['F', 'AO0', 'R', 'M', 'AY1', 'K', 'AH0']), 'formidable': ('JJ', ['F', 'AO1', 'R', 'M', 'AH0', 'D', 'AH0', 'B', 'AH0', 'L']), 'formidably': ('RB', ['F', 'AO1', 'R', 'M', 'AH0', 'D', 'AH0', 'B', 'L', 'IY0']), 'formulas': ('NNS', ['F', 'AO1', 'R', 'M', 'Y', 'AH0', 'L', 'AH0', 'Z']), 'formula': ('NN', ['F', 'AO1', 'R', 'M', 'Y', 'AH0', 'L', 'AH0']), 'formulary': ('JJ', ['F', 'AO1', 'R', 'M', 'Y', 'AH0', 'L', 'EH2', 'R', 'IY0']), 'formulated': ('VBN', ['F', 'AO1', 'R', 'M', 'Y', 'AH0', 'L', 'EY2', 'T', 'AH0', 'D']), 'formulating': ('VBG', ['F', 'AO1', 'R', 'M', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'formulate': ('NN', ['F', 'AO1', 'R', 'M', 'Y', 'AH0', 'L', 'EY2', 'T']), 'formulation': ('NN', ['F', 'AO2', 'R', 'M', 'Y', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'forsook': ('NN', ['F', 'AO0', 'R', 'S', 'UH1', 'K']), 'forsaken': ('NN', ['F', 'AO0', 'R', 'S', 'EY1', 'K', 'AH0', 'N']), 'forsaking': ('VBG', ['F', 'AO0', 'R', 'S', 'EY1', 'K', 'IH0', 'NG']), 'forsake': ('NN', ['F', 'AO0', 'R', 'S', 'EY1', 'K']), 'forster': ('NN', ['F', 'AO1', 'R', 'S', 'T', 'ER0']), 'forsworn': ('NN', ['F', 'AO2', 'R', 'S', 'W', 'AO1', 'R', 'N']), 'forswear': ('NN', ['F', 'AO0', 
'R', 'S', 'W', 'EH1', 'R']), 'forsythia': ('NN', ['F', 'AO0', 'R', 'S', 'IH1', 'TH', 'IY0', 'AH0']), 'fort': ('NN', ['F', 'AO1', 'R', 'T']), 'forte': ('NN', ['F', 'AO1', 'R', 'T', 'EY0']), 'forth': ('NN', ['F', 'AO1', 'R', 'TH']), 'forthcoming': ('VBG', ['F', 'AO1', 'R', 'TH', 'K', 'AH1', 'M', 'IH0', 'NG']), 'forthright': ('NN', ['F', 'AO1', 'R', 'TH', 'R', 'AY1', 'T']), 'forthrightness': ('NN', ['F', 'AO1', 'R', 'TH', 'R', 'AY1', 'T', 'N', 'AH0', 'S']), 'forthwith': ('NN', ['F', 'AO1', 'R', 'TH', 'W', 'IH1', 'TH']), 'forties': ('NNS', ['F', 'AO1', 'R', 'T', 'IY0', 'Z']), 'fortieth': ('NNS', ['F', 'AO1', 'R', 'T', 'IY0', 'IH0', 'TH']), 'fortification': ('NN', ['F', 'AO2', 'R', 'T', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'fortifier': ('NN', ['F', 'AO1', 'R', 'T', 'AH0', 'F', 'AY2', 'ER0']), 'fortified': ('VBN', ['F', 'AO1', 'R', 'T', 'AH0', 'F', 'AY2', 'D']), 'fortifying': ('VBG', ['F', 'AO1', 'R', 'T', 'IH0', 'F', 'AY2', 'IH0', 'NG']), 'fortify': ('NN', ['F', 'AO1', 'R', 'T', 'IH0', 'F', 'AY2']), 'fortin': ('NN', ['F', 'AO1', 'R', 'T', 'IH0', 'N']), 'fortitude': ('NN', ['F', 'AO1', 'R', 'T', 'IH0', 'T', 'UW2', 'D']), 'fortnight': ('NN', ['F', 'AO1', 'R', 'T', 'N', 'AY2', 'T']), 'fortnightly': ('RB', ['F', 'AO1', 'R', 'T', 'N', 'AY2', 'T', 'L', 'IY0']), 'fortresses': ('NNS', ['F', 'AO1', 'R', 'T', 'R', 'AH0', 'S', 'IH0', 'Z']), 'fortress': ('NN', ['F', 'AO1', 'R', 'T', 'R', 'AH0', 'S']), 'fortuitous': ('JJ', ['F', 'AO0', 'R', 'T', 'UW1', 'IH0', 'T', 'AH0', 'S']), 'fortunate': ('NN', ['F', 'AO1', 'R', 'CH', 'AH0', 'N', 'AH0', 'T']), 'fortunately': ('RB', ['F', 'AO1', 'R', 'CH', 'AH0', 'N', 'AH0', 'T', 'L', 'IY0']), 'fortune': ('NN', ['F', 'AO1', 'R', 'CH', 'AH0', 'N']), 'forty': ('NN', ['F', 'AO1', 'R', 'T', 'IY0']), 'forums': ('NNS', ['F', 'AO1', 'R', 'AH0', 'M', 'Z']), 'fora': ('NNS', ['F', 'AO1', 'R', 'AH0']), 'forum': ('NN', ['F', 'AO1', 'R', 'AH0', 'M']), 'forward': ('RB', ['F', 'AO1', 'R', 'W', 'ER0', 'D']), 'forwards': ('NNS', ['F', 'AO1', 'R', 
'W', 'ER0', 'D', 'Z']), 'forwarded': ('VBN', ['F', 'AO1', 'R', 'W', 'ER0', 'D', 'IH0', 'D']), 'forwarding': ('VBG', ['F', 'AO1', 'R', 'W', 'ER0', 'D', 'IH0', 'NG']), 'forwarder': ('NN', ['F', 'AO1', 'R', 'W', 'ER0', 'D', 'ER0']), 'fosse': ('NN', ['F', 'AA1', 'S']), 'fossil': ('NN', ['F', 'AA1', 'S', 'AH0', 'L']), 'fossiliferous': ('JJ', ['F', 'AA2', 'S', 'AH0', 'L', 'IH1', 'F', 'ER0', 'AH0', 'S']), 'fossilized': ('VBN', ['F', 'AA1', 'S', 'AH0', 'L', 'AY2', 'Z', 'D']), 'fossilize': ('VB', ['F', 'AA1', 'S', 'AH0', 'L', 'AY2', 'Z']), 'fostered': ('VBN', ['F', 'AA1', 'S', 'T', 'ER0', 'D']), 'fostering': ('VBG', ['F', 'AA1', 'S', 'T', 'ER0', 'IH0', 'NG']), 'foster': ('NN', ['F', 'AA1', 'S', 'T', 'ER0']), 'foul': ('NN', ['F', 'AW1', 'L']), 'fouled': ('VBN', ['F', 'AW1', 'L', 'D']), 'fouling': ('VBG', ['F', 'AW1', 'L', 'IH0', 'NG']), 'foulness': ('NN', ['F', 'AW1', 'L', 'N', 'AH0', 'S']), 'founded': ('VBN', ['F', 'AW1', 'N', 'D', 'AH0', 'D']), 'founding': ('VBG', ['F', 'AW1', 'N', 'D', 'IH0', 'NG']), 'foundation': ('NN', ['F', 'AW0', 'N', 'D', 'EY1', 'SH', 'AH0', 'N']), 'founder': ('NN', ['F', 'AW1', 'N', 'D', 'ER0']), 'foundered': ('VBN', ['F', 'AW1', 'N', 'D', 'ER0', 'D']), 'foundering': ('VBG', ['F', 'AW1', 'N', 'D', 'ER0', 'IH0', 'NG']), 'foundling': ('VBG', ['F', 'AW1', 'N', 'D', 'L', 'IH0', 'NG']), 'foundries': ('NNS', ['F', 'AW1', 'N', 'D', 'R', 'IY0', 'Z']), 'foundry': ('NN', ['F', 'AW1', 'N', 'D', 'R', 'IY0']), 'fountain': ('NN', ['F', 'AW1', 'N', 'T', 'AH0', 'N']), 'four': ('CD', ['F', 'AO1', 'R']), 'fourfold': ('NN', ['F', 'AO1', 'R', 'F', 'OW1', 'L', 'D']), 'foursquare': ('NN', ['F', 'AO1', 'R', 'S', 'K', 'W', 'EH1', 'R']), 'fourteen': ('NN', ['F', 'AO1', 'R', 'T', 'IY1', 'N']), 'fourteenth': ('NN', ['F', 'AO1', 'R', 'T', 'IY1', 'N', 'TH']), 'fourth': ('JJ', ['F', 'AO1', 'R', 'TH']), 'fourthly': ('RB', ['F', 'AO1', 'R', 'TH', 'L', 'IY0']), 'fouty': ('NN', ['F', 'AW1', 'T', 'IY0']), 'fowl': ('NN', ['F', 'AW1', 'L']), 'fowler': ('NN', ['F', 'AW1', 'L', 'ER0']), 
'foxes': ('NNS', ['F', 'AA1', 'K', 'S', 'AH0', 'Z']), 'fox': ('NN', ['F', 'AA1', 'K', 'S']), 'fracas': ('NN', ['F', 'R', 'EY1', 'K', 'AH0', 'S']), 'foxglove': ('NN', ['F', 'AA1', 'K', 'S', 'G', 'L', 'AH2', 'V']), 'foxhound': ('NN', ['F', 'AA1', 'K', 'S', 'HH', 'AW2', 'N', 'D']), 'foxtail': ('NN', ['F', 'AA1', 'K', 'S', 'T', 'EY2', 'L']), 'foxy': ('NN', ['F', 'AA1', 'K', 'S', 'IY0']), 'foy': ('NN', ['F', 'OY1']), 'foyer': ('NN', ['F', 'OY1', 'ER0']), 'fraction': ('NN', ['F', 'R', 'AE1', 'K', 'SH', 'AH0', 'N']), 'fractional': ('JJ', ['F', 'R', 'AE1', 'K', 'SH', 'AH0', 'N', 'AH0', 'L']), 'fractionally': ('RB', ['F', 'R', 'AE1', 'K', 'SH', 'AH0', 'N', 'AH0', 'L', 'IY0']), 'fractious': ('JJ', ['F', 'R', 'AE1', 'K', 'SH', 'AH0', 'S']), 'fracture': ('NN', ['F', 'R', 'AE1', 'K', 'CH', 'ER0']), 'fractured': ('VBN', ['F', 'R', 'AE1', 'K', 'CH', 'ER0', 'D']), 'fracturing': ('VBG', ['F', 'R', 'AE1', 'K', 'CH', 'ER0', 'IH0', 'NG']), 'fragile': ('NN', ['F', 'R', 'AE1', 'JH', 'AH0', 'L']), 'fragility': ('NN', ['F', 'R', 'AH0', 'JH', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'fragment': ('NN', ['F', 'R', 'AE1', 'G', 'M', 'AH0', 'N', 'T']), 'fragmentary': ('JJ', ['F', 'R', 'AE1', 'G', 'M', 'AH0', 'N', 'T', 'EH2', 'R', 'IY0']), 'fragmented': ('VBN', ['F', 'R', 'AE1', 'G', 'M', 'AH0', 'N', 'T', 'IH0', 'D']), 'fragrance': ('NN', ['F', 'R', 'EY1', 'G', 'R', 'AH0', 'N', 'S']), 'fragrant': ('NN', ['F', 'R', 'EY1', 'G', 'R', 'AH0', 'N', 'T']), 'frail': ('NN', ['F', 'R', 'EY1', 'L']), 'frailties': ('NNS', ['F', 'R', 'EY1', 'L', 'T', 'IY0', 'Z']), 'frailty': ('NN', ['F', 'R', 'EY1', 'L', 'T', 'IY0']), 'framed': ('VBN', ['F', 'R', 'EY1', 'M', 'D']), 'framing': ('VBG', ['F', 'R', 'EY1', 'M', 'IH0', 'NG']), 'frame': ('NN', ['F', 'R', 'EY1', 'M']), 'framer': ('NN', ['F', 'R', 'EY1', 'M', 'ER0']), 'framework': ('NN', ['F', 'R', 'EY1', 'M', 'W', 'ER2', 'K']), 'franc': ('NN', ['F', 'R', 'AE1', 'NG', 'K']), 'franchise': ('NN', ['F', 'R', 'AE1', 'N', 'CH', 'AY2', 'Z']), 'franchised': ('VBN', ['F', 'R', 
'AE1', 'N', 'CH', 'AY0', 'Z', 'D']), 'franchising': ('VBG', ['F', 'R', 'AE1', 'N', 'CH', 'AY0', 'Z', 'IH0', 'NG']), 'franciscan': ('JJ', ['F', 'R', 'AE0', 'N', 'S', 'IH1', 'S', 'K', 'AH0', 'N']), 'frank': ('NN', ['F', 'R', 'AE1', 'NG', 'K']), 'franked': ('VBN', ['F', 'R', 'AE1', 'NG', 'K', 'T']), 'franking': ('NN', ['F', 'R', 'AE1', 'NG', 'K', 'IH0', 'NG']), 'frankincense': ('NN', ['F', 'R', 'AE1', 'NG', 'K', 'AH0', 'N', 'S', 'EH2', 'N', 'S']), 'frankish': ('JJ', ['F', 'R', 'AE1', 'NG', 'K', 'IH0', 'SH']), 'franklin': ('NN', ['F', 'R', 'AE1', 'NG', 'K', 'L', 'IH0', 'N']), 'franklinite': ('NN', ['F', 'R', 'AE1', 'NG', 'K', 'L', 'IH0', 'N', 'AY2', 'T']), 'frankly': ('RB', ['F', 'R', 'AE1', 'NG', 'K', 'L', 'IY0']), 'frankness': ('NN', ['F', 'R', 'AE1', 'NG', 'K', 'N', 'AH0', 'S']), 'frantic': ('JJ', ['F', 'R', 'AE1', 'N', 'T', 'IH0', 'K']), 'frap': ('NN', ['F', 'R', 'AE1', 'P']), 'frater': ('NN', ['F', 'R', 'EY1', 'T', 'ER0']), 'fraternal': ('JJ', ['F', 'R', 'AH0', 'T', 'ER1', 'N', 'AH0', 'L']), 'fraternities': ('NNS', ['F', 'R', 'AH0', 'T', 'ER1', 'N', 'AH0', 'T', 'IY0', 'Z']), 'fraternity': ('NN', ['F', 'R', 'AH0', 'T', 'ER1', 'N', 'AH0', 'T', 'IY0']), 'fraternizing': ('VBG', ['F', 'R', 'AE1', 'T', 'ER0', 'N', 'AY2', 'Z', 'IH0', 'NG']), 'fraternize': ('VB', ['F', 'R', 'AE1', 'T', 'ER0', 'N', 'AY2', 'Z']), 'fratricide': ('NN', ['F', 'R', 'AE1', 'T', 'R', 'AH0', 'S', 'AY2', 'D']), 'fraud': ('NN', ['F', 'R', 'AO1', 'D']), 'fraudulent': ('NN', ['F', 'R', 'AO1', 'JH', 'AH0', 'L', 'AH0', 'N', 'T']), 'fraudulently': ('RB', ['F', 'R', 'AO1', 'D', 'UW0', 'L', 'AH0', 'N', 'T', 'L', 'IY0']), 'fraught': ('NN', ['F', 'R', 'AO1', 'T']), 'fray': ('NN', ['F', 'R', 'EY1']), 'frayed': ('NNS', ['F', 'R', 'EY1', 'D']), 'fraying': ('VBG', ['F', 'R', 'EY1', 'IH0', 'NG']), 'freaked': ('VBN', ['F', 'R', 'IY1', 'K', 'T']), 'freaking': ('VBG', ['F', 'R', 'IY1', 'K', 'IH0', 'NG']), 'freak': ('NN', ['F', 'R', 'IY1', 'K']), 'freck': ('NN', ['F', 'R', 'EH1', 'K']), 'freckle': ('NN', ['F', 'R', 
'EH1', 'K', 'AH0', 'L']), 'freckled': ('VBN', ['F', 'R', 'EH1', 'K', 'AH0', 'L', 'D']), 'fred': ('VBN', ['F', 'R', 'EH1', 'D']), 'free': ('JJ', ['F', 'R', 'IY1']), 'freed': ('NN', ['F', 'R', 'IY1', 'D']), 'freeing': ('VBG', ['F', 'R', 'IY1', 'IH0', 'NG']), 'freeborn': ('NN', ['F', 'R', 'IY1', 'B', 'ER0', 'N']), 'freedman': ('NN', ['F', 'R', 'IY1', 'D', 'M', 'AH0', 'N']), 'freedom': ('NN', ['F', 'R', 'IY1', 'D', 'AH0', 'M']), 'freehold': ('NN', ['F', 'R', 'IY1', 'HH', 'OW2', 'L', 'D']), 'freeholder': ('NN', ['F', 'R', 'IY1', 'HH', 'OW2', 'L', 'D', 'ER0']), 'freely': ('RB', ['F', 'R', 'IY1', 'L', 'IY0']), 'freemen': ('NNS', ['F', 'R', 'IY1', 'M', 'EH0', 'N']), 'freeman': ('NN', ['F', 'R', 'IY1', 'M', 'AH0', 'N']), 'freemason': ('NN', ['F', 'R', 'IY1', 'M', 'EY1', 'S', 'AH0', 'N']), 'freemasonry': ('NN', ['F', 'R', 'IY1', 'M', 'EY1', 'S', 'AH0', 'N', 'R', 'IY0']), 'freer': ('NN', ['F', 'R', 'IY1', 'ER0']), 'freestone': ('NN', ['F', 'R', 'IY1', 'S', 'T', 'OW2', 'N']), 'freethinker': ('NN', ['F', 'R', 'IY1', 'TH', 'IH1', 'NG', 'K', 'ER0']), 'freeze': ('NN', ['F', 'R', 'IY1', 'Z']), 'froze': ('NN', ['F', 'R', 'OW1', 'Z']), 'frozen': ('NNS', ['F', 'R', 'OW1', 'Z', 'AH0', 'N']), 'freezing': ('VBG', ['F', 'R', 'IY1', 'Z', 'IH0', 'NG']), 'freezer': ('NN', ['F', 'R', 'IY1', 'Z', 'ER0']), 'freight': ('NN', ['F', 'R', 'EY1', 'T']), 'freighter': ('NN', ['F', 'R', 'EY1', 'T', 'ER0']), 'fremd': ('NN', ['F', 'R', 'EH1', 'M', 'D']), 'french': ('JJ', ['F', 'R', 'EH1', 'N', 'CH']), 'frenchmen': ('NNS', ['F', 'R', 'EH1', 'N', 'CH', 'M', 'EH0', 'N']), 'frenchman': ('NN', ['F', 'R', 'EH1', 'N', 'CH', 'M', 'AE0', 'N']), 'frenzied': ('VBN', ['F', 'R', 'EH1', 'N', 'Z', 'IY0', 'D']), 'frenzies': ('NNS', ['F', 'R', 'EH1', 'N', 'Z', 'IY0', 'Z']), 'frenzy': ('NN', ['F', 'R', 'EH1', 'N', 'Z', 'IY0']), 'frequencies': ('NNS', ['F', 'R', 'IY1', 'K', 'W', 'AH0', 'N', 'S', 'IY0', 'Z']), 'frequency': ('NN', ['F', 'R', 'IY1', 'K', 'W', 'AH0', 'N', 'S', 'IY0']), 'frequent': ('NN', ['F', 'R', 'IY1', 'K', 
'W', 'AH0', 'N', 'T']), 'frequented': ('VBN', ['F', 'R', 'IY1', 'K', 'W', 'AH0', 'N', 'T', 'IH0', 'D']), 'frequenting': ('VBG', ['F', 'R', 'IY1', 'K', 'W', 'AH0', 'N', 'T', 'IH0', 'NG']), 'frequently': ('RB', ['F', 'R', 'IY1', 'K', 'W', 'AH0', 'N', 'T', 'L', 'IY0']), 'frere': ('RB', ['F', 'R', 'EH1', 'R']), 'frescoes': ('NNS', ['F', 'R', 'EH1', 'S', 'K', 'OW0', 'Z']), 'fresco': ('NN', ['F', 'R', 'EH1', 'S', 'K', 'OW0']), 'frescoed': ('NN', ['F', 'R', 'EH1', 'S', 'K', 'OW0', 'D']), 'fresh': ('JJ', ['F', 'R', 'EH1', 'SH']), 'freshened': ('VBN', ['F', 'R', 'EH1', 'SH', 'AH0', 'N', 'D']), 'freshening': ('VBG', ['F', 'R', 'EH1', 'SH', 'AH0', 'N', 'IH0', 'NG']), 'freshen': ('NN', ['F', 'R', 'EH1', 'SH', 'AH0', 'N']), 'freshly': ('RB', ['F', 'R', 'EH1', 'SH', 'L', 'IY0']), 'freshmen': ('NNS', ['F', 'R', 'EH1', 'SH', 'M', 'IH0', 'N']), 'freshman': ('NN', ['F', 'R', 'EH1', 'SH', 'M', 'AH0', 'N']), 'freshness': ('NN', ['F', 'R', 'EH1', 'SH', 'N', 'AH0', 'S']), 'fret': ('NN', ['F', 'R', 'EH1', 'T']), 'fretted': ('VBN', ['F', 'R', 'EH1', 'T', 'IH0', 'D']), 'fretting': ('VBG', ['F', 'R', 'EH1', 'T', 'IH0', 'NG']), 'fretful': ('NN', ['F', 'R', 'EH1', 'T', 'F', 'AH0', 'L']), 'frett': ('NN', ['F', 'R', 'EH1', 'T']), 'fretter': ('NN', ['F', 'R', 'EH1', 'T', 'ER0']), 'freya': ('NN', ['F', 'R', 'EY1', 'AH0']), 'friar': ('NN', ['F', 'R', 'AY1', 'ER0']), 'friary': ('JJ', ['F', 'R', 'AY1', 'ER0', 'IY0']), 'frictionless': ('NN', ['F', 'R', 'IH1', 'K', 'SH', 'AH0', 'N', 'L', 'AH0', 'S']), 'friday': ('NN', ['F', 'R', 'AY1', 'D', 'IY0']), 'fridge': ('NN', ['F', 'R', 'IH1', 'JH']), 'fried': ('VBN', ['F', 'R', 'AY1', 'D']), 'friend': ('NN', ['F', 'R', 'EH1', 'N', 'D']), 'friendliness': ('NN', ['F', 'R', 'EH1', 'N', 'D', 'L', 'IY0', 'N', 'IH0', 'S']), 'friendly': ('RB', ['F', 'R', 'EH1', 'N', 'D', 'L', 'IY0']), 'friendship': ('NN', ['F', 'R', 'EH1', 'N', 'D', 'SH', 'IH0', 'P']), 'frier': ('NN', ['F', 'R', 'AY1', 'ER0']), 'friese': ('JJ', ['F', 'R', 'IY1', 'Z']), 'frieze': ('NN', ['F', 'R', 
'IY1', 'Z']), 'frigate': ('NN', ['F', 'R', 'IH1', 'G', 'AH0', 'T']), 'fright': ('NN', ['F', 'R', 'AY1', 'T']), 'frightened': ('VBN', ['F', 'R', 'AY1', 'T', 'AH0', 'N', 'D']), 'frightening': ('VBG', ['F', 'R', 'AY1', 'T', 'AH0', 'N', 'IH0', 'NG']), 'frighten': ('NNS', ['F', 'R', 'AY1', 'T', 'AH0', 'N']), 'frightful': ('NN', ['F', 'R', 'AY1', 'T', 'F', 'AH0', 'L']), 'frightfully': ('RB', ['F', 'R', 'AY1', 'T', 'F', 'AH0', 'L', 'IY0']), 'frigid': ('NN', ['F', 'R', 'IH1', 'JH', 'AH0', 'D']), 'frill': ('NN', ['F', 'R', 'IH1', 'L']), 'fringe': ('NN', ['F', 'R', 'IH1', 'N', 'JH']), 'frisk': ('NN', ['F', 'R', 'IH1', 'S', 'K']), 'frisked': ('VBN', ['F', 'R', 'IH1', 'S', 'K', 'T']), 'frisky': ('NN', ['F', 'R', 'IH1', 'S', 'K', 'IY0']), 'frist': ('NN', ['F', 'R', 'IH1', 'S', 'T']), 'frith': ('NN', ['F', 'R', 'IH1', 'TH']), 'fritter': ('NN', ['F', 'R', 'IH1', 'T', 'ER0']), 'frittered': ('VBN', ['F', 'R', 'IH1', 'T', 'ER0', 'D']), 'frittering': ('VBG', ['F', 'R', 'IH1', 'T', 'ER0', 'IH0', 'NG']), 'frivolity': ('NN', ['F', 'R', 'AH0', 'V', 'AA1', 'L', 'AH0', 'T', 'IY0']), 'frivolous': ('JJ', ['F', 'R', 'IH1', 'V', 'AH0', 'L', 'AH0', 'S']), 'fro': ('NN', ['F', 'R', 'OW1']), 'frock': ('NN', ['F', 'R', 'AA1', 'K']), 'frog': ('NN', ['F', 'R', 'AA1', 'G']), 'frogmouth': ('NN', ['F', 'R', 'AA1', 'G', 'M', 'AW2', 'TH']), 'frolic': ('NN', ['F', 'R', 'AA1', 'L', 'IH0', 'K']), 'frolicking': ('VBG', ['F', 'R', 'AA1', 'L', 'IH0', 'K', 'IH0', 'NG']), 'from': ('IN', ['F', 'R', 'AH1', 'M']), 'frond': ('NN', ['F', 'R', 'AA1', 'N', 'D']), 'fronde': ('NN', ['F', 'R', 'AA1', 'N', 'D']), 'front': ('NN', ['F', 'R', 'AH1', 'N', 'T']), 'fronted': ('VBN', ['F', 'R', 'AH1', 'N', 'T', 'IH0', 'D']), 'fronting': ('VBG', ['F', 'R', 'AH1', 'N', 'T', 'IH0', 'NG']), 'frontage': ('NN', ['F', 'R', 'AH1', 'N', 'T', 'IH0', 'JH']), 'frontal': ('NN', ['F', 'R', 'AH1', 'N', 'T', 'AH0', 'L']), 'frontier': ('NN', ['F', 'R', 'AH0', 'N', 'T', 'IH1', 'R']), 'frost': ('NN', ['F', 'R', 'AO1', 'S', 'T']), 'frosting': ('VBG', 
['F', 'R', 'AO1', 'S', 'T', 'IH0', 'NG']), 'frostbite': ('NN', ['F', 'R', 'AO1', 'S', 'T', 'B', 'AY2', 'T']), 'frosted': ('VBN', ['F', 'R', 'AO1', 'S', 'T', 'AH0', 'D']), 'frosty': ('NN', ['F', 'R', 'AO1', 'S', 'T', 'IY0']), 'froth': ('NN', ['F', 'R', 'AO1', 'TH']), 'frothiness': ('NN', ['F', 'R', 'AO1', 'TH', 'IY0', 'N', 'IH0', 'S']), 'frothy': ('NN', ['F', 'R', 'AO1', 'TH', 'IY0']), 'frowned': ('VBN', ['F', 'R', 'AW1', 'N', 'D']), 'frowning': ('VBG', ['F', 'R', 'AW1', 'N', 'IH0', 'NG']), 'frown': ('NN', ['F', 'R', 'AW1', 'N']), 'frowningly': ('RB', ['F', 'R', 'AW1', 'N', 'IH0', 'NG', 'L', 'IY0']), 'fructose': ('NN', ['F', 'R', 'AH1', 'K', 'T', 'OW2', 'S']), 'frugal': ('NN', ['F', 'R', 'UW1', 'G', 'AH0', 'L']), 'frugality': ('NN', ['F', 'R', 'UW0', 'G', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'frugally': ('RB', ['F', 'R', 'UW1', 'G', 'AH0', 'L', 'IY0']), 'fruit': ('NN', ['F', 'R', 'UW1', 'T']), 'fruitful': ('NN', ['F', 'R', 'UW1', 'T', 'F', 'AH0', 'L']), 'fruiting': ('VBG', ['F', 'R', 'UW1', 'T', 'IH0', 'NG']), 'fruition': ('NN', ['F', 'R', 'UW0', 'IH1', 'SH', 'AH0', 'N']), 'fruitless': ('NN', ['F', 'R', 'UW1', 'T', 'L', 'AH0', 'S']), 'frump': ('NN', ['F', 'R', 'AH1', 'M', 'P']), 'frush': ('NN', ['F', 'R', 'AH1', 'SH']), 'frustrate': ('NN', ['F', 'R', 'AH1', 'S', 'T', 'R', 'EY2', 'T']), 'frustrated': ('VBN', ['F', 'R', 'AH1', 'S', 'T', 'R', 'EY2', 'T', 'AH0', 'D']), 'frustrating': ('VBG', ['F', 'R', 'AH1', 'S', 'T', 'R', 'EY2', 'T', 'IH0', 'NG']), 'frustration': ('NN', ['F', 'R', 'AH0', 'S', 'T', 'R', 'EY1', 'SH', 'AH0', 'N']), 'frying': ('VBG', ['F', 'R', 'AY1', 'IH0', 'NG']), 'fry': ('NN', ['F', 'R', 'AY1']), 'fuchs': ('NN', ['F', 'Y', 'UW1', 'K', 'S']), 'fuchsias': ('NN', ['F', 'Y', 'UW1', 'SH', 'AH0', 'Z']), 'fudge': ('NN', ['F', 'AH1', 'JH']), 'fudged': ('VBN', ['F', 'AH1', 'JH', 'D']), 'fudging': ('VBG', ['F', 'AH1', 'JH', 'IH0', 'NG']), 'fuel': ('NN', ['F', 'Y', 'UW1', 'AH0', 'L']), 'fugitive': ('JJ', ['F', 'Y', 'UW1', 'JH', 'AH0', 'T', 'IH0', 'V']), 'fugue': 
('NN', ['F', 'Y', 'UW1', 'G']), 'fulcrum': ('NN', ['F', 'UH1', 'L', 'K', 'R', 'AH0', 'M']), 'fulfilled': ('VBN', ['F', 'UH0', 'L', 'F', 'IH1', 'L', 'D']), 'fulfilling': ('VBG', ['F', 'UH0', 'L', 'F', 'IH1', 'L', 'IH0', 'NG']), 'fulfill': ('NN', ['F', 'UH0', 'L', 'F', 'IH1', 'L']), 'fulfillment': ('NN', ['F', 'UH0', 'L', 'F', 'IH1', 'L', 'M', 'AH0', 'N', 'T']), 'full': ('JJ', ['F', 'UH1', 'L']), 'fullam': ('NN', ['F', 'UH1', 'L', 'AH0', 'M']), 'fuller': ('NN', ['F', 'UH1', 'L', 'ER0']), 'fullness': ('NN', ['F', 'UH1', 'L', 'N', 'AH0', 'S']), 'fully': ('RB', ['F', 'UH1', 'L', 'IY0']), 'fulminate': ('NN', ['F', 'UH1', 'L', 'M', 'AH0', 'N', 'EY2', 'T']), 'fulsome': ('NN', ['F', 'UH1', 'L', 'S', 'AH0', 'M']), 'fumbled': ('VBN', ['F', 'AH1', 'M', 'B', 'AH0', 'L', 'D']), 'fumbling': ('VBG', ['F', 'AH1', 'M', 'B', 'AH0', 'L', 'IH0', 'NG']), 'fumble': ('JJ', ['F', 'AH1', 'M', 'B', 'AH0', 'L']), 'fume': ('NN', ['F', 'Y', 'UW1', 'M']), 'fumed': ('VBN', ['F', 'Y', 'UW1', 'M', 'D']), 'fuming': ('VBG', ['F', 'Y', 'UW1', 'M', 'IH0', 'NG']), 'fumigate': ('NN', ['F', 'Y', 'UW1', 'M', 'AH0', 'G', 'EY2', 'T']), 'fumigation': ('NN', ['F', 'Y', 'UW2', 'M', 'AH0', 'G', 'EY1', 'SH', 'AH0', 'N']), 'fun': ('NN', ['F', 'AH1', 'N']), 'function': ('NN', ['F', 'AH1', 'NG', 'K', 'SH', 'AH0', 'N']), 'functional': ('JJ', ['F', 'AH1', 'NG', 'K', 'SH', 'AH0', 'N', 'AH0', 'L']), 'functionally': ('RB', ['F', 'AH1', 'NG', 'K', 'SH', 'AH0', 'N', 'AH0', 'L', 'IY0']), 'functionaries': ('NNS', ['F', 'AH1', 'NG', 'K', 'SH', 'AH0', 'N', 'EH2', 'R', 'IY0', 'Z']), 'functionary': ('JJ', ['F', 'AH1', 'NG', 'K', 'SH', 'AH0', 'N', 'EH2', 'R', 'IY0']), 'fund': ('NN', ['F', 'AH1', 'N', 'D']), 'funded': ('VBN', ['F', 'AH1', 'N', 'D', 'AH0', 'D']), 'funding': ('NN', ['F', 'AH1', 'N', 'D', 'IH0', 'NG']), 'fundamental': ('JJ', ['F', 'AH2', 'N', 'D', 'AH0', 'M', 'EH1', 'N', 'T', 'AH0', 'L']), 'fundamentally': ('RB', ['F', 'AH2', 'N', 'D', 'AH0', 'M', 'EH1', 'N', 'T', 'AH0', 'L', 'IY0']), 'funeral': ('JJ', ['F', 'Y', 
'UW1', 'N', 'ER0', 'AH0', 'L']), 'fungal': ('NN', ['F', 'AH1', 'NG', 'G', 'AH0', 'L']), 'fungi': ('NNS', ['F', 'AH1', 'N', 'JH', 'AY0']), 'fungicide': ('NN', ['F', 'AH1', 'N', 'JH', 'AH0', 'S', 'AY2', 'D']), 'fungus': ('NN', ['F', 'AH1', 'NG', 'G', 'AH0', 'S']), 'funk': ('NN', ['F', 'AH1', 'NG', 'K']), 'funky': ('NN', ['F', 'AH1', 'NG', 'K', 'IY0']), 'funnel': ('NNS', ['F', 'AH1', 'N', 'AH0', 'L']), 'funny': ('NN', ['F', 'AH1', 'N', 'IY0']), 'fur': ('NN', ['F', 'ER1']), 'furbished': ('VBN', ['F', 'ER1', 'B', 'IH0', 'SH', 'T']), 'furbishing': ('VBG', ['F', 'ER1', 'B', 'IH0', 'SH', 'IH0', 'NG']), 'furbish': ('JJ', ['F', 'ER1', 'B', 'IH0', 'SH']), 'furious': ('JJ', ['F', 'Y', 'UH1', 'R', 'IY0', 'AH0', 'S']), 'furlong': ('NN', ['F', 'ER1', 'L', 'AO2', 'NG']), 'furlough': ('NN', ['F', 'ER1', 'L', 'OW0']), 'furloughed': ('VBN', ['F', 'ER1', 'L', 'OW0', 'D']), 'furnace': ('NN', ['F', 'ER1', 'N', 'AH0', 'S']), 'furnished': ('VBN', ['F', 'ER1', 'N', 'IH0', 'SH', 'T']), 'furnishing': ('VBG', ['F', 'ER1', 'N', 'IH0', 'SH', 'IH0', 'NG']), 'furnish': ('JJ', ['F', 'ER1', 'N', 'IH0', 'SH']), 'furniture': ('NN', ['F', 'ER1', 'N', 'IH0', 'CH', 'ER0']), 'furrier': ('NN', ['F', 'ER1', 'IY0', 'ER0']), 'furrow': ('NN', ['F', 'ER1', 'OW0']), 'furrowed': ('VBN', ['F', 'ER1', 'OW0', 'D']), 'furry': ('NN', ['F', 'ER1', 'IY0']), 'further': ('RB', ['F', 'ER1', 'DH', 'ER0']), 'furthered': ('VBN', ['F', 'ER1', 'DH', 'ER0', 'D']), 'furthering': ('VBG', ['F', 'ER1', 'DH', 'ER0', 'IH0', 'NG']), 'furtherance': ('NN', ['F', 'ER1', 'TH', 'ER0', 'AH0', 'N', 'S']), 'furthermore': ('RB', ['F', 'ER1', 'DH', 'ER0', 'M', 'AO2', 'R']), 'furthest': ('NN', ['F', 'ER1', 'TH', 'AH0', 'S', 'T']), 'furtive': ('JJ', ['F', 'ER1', 'T', 'IH0', 'V']), 'furtively': ('RB', ['F', 'ER1', 'T', 'IH0', 'V', 'L', 'IY0']), 'fury': ('NN', ['F', 'Y', 'UH1', 'R', 'IY0']), 'fused': ('VBN', ['F', 'Y', 'UW1', 'Z', 'D']), 'fusing': ('VBG', ['F', 'Y', 'UW1', 'Z', 'IH0', 'NG']), 'fuse': ('NN', ['F', 'Y', 'UW1', 'Z']), 'fusible': 
('JJ', ['F', 'Y', 'UW1', 'Z', 'AH0', 'B', 'AH0', 'L']), 'fusillade': ('NN', ['F', 'Y', 'UW1', 'S', 'IH0', 'L', 'EY2', 'D']), 'fusion': ('NN', ['F', 'Y', 'UW1', 'ZH', 'AH0', 'N']), 'fuss': ('NN', ['F', 'AH1', 'S']), 'fussed': ('VBN', ['F', 'AH1', 'S', 'T']), 'fussing': ('VBG', ['F', 'AH1', 'S', 'IH0', 'NG']), 'fussy': ('NN', ['F', 'AH1', 'S', 'IY0']), 'fust': ('NN', ['F', 'AH1', 'S', 'T']), 'futile': ('NN', ['F', 'Y', 'UW1', 'T', 'AH0', 'L']), 'futility': ('NN', ['F', 'Y', 'UW0', 'T', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'future': ('NN', ['F', 'Y', 'UW1', 'CH', 'ER0']), 'futurist': ('NN', ['F', 'Y', 'UW1', 'CH', 'ER0', 'IH0', 'S', 'T']), 'fuzz': ('NN', ['F', 'AH1', 'Z']), 'fuzzy': ('NN', ['F', 'AH1', 'Z', 'IY0']), 'fyke': ('NN', ['F', 'AY1', 'K']), 'g': ('NN', ['JH', 'IY1']), 'gab': ('NN', ['G', 'AE1', 'B']), 'gabardine': ('NN', ['G', 'AE1', 'B', 'ER0', 'D', 'IY2', 'N']), 'gabbro': ('NN', ['G', 'AE1', 'B', 'R', 'OW0']), 'gabel': ('NN', ['G', 'AH0', 'B', 'EH1', 'L']), 'gabert': ('NN', ['G', 'AE1', 'B', 'ER0', 'T']), 'gable': ('JJ', ['G', 'EY1', 'B', 'AH0', 'L']), 'gaby': ('NN', ['G', 'AE1', 'B', 'IY0']), 'gad': ('NN', ['G', 'AE1', 'D']), 'gadding': ('VBG', ['G', 'AE1', 'D', 'IH0', 'NG']), 'gade': ('NN', ['G', 'EY1', 'D']), 'gadflies': ('NNS', ['G', 'AE1', 'D', 'F', 'L', 'AY2', 'Z']), 'gadfly': ('NN', ['G', 'AE1', 'D', 'F', 'L', 'AY2']), 'gael': ('NN', ['G', 'EY1', 'L']), 'gaelic': ('NN', ['G', 'EY1', 'L', 'IH0', 'K']), 'gaff': ('NN', ['G', 'AE1', 'F']), 'gagged': ('VBN', ['G', 'AE1', 'G', 'D']), 'gag': ('NN', ['G', 'AE1', 'G']), 'gage': ('NN', ['G', 'EY1', 'JH']), 'gager': ('NN', ['G', 'EY1', 'G', 'ER0']), 'gaggle': ('NN', ['G', 'AE1', 'G', 'AH0', 'L']), 'gaillard': ('NN', ['G', 'EY1', 'L', 'ER0', 'D']), 'gaily': ('RB', ['G', 'EY1', 'L', 'IY0']), 'gain': ('NN', ['G', 'EY1', 'N']), 'gained': ('VBN', ['G', 'EY1', 'N', 'D']), 'gaining': ('VBG', ['G', 'EY1', 'N', 'IH0', 'NG']), 'gainer': ('NN', ['G', 'EY1', 'N', 'ER0']), 'gainful': ('NN', ['G', 'EY1', 'N', 'F', 'AH0', 'L']), 
'gainsay': ('NN', ['G', 'EY1', 'N', 'S', 'EY2']), 'gait': ('NN', ['G', 'EY1', 'T']), 'gaiter': ('NN', ['G', 'EY1', 'T', 'ER0']), 'gala': ('NN', ['G', 'AE1', 'L', 'AH0']), 'galactic': ('JJ', ['G', 'AH0', 'L', 'AE1', 'K', 'T', 'IH0', 'K']), 'galactose': ('NN', ['G', 'AH0', 'L', 'AE1', 'K', 'T', 'OW0', 'S']), 'galaxies': ('NNS', ['G', 'AE1', 'L', 'AH0', 'K', 'S', 'IY0', 'Z']), 'galaxy': ('NN', ['G', 'AE1', 'L', 'AH0', 'K', 'S', 'IY0']), 'galban': ('NN', ['G', 'AE1', 'L', 'B', 'AH0', 'N']), 'gale': ('NN', ['G', 'EY1', 'L']), 'galea': ('NN', ['G', 'EY1', 'L', 'IY0', 'AH0']), 'galena': ('NN', ['G', 'AH0', 'L', 'IY1', 'N', 'AH0']), 'galenical': ('JJ', ['G', 'AH0', 'L', 'EH1', 'N', 'IH0', 'K', 'AH0', 'L']), 'galilean': ('NN', ['G', 'AE2', 'L', 'AH0', 'L', 'IY1', 'AH0', 'N']), 'galilee': ('NN', ['G', 'AE1', 'L', 'AH0', 'L', 'IY2']), 'gall': ('NN', ['G', 'AO1', 'L']), 'galling': ('VBG', ['G', 'AO1', 'L', 'IH0', 'NG']), 'gallant': ('NN', ['G', 'AE1', 'L', 'AH0', 'N', 'T']), 'gallantly': ('RB', ['G', 'AE1', 'L', 'AH0', 'N', 'T', 'L', 'IY0']), 'gallantry': ('NN', ['G', 'AE1', 'L', 'AH0', 'N', 'T', 'R', 'IY0']), 'gallego': ('NN', ['G', 'AA0', 'L', 'EH1', 'G', 'OW0']), 'galleon': ('NN', ['G', 'AE1', 'L', 'IY0', 'AH0', 'N']), 'galleries': ('NNS', ['G', 'AE1', 'L', 'ER0', 'IY0', 'Z']), 'gallery': ('NN', ['G', 'AE1', 'L', 'ER0', 'IY0']), 'galleys': ('NNS', ['G', 'AE1', 'L', 'IY0', 'Z']), 'galley': ('NN', ['G', 'AE1', 'L', 'IY0']), 'gallic': ('NN', ['G', 'AE1', 'L', 'IH0', 'K']), 'gallium': ('NN', ['G', 'AE1', 'L', 'IY0', 'AH0', 'M']), 'gallon': ('NN', ['G', 'AE1', 'L', 'AH0', 'N']), 'galloon': ('NN', ['G', 'AH0', 'L', 'UW1', 'N']), 'galloped': ('NNS', ['G', 'AE1', 'L', 'AH0', 'P', 'T']), 'galloping': ('VBG', ['G', 'AE1', 'L', 'AH0', 'P', 'IH0', 'NG']), 'gallop': ('NN', ['G', 'AE1', 'L', 'AH0', 'P']), 'gallow': ('NN', ['G', 'AE1', 'L', 'OW0']), 'galloway': ('NN', ['G', 'AE1', 'L', 'OW0', 'W', 'EY2']), 'gallows': ('NNS', ['G', 'AE1', 'L', 'OW0', 'Z']), 'gallstone': ('NN', ['G', 'AO1', 
'L', 'S', 'T', 'OW2', 'N']), 'galore': ('NN', ['G', 'AH0', 'L', 'AO1', 'R']), 'galt': ('NN', ['G', 'AO1', 'L', 'T']), 'galvanic': ('NN', ['G', 'AE0', 'L', 'V', 'AE1', 'N', 'IH0', 'K']), 'galvanized': ('VBN', ['G', 'AE1', 'L', 'V', 'AH0', 'N', 'AY2', 'Z', 'D']), 'galvanizing': ('VBG', ['G', 'AE1', 'L', 'V', 'AH0', 'N', 'AY2', 'Z', 'IH0', 'NG']), 'galvanize': ('VB', ['G', 'AE1', 'L', 'V', 'AH0', 'N', 'AY2', 'Z']), 'gamba': ('NN', ['G', 'AE1', 'M', 'B', 'AH0']), 'gambit': ('NN', ['G', 'AE1', 'M', 'B', 'IH0', 'T']), 'gambled': ('VBN', ['G', 'AE1', 'M', 'B', 'AH0', 'L', 'D']), 'gambling': ('VBG', ['G', 'AE1', 'M', 'B', 'AH0', 'L', 'IH0', 'NG']), 'gamble': ('NN', ['G', 'AE1', 'M', 'B', 'AH0', 'L']), 'gambler': ('NN', ['G', 'AE1', 'M', 'B', 'L', 'ER0']), 'gambrel': ('NN', ['G', 'AE1', 'M', 'B', 'R', 'AH0', 'L']), 'game': ('NN', ['G', 'EY1', 'M']), 'gaming': ('VBG', ['G', 'EY1', 'M', 'IH0', 'NG']), 'gamecock': ('NN', ['G', 'EY1', 'M', 'K', 'AO2', 'K']), 'gamekeeper': ('NN', ['G', 'EY1', 'M', 'K', 'IY2', 'P', 'ER0']), 'gamely': ('RB', ['G', 'EY1', 'M', 'L', 'IY0']), 'gamma': ('NN', ['G', 'AE1', 'M', 'AH0']), 'gammon': ('NN', ['G', 'AE1', 'M', 'AH0', 'N']), 'gamut': ('NN', ['G', 'AE1', 'M', 'AH0', 'T']), 'gan': ('NN', ['G', 'AE1', 'N']), 'gander': ('NN', ['G', 'AE1', 'N', 'D', 'ER0']), 'gang': ('NN', ['G', 'AE1', 'NG']), 'ganger': ('NN', ['G', 'AE1', 'NG', 'ER0']), 'ganglia': ('NNS', ['G', 'AE1', 'NG', 'G', 'L', 'IY0', 'AH0']), 'ganglionic': ('NN', ['G', 'AE2', 'NG', 'G', 'L', 'IY0', 'AA1', 'N', 'IH0', 'K']), 'gangrene': ('NN', ['G', 'AE1', 'N', 'G', 'R', 'IY0', 'N']), 'gantlet': ('NN', ['G', 'AO1', 'N', 'T', 'L', 'AH0', 'T']), 'gantry': ('NN', ['G', 'AE1', 'N', 'T', 'R', 'IY0']), 'gap': ('NN', ['G', 'AE1', 'P']), 'gaping': ('VBG', ['G', 'EY1', 'P', 'IH0', 'NG']), 'gape': ('NN', ['G', 'EY1', 'P']), 'gar': ('NN', ['G', 'AA1', 'R']), 'garb': ('NN', ['G', 'AA1', 'R', 'B']), 'garbage': ('NN', ['G', 'AA1', 'R', 'B', 'IH0', 'JH']), 'garbed': ('NN', ['G', 'AA1', 'R', 'B', 'D']), 
'garbled': ('VBN', ['G', 'AA1', 'R', 'B', 'AH0', 'L', 'D']), 'garbling': ('VBG', ['G', 'AA1', 'R', 'B', 'L', 'IH0', 'NG']), 'garble': ('JJ', ['G', 'AA1', 'R', 'B', 'AH0', 'L']), 'gard': ('NN', ['G', 'AA1', 'R', 'D']), 'garden': ('NN', ['G', 'AA1', 'R', 'D', 'AH0', 'N']), 'gardened': ('VBN', ['G', 'AA1', 'R', 'D', 'AH0', 'N', 'D']), 'gardening': ('NN', ['G', 'AA1', 'R', 'D', 'AH0', 'N', 'IH0', 'NG']), 'gardener': ('NN', ['G', 'AA1', 'R', 'D', 'AH0', 'N', 'ER0']), 'gardenia': ('NN', ['G', 'AA0', 'R', 'D', 'IY1', 'N', 'Y', 'AH0']), 'gare': ('NN', ['G', 'EH1', 'R']), 'gargantuan': ('NN', ['G', 'AA0', 'R', 'G', 'AE1', 'N', 'CH', 'UW0', 'AH0', 'N']), 'gargoyle': ('NN', ['G', 'AA1', 'R', 'G', 'OY2', 'L']), 'garibaldi': ('NN', ['G', 'AE2', 'R', 'AH0', 'B', 'AO1', 'L', 'D', 'IY0']), 'garish': ('NN', ['G', 'EH1', 'R', 'IH0', 'SH']), 'garland': ('NN', ['G', 'AA1', 'R', 'L', 'AH0', 'N', 'D']), 'garlic': ('NN', ['G', 'AA1', 'R', 'L', 'IH0', 'K']), 'garlicky': ('NN', ['G', 'AA1', 'R', 'L', 'IH0', 'K', 'IY0']), 'garment': ('NN', ['G', 'AA1', 'R', 'M', 'AH0', 'N', 'T']), 'garner': ('NN', ['G', 'AA1', 'R', 'N', 'ER0']), 'garnered': ('VBN', ['G', 'AA1', 'R', 'N', 'ER0', 'D']), 'garnering': ('VBG', ['G', 'AA1', 'R', 'N', 'ER0', 'IH0', 'NG']), 'garnet': ('NN', ['G', 'AA1', 'R', 'N', 'AH0', 'T']), 'garnished': ('VBN', ['G', 'AA1', 'R', 'N', 'IH0', 'SH', 'T']), 'garnish': ('NN', ['G', 'AA1', 'R', 'N', 'IH0', 'SH']), 'garnishment': ('NN', ['G', 'AA1', 'R', 'N', 'IH0', 'SH', 'M', 'AH0', 'N', 'T']), 'garret': ('NN', ['G', 'EH1', 'R', 'IH0', 'T']), 'garrison': ('NN', ['G', 'AE1', 'R', 'IH0', 'S', 'AH0', 'N']), 'garrisoned': ('VBN', ['G', 'AE1', 'R', 'AH0', 'S', 'AH0', 'N', 'D']), 'garron': ('NN', ['G', 'AE1', 'R', 'AH0', 'N']), 'garrulous': ('JJ', ['G', 'EH1', 'R', 'AH0', 'L', 'AH0', 'S']), 'garter': ('NN', ['G', 'AA1', 'R', 'T', 'ER0']), 'garth': ('NN', ['G', 'AA1', 'R', 'TH']), 'garvie': ('NN', ['G', 'AA1', 'R', 'V', 'IY0']), 'gases': ('NNS', ['G', 'AE1', 'S', 'AH0', 'Z']), 'gas': ('NN', 
['G', 'AE1', 'S']), 'gascon': ('NN', ['G', 'AE1', 'S', 'K', 'AH0', 'N']), 'gaseous': ('JJ', ['G', 'AE1', 'S', 'IY0', 'AH0', 'S']), 'gashed': ('VBN', ['G', 'AE1', 'SH', 'T']), 'gash': ('NN', ['G', 'AE1', 'SH']), 'gasification': ('NN', ['G', 'AE2', 'S', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'gasify': ('NN', ['G', 'AE2', 'S', 'AH0', 'F', 'AY0']), 'gasket': ('NN', ['G', 'AE1', 'S', 'K', 'AH0', 'T']), 'gaskins': ('NNS', ['G', 'AE1', 'S', 'K', 'IH0', 'N', 'Z']), 'gaslight': ('NN', ['G', 'AE1', 'S', 'L', 'AY0', 'T']), 'gasoline': ('NN', ['G', 'AE1', 'S', 'AH0', 'L', 'IY2', 'N']), 'gasped': ('NNS', ['G', 'AE1', 'S', 'P', 'T']), 'gasping': ('VBG', ['G', 'AE1', 'S', 'P', 'IH0', 'NG']), 'gasp': ('NN', ['G', 'AE1', 'S', 'P']), 'gassing': ('VBG', ['G', 'AE1', 'S', 'IH0', 'NG']), 'gast': ('NN', ['G', 'AE1', 'S', 'T']), 'gaster': ('NN', ['G', 'AE1', 'S', 'T', 'ER0']), 'gastric': ('NN', ['G', 'AE1', 'S', 'T', 'R', 'IH0', 'K']), 'gastritis': ('NN', ['G', 'AE0', 'S', 'T', 'R', 'AY1', 'T', 'AH0', 'S']), 'gastrointestinal': ('NN', ['G', 'AE2', 'S', 'T', 'R', 'OW0', 'IH0', 'N', 'T', 'EH1', 'S', 'T', 'AH0', 'N', 'AH0', 'L']), 'gastronomic': ('NN', ['G', 'AH0', 'S', 'T', 'R', 'AA2', 'N', 'AA1', 'M', 'IH0', 'K']), 'gastronomy': ('NN', ['G', 'AE0', 'S', 'T', 'R', 'AA1', 'N', 'AH0', 'M', 'IY0']), 'gastroscope': ('NN', ['G', 'AE1', 'S', 'T', 'R', 'AH0', 'S', 'K', 'OW2', 'P']), 'gastrovascular': ('NN', ['G', 'AE2', 'S', 'T', 'R', 'OW0', 'V', 'AE1', 'S', 'K', 'Y', 'AH0', 'L', 'ER0']), 'gastrulation': ('NN', ['G', 'AE2', 'S', 'T', 'R', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'gat': ('NN', ['G', 'AE1', 'T']), 'gate': ('NN', ['G', 'EY1', 'T']), 'gated': ('VBN', ['G', 'EY1', 'T', 'IH0', 'D']), 'gateway': ('NN', ['G', 'EY1', 'T', 'W', 'EY2']), 'gathered': ('VBN', ['G', 'AE1', 'DH', 'ER0', 'D']), 'gathering': ('NN', ['G', 'AE1', 'DH', 'ER0', 'IH0', 'NG']), 'gather': ('NN', ['G', 'AE1', 'DH', 'ER0']), 'gatherer': ('NN', ['G', 'AE1', 'DH', 'ER0', 'ER0']), 'gauche': ('NN', ['G', 'OW1', 
'SH']), 'gauchos': ('NN', ['G', 'AW1', 'CH', 'OW0', 'Z']), 'gaucho': ('NN', ['G', 'AW1', 'CH', 'OW0']), 'gaudy': ('NN', ['G', 'AO1', 'D', 'IY0']), 'gauged': ('VBN', ['G', 'EY1', 'JH', 'D']), 'gauging': ('VBG', ['G', 'EY1', 'JH', 'IH0', 'NG']), 'gauge': ('NN', ['G', 'EY1', 'JH']), 'gauger': ('NN', ['G', 'EY1', 'JH', 'ER0']), 'gaul': ('NN', ['G', 'AO1', 'L']), 'gault': ('NN', ['G', 'AO1', 'L', 'T']), 'gaunt': ('NN', ['G', 'AO1', 'N', 'T']), 'gauntlet': ('NN', ['G', 'AO1', 'N', 'T', 'L', 'AH0', 'T']), 'gauze': ('NN', ['G', 'AO1', 'Z']), 'gave': ('VBD', ['G', 'EY1', 'V']), 'gavel': ('NN', ['G', 'AE1', 'V', 'AH0', 'L']), 'gawk': ('NN', ['G', 'AO1', 'K']), 'gawky': ('NN', ['G', 'AO1', 'K', 'IY0']), 'gay': ('NN', ['G', 'EY1']), 'gayness': ('NN', ['G', 'EY1', 'N', 'AH0', 'S']), 'gazed': ('VBN', ['G', 'EY1', 'Z', 'D']), 'gazing': ('VBG', ['G', 'EY1', 'Z', 'IH0', 'NG']), 'gaze': ('NN', ['G', 'EY1', 'Z']), 'gazelle': ('NN', ['G', 'AH0', 'Z', 'EH1', 'L']), 'gazette': ('NN', ['G', 'AH0', 'Z', 'EH1', 'T']), 'gean': ('NN', ['JH', 'IY1', 'N']), 'gear': ('NN', ['G', 'IH1', 'R']), 'geared': ('VBN', ['G', 'IH1', 'R', 'D']), 'gearing': ('VBG', ['G', 'IH1', 'R', 'IH0', 'NG']), 'geck': ('NN', ['JH', 'EH1', 'K']), 'gecko': ('NN', ['G', 'EH1', 'K', 'OW0']), 'ged': ('VBN', ['G', 'EH1', 'D']), 'geeing': ('VBG', ['JH', 'IY1', 'IH0', 'NG']), 'gee': ('NN', ['JH', 'IY1']), 'geer': ('NN', ['G', 'IH1', 'R']), 'geese': ('JJ', ['G', 'IY1', 'S']), 'geez': ('NN', ['JH', 'IY1', 'Z']), 'gelatin': ('NN', ['JH', 'EH1', 'L', 'AH0', 'T', 'AH0', 'N']), 'gelatine': ('NN', ['JH', 'EH2', 'L', 'AH0', 'T', 'IY1', 'N']), 'gelatinous': ('JJ', ['JH', 'AH0', 'L', 'AE1', 'T', 'AH0', 'N', 'AH0', 'S']), 'gelder': ('NN', ['G', 'EH1', 'L', 'D', 'ER0']), 'gem': ('NN', ['JH', 'EH1', 'M']), 'geminate': ('NN', ['JH', 'EH1', 'M', 'AH0', 'N', 'AH0', 'T']), 'gemini': ('NN', ['JH', 'EH1', 'M', 'AH0', 'N', 'AY2']), 'gemma': ('NN', ['JH', 'EH1', 'M', 'AH0']), 'gems': ('NNS', ['JH', 'EH1', 'M', 'Z']), 'gemsbok': ('NNS', ['G', 
'EH1', 'M', 'Z', 'B', 'AA0', 'K']), 'gena': ('NN', ['JH', 'EH1', 'N', 'AH0']), 'gendarme': ('NN', ['ZH', 'AA1', 'N', 'D', 'AA2', 'R', 'M']), 'gender': ('NN', ['JH', 'EH1', 'N', 'D', 'ER0']), 'genealogy': ('NN', ['JH', 'IY2', 'N', 'IY0', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'genera': ('NN', ['JH', 'EH1', 'N', 'ER0', 'AH0']), 'general': ('JJ', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'L']), 'generalissimo': ('NN', ['JH', 'EH2', 'N', 'EH0', 'R', 'AH0', 'L', 'IH1', 'S', 'IH0', 'M', 'OW2']), 'generalities': ('NNS', ['JH', 'EH2', 'N', 'ER0', 'AE1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'generality': ('NN', ['JH', 'EH2', 'N', 'ER0', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'generalization': ('NN', ['JH', 'EH2', 'N', 'ER0', 'AH0', 'L', 'IH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'generalized': ('VBN', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'D']), 'generalizing': ('VBG', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'generalize': ('NN', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'L', 'AY2', 'Z']), 'generally': ('RB', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'L', 'IY0']), 'generalship': ('NN', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'L', 'SH', 'IH2', 'P']), 'generated': ('VBN', ['JH', 'EH1', 'N', 'ER0', 'EY2', 'T', 'AH0', 'D']), 'generating': ('VBG', ['JH', 'EH1', 'N', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'generate': ('NN', ['JH', 'EH1', 'N', 'ER0', 'EY2', 'T']), 'generation': ('NN', ['JH', 'EH2', 'N', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'generative': ('NN', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'T', 'IH0', 'V']), 'generator': ('NN', ['JH', 'EH1', 'N', 'ER0', 'EY2', 'T', 'ER0']), 'generic': ('NN', ['JH', 'AH0', 'N', 'EH1', 'R', 'IH0', 'K']), 'generically': ('RB', ['JH', 'AH0', 'N', 'EH1', 'R', 'IH0', 'K', 'L', 'IY0']), 'generosity': ('NN', ['JH', 'EH2', 'N', 'ER0', 'AA1', 'S', 'AH0', 'T', 'IY0']), 'generous': ('JJ', ['JH', 'EH1', 'N', 'ER0', 'AH0', 'S']), 'genesis': ('NN', ['JH', 'EH1', 'N', 'AH0', 'S', 'AH0', 'S']), 'genet': ('NN', ['JH', 'EH1', 'N', 'IH0', 'T']), 'genetic': ('JJ', ['JH', 'AH0', 'N', 'EH1', 
'T', 'IH0', 'K']), 'genetically': ('RB', ['JH', 'AH0', 'N', 'EH1', 'T', 'IH0', 'K', 'L', 'IY0']), 'geneva': ('NN', ['JH', 'AH0', 'N', 'IY1', 'V', 'AH0']), 'genial': ('NN', ['JH', 'IY1', 'N', 'Y', 'AH0', 'L']), 'geniality': ('NN', ['JH', 'IY2', 'N', 'IY0', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'genie': ('NN', ['JH', 'IY1', 'N', 'IY0']), 'genital': ('NN', ['JH', 'EH1', 'N', 'AH0', 'T', 'AH0', 'L']), 'genitals': ('NNS', ['JH', 'EH1', 'N', 'AH0', 'T', 'AH0', 'L', 'Z']), 'geniuses': ('NNS', ['JH', 'IY1', 'N', 'Y', 'AH0', 'S', 'IH0', 'Z']), 'genius': ('NN', ['JH', 'IY1', 'N', 'Y', 'AH0', 'S']), 'genoese': ('JJ', ['JH', 'EH1', 'N', 'OW0', 'S']), 'genre': ('NN', ['ZH', 'AA1', 'N', 'R', 'AH0']), 'gens': ('NNS', ['JH', 'EH1', 'N', 'Z']), 'gent': ('NN', ['JH', 'EH1', 'N', 'T']), 'genteel': ('NN', ['JH', 'EH0', 'N', 'T', 'IY1', 'L']), 'gentian': ('JJ', ['JH', 'EH1', 'N', 'SH', 'AH0', 'N']), 'gentile': ('NN', ['JH', 'EH1', 'N', 'T', 'AY2', 'L']), 'gentility': ('NN', ['JH', 'EH0', 'N', 'T', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'gentle': ('NN', ['JH', 'EH1', 'N', 'T', 'AH0', 'L']), 'gentlemen': ('NNS', ['JH', 'EH1', 'N', 'T', 'AH0', 'L', 'M', 'IH0', 'N']), 'gentleman': ('NN', ['JH', 'EH1', 'N', 'T', 'AH0', 'L', 'M', 'AH0', 'N']), 'gentlemanly': ('RB', ['JH', 'EH1', 'N', 'T', 'AH0', 'L', 'M', 'AH0', 'N', 'L', 'IY0']), 'gentleness': ('NN', ['JH', 'EH1', 'N', 'T', 'AH0', 'L', 'N', 'AH0', 'S']), 'gentlewomen': ('NNS', ['JH', 'EH1', 'N', 'T', 'AH0', 'L', 'W', 'IH2', 'M', 'AH0', 'N']), 'gentlewoman': ('NN', ['JH', 'EH1', 'N', 'T', 'AH0', 'L', 'W', 'UH2', 'M', 'AH0', 'N']), 'gently': ('RB', ['JH', 'EH1', 'N', 'T', 'L', 'IY0']), 'gentry': ('NN', ['JH', 'EH1', 'N', 'T', 'R', 'IY0']), 'genuine': ('NN', ['JH', 'EH1', 'N', 'Y', 'AH0', 'W', 'AH0', 'N']), 'genus': ('NN', ['JH', 'IY1', 'N', 'AH0', 'S']), 'geocentric': ('NN', ['JH', 'IY2', 'OW0', 'S', 'EH1', 'N', 'T', 'R', 'IH0', 'K']), 'geodesic': ('NN', ['JH', 'IY2', 'AH0', 'D', 'EH1', 'S', 'IH0', 'K']), 'geodesy': ('NN', ['JH', 'IY0', 'AA1', 'D', 
'AH0', 'S', 'IY0']), 'geographer': ('NN', ['JH', 'IY0', 'AA1', 'G', 'R', 'AH0', 'F', 'ER0']), 'geographic': ('JJ', ['JH', 'IY2', 'AH0', 'G', 'R', 'AE1', 'F', 'IH0', 'K']), 'geographical': ('JJ', ['JH', 'IY2', 'AH0', 'G', 'R', 'AE1', 'F', 'IH0', 'K', 'AH0', 'L']), 'geographically': ('RB', ['JH', 'IY2', 'AH0', 'G', 'R', 'AE1', 'F', 'IH0', 'K', 'AH0', 'L', 'IY0']), 'geography': ('NN', ['JH', 'IY0', 'AA1', 'G', 'R', 'AH0', 'F', 'IY0']), 'geologic': ('NN', ['JH', 'IY2', 'AH0', 'L', 'AA1', 'JH', 'IH0', 'K']), 'geological': ('JJ', ['JH', 'IY2', 'AH0', 'L', 'AA1', 'JH', 'IH0', 'K', 'AH0', 'L']), 'geologist': ('NN', ['JH', 'IY0', 'AA1', 'L', 'AH0', 'JH', 'AH0', 'S', 'T']), 'geology': ('NN', ['JH', 'IY0', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'geometric': ('NN', ['JH', 'IY2', 'AH0', 'M', 'EH1', 'T', 'R', 'IH0', 'K']), 'geometrical': ('JJ', ['JH', 'IY2', 'AH0', 'M', 'EH1', 'T', 'R', 'IH0', 'K', 'AH0', 'L']), 'geometrically': ('RB', ['JH', 'IY2', 'AH0', 'M', 'EH1', 'T', 'R', 'IH0', 'K', 'L', 'IY0']), 'geometries': ('NNS', ['JH', 'IY0', 'AA1', 'M', 'AH0', 'T', 'R', 'IY0', 'Z']), 'geometry': ('NN', ['JH', 'IY0', 'AA1', 'M', 'AH0', 'T', 'R', 'IY0']), 'geordie': ('NN', ['JH', 'IY1', 'ER0', 'D', 'IY0']), 'george': ('NN', ['JH', 'AO1', 'R', 'JH']), 'georgian': ('JJ', ['JH', 'AO1', 'R', 'JH', 'AH0', 'N']), 'geotropic': ('NN', ['JH', 'IY2', 'AH0', 'T', 'R', 'AA1', 'P', 'IH0', 'K']), 'geotropism': ('NN', ['JH', 'IY0', 'AA1', 'T', 'R', 'AH0', 'P', 'IH2', 'Z', 'AH0', 'M']), 'geranium': ('NN', ['JH', 'ER0', 'EY1', 'N', 'IY0', 'AH0', 'M']), 'gere': ('RB', ['JH', 'IH1', 'R']), 'germ': ('NN', ['JH', 'ER1', 'M']), 'germain': ('NN', ['JH', 'ER0', 'M', 'EY1', 'N']), 'german': ('JJ', ['JH', 'ER1', 'M', 'AH0', 'N']), 'germans': ('NNS', ['JH', 'ER1', 'M', 'AH0', 'N', 'Z']), 'germane': ('NN', ['JH', 'ER0', 'M', 'EY1', 'N']), 'germanic': ('NN', ['JH', 'ER0', 'M', 'AE1', 'N', 'IH0', 'K']), 'germicide': ('NN', ['JH', 'ER1', 'M', 'AH0', 'S', 'AY2', 'D']), 'germinated': ('VBN', ['JH', 'ER1', 'M', 'AH0', 
'N', 'EY2', 'T', 'IH0', 'D']), 'germinate': ('NN', ['JH', 'ER1', 'M', 'AH0', 'N', 'EY2', 'T']), 'germination': ('NN', ['JH', 'ER2', 'M', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'gerner': ('NN', ['G', 'ER1', 'N', 'ER0']), 'gerrymandered': ('VBN', ['JH', 'EH2', 'R', 'IY0', 'M', 'AE1', 'N', 'D', 'ER0', 'D']), 'gerrymandering': ('VBG', ['JH', 'EH2', 'R', 'IY0', 'M', 'AE1', 'N', 'D', 'ER0', 'IH0', 'NG']), 'gerrymander': ('NN', ['JH', 'EH1', 'R', 'IY0', 'M', 'AE2', 'N', 'D', 'ER0']), 'gery': ('NN', ['JH', 'EH1', 'R', 'IY0']), 'gest': ('NN', ['JH', 'EH1', 'S', 'T']), 'gestation': ('NN', ['JH', 'EH0', 'S', 'T', 'EY1', 'SH', 'AH0', 'N']), 'gesture': ('NN', ['JH', 'EH1', 'S', 'CH', 'ER0']), 'gestured': ('VBN', ['JH', 'EH1', 'S', 'CH', 'ER0', 'D']), 'gesturing': ('VBG', ['JH', 'EH1', 'S', 'CH', 'ER0', 'IH0', 'NG']), 'get': ('VB', ['G', 'EH1', 'T']), 'got': ('VBD', ['G', 'AA1', 'T']), 'gotten': ('NNS', ['G', 'AA1', 'T', 'AH0', 'N']), 'getting': ('VBG', ['G', 'EH1', 'T', 'IH0', 'NG']), 'getter': ('NN', ['G', 'EH1', 'T', 'ER0']), 'gewgaw': ('NN', ['G', 'Y', 'UW1', 'G', 'AA0']), 'geyser': ('NN', ['G', 'AY1', 'Z', 'ER0']), 'ghastliness': ('NN', ['G', 'AE1', 'S', 'T', 'L', 'IY0', 'N', 'AH0', 'S']), 'ghastly': ('RB', ['G', 'AE1', 'S', 'T', 'L', 'IY0']), 'ghee': ('NN', ['G', 'IY1']), 'ghetto': ('NN', ['G', 'EH1', 'T', 'OW0']), 'ghost': ('NN', ['G', 'OW1', 'S', 'T']), 'ghostlike': ('NN', ['G', 'OW1', 'S', 'T', 'L', 'AY2', 'K']), 'ghostly': ('RB', ['G', 'OW1', 'S', 'T', 'L', 'IY0']), 'ghoul': ('NN', ['G', 'UW1', 'L']), 'ghoulish': ('NN', ['G', 'UW1', 'L', 'IH0', 'SH']), 'giant': ('NN', ['JH', 'AY1', 'AH0', 'N', 'T']), 'gib': ('NN', ['G', 'IH1', 'B']), 'gibberish': ('NN', ['G', 'IH1', 'B', 'ER0', 'IH0', 'SH']), 'gibbon': ('NN', ['G', 'IH1', 'B', 'AH0', 'N']), 'gibe': ('NN', ['JH', 'AY1', 'B']), 'giblet': ('NN', ['JH', 'IH1', 'B', 'L', 'AH0', 'T']), 'giddy': ('NN', ['G', 'IH1', 'D', 'IY0']), 'giffy': ('NN', ['G', 'IH1', 'F', 'IY0']), 'gift': ('NN', ['G', 'IH1', 'F', 'T']), 'gifted': 
('VBN', ['G', 'IH1', 'F', 'T', 'AH0', 'D']), 'gifting': ('VBG', ['G', 'IH1', 'F', 'T', 'IH0', 'NG']), 'gig': ('NN', ['G', 'IH1', 'G']), 'gigantic': ('JJ', ['JH', 'AY0', 'G', 'AE1', 'N', 'T', 'IH0', 'K']), 'guide': ('NN', ['G', 'AY1', 'D']), 'giggled': ('VBN', ['G', 'IH1', 'G', 'AH0', 'L', 'D']), 'giggling': ('VBG', ['G', 'IH1', 'G', 'AH0', 'L', 'IH0', 'NG']), 'giggle': ('NN', ['G', 'IH1', 'G', 'AH0', 'L']), 'giggly': ('RB', ['G', 'IH1', 'G', 'AH0', 'L', 'IY0']), 'gigot': ('NN', ['JH', 'IH1', 'G', 'AH0', 'T']), 'gilded': ('VBN', ['G', 'IH1', 'L', 'D', 'IH0', 'D']), 'gilt': ('NN', ['G', 'IH1', 'L', 'T']), 'gilding': ('VBG', ['G', 'IH1', 'L', 'D', 'IH0', 'NG']), 'gild': ('NN', ['G', 'IH1', 'L', 'D']), 'gilden': ('NN', ['G', 'IH1', 'L', 'D', 'AH0', 'N']), 'gilder': ('NN', ['G', 'IH1', 'L', 'D', 'ER0']), 'gile': ('NN', ['G', 'AY1', 'L']), 'gill': ('NN', ['G', 'IH1', 'L']), 'gillian': ('NN', ['JH', 'IH1', 'L', 'IY0', 'AH0', 'N']), 'gin': ('NN', ['JH', 'IH1', 'N']), 'gun': ('NN', ['G', 'AH1', 'N']), 'ginning': ('VBG', ['JH', 'IH1', 'N', 'IH0', 'NG']), 'ginned': ('VBN', ['JH', 'IH1', 'N', 'D']), 'ging': ('VBG', ['JH', 'IH1', 'NG']), 'ginger': ('NN', ['JH', 'IH1', 'N', 'JH', 'ER0']), 'gingerbread': ('NN', ['JH', 'IH1', 'N', 'JH', 'ER0', 'B', 'R', 'EH2', 'D']), 'gingerly': ('RB', ['JH', 'IH1', 'N', 'JH', 'ER0', 'L', 'IY0']), 'gingham': ('NN', ['G', 'IH1', 'NG', 'AH0', 'M']), 'ginn': ('NN', ['JH', 'IH1', 'N']), 'ginseng': ('NN', ['JH', 'IH1', 'N', 'S', 'EH2', 'NG']), 'giraffe': ('NN', ['JH', 'ER0', 'AE1', 'F']), 'gird': ('NN', ['G', 'ER1', 'D']), 'girt': ('NN', ['G', 'ER1', 'T']), 'girding': ('VBG', ['G', 'ER1', 'D', 'IH0', 'NG']), 'girder': ('NN', ['G', 'ER1', 'D', 'ER0']), 'girdle': ('NN', ['G', 'ER1', 'D', 'AH0', 'L']), 'girdler': ('NN', ['G', 'ER1', 'D', 'AH0', 'L', 'ER0']), 'gire': ('NN', ['G', 'AY1', 'R']), 'girl': ('NN', ['G', 'ER1', 'L']), 'girlhood': ('NN', ['G', 'ER1', 'L', 'HH', 'UH2', 'D']), 'girlish': ('NN', ['G', 'ER1', 'L', 'IH0', 'SH']), 'girth': ('NN', ['G', 
'ER1', 'TH']), 'gist': ('NN', ['JH', 'IH1', 'S', 'T']), 'giusto': ('NN', ['JH', 'UW1', 'S', 'T', 'OW0']), 'given': ('VBN', ['G', 'IH1', 'V', 'AH0', 'N']), 'giving': ('VBG', ['G', 'IH1', 'V', 'IH0', 'NG']), 'give': ('VB', ['G', 'IH1', 'V']), 'giver': ('NN', ['G', 'IH1', 'V', 'ER0']), 'gives': ('VBZ', ['G', 'IH1', 'V', 'Z']), 'gizzard': ('NN', ['G', 'IH1', 'Z', 'ER0', 'D']), 'glacial': ('JJ', ['G', 'L', 'EY1', 'SH', 'AH0', 'L']), 'glaciate': ('NN', ['G', 'L', 'EY1', 'SH', 'IY0', 'EY2', 'T']), 'glaciation': ('NN', ['G', 'L', 'EY2', 'SH', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'glacier': ('NN', ['G', 'L', 'EY1', 'SH', 'ER0']), 'glacis': ('NN', ['G', 'L', 'EY1', 'S', 'AH0', 'S']), 'glad': ('NN', ['G', 'L', 'AE1', 'D']), 'gladding': ('VBG', ['G', 'L', 'AE1', 'D', 'IH0', 'NG']), 'gladden': ('NN', ['G', 'L', 'AE1', 'D', 'AH0', 'N']), 'glade': ('NN', ['G', 'L', 'EY1', 'D']), 'gladiator': ('NN', ['G', 'L', 'AE1', 'D', 'IY0', 'EY2', 'T', 'ER0']), 'gladiolus': ('NN', ['G', 'L', 'AE2', 'D', 'IY0', 'OW1', 'L', 'AH0', 'S']), 'gladly': ('RB', ['G', 'L', 'AE1', 'D', 'L', 'IY0']), 'gladstone': ('NN', ['G', 'L', 'AE1', 'D', 'S', 'T', 'OW2', 'N']), 'glamour': ('NN', ['G', 'L', 'AE1', 'M', 'ER0']), 'glance': ('NN', ['G', 'L', 'AE1', 'N', 'S']), 'glanced': ('VBN', ['G', 'L', 'AE1', 'N', 'S', 'T']), 'glancing': ('VBG', ['G', 'L', 'AE1', 'N', 'S', 'IH0', 'NG']), 'gland': ('NN', ['G', 'L', 'AE1', 'N', 'D']), 'glandular': ('NN', ['G', 'L', 'AE1', 'N', 'JH', 'AH0', 'L', 'ER0']), 'glared': ('VBN', ['G', 'L', 'EH1', 'R', 'D']), 'glaring': ('VBG', ['G', 'L', 'EH1', 'R', 'IH0', 'NG']), 'glare': ('NN', ['G', 'L', 'EH1', 'R']), 'glass': ('NN', ['G', 'L', 'AE1', 'S']), 'glassed': ('VBN', ['G', 'L', 'AE1', 'S', 'T']), 'glassmaker': ('NN', ['G', 'L', 'AE1', 'S', 'M', 'EY2', 'K', 'ER0']), 'glassware': ('NN', ['G', 'L', 'AE1', 'S', 'W', 'EH2', 'R']), 'glassy': ('NN', ['G', 'L', 'AE1', 'S', 'IY0']), 'glaucoma': ('NN', ['G', 'L', 'AO0', 'K', 'OW1', 'M', 'AH0']), 'glazing': ('VBG', ['G', 'L', 'EY1', 'Z', 
'IH0', 'NG']), 'glaze': ('NN', ['G', 'L', 'EY1', 'Z']), 'glazer': ('NN', ['G', 'L', 'EY1', 'Z', 'ER0']), 'glazier': ('NN', ['G', 'L', 'EY1', 'Z', 'IY0', 'ER0']), 'gleam': ('NN', ['G', 'L', 'IY1', 'M']), 'gleamed': ('VBN', ['G', 'L', 'IY1', 'M', 'D']), 'gleaming': ('VBG', ['G', 'L', 'IY1', 'M', 'IH0', 'NG']), 'gleaned': ('VBN', ['G', 'L', 'IY1', 'N', 'D']), 'glean': ('NN', ['G', 'L', 'IY1', 'N']), 'glee': ('NN', ['G', 'L', 'IY1']), 'gleeful': ('NN', ['G', 'L', 'IY1', 'F', 'AH0', 'L']), 'glen': ('NN', ['G', 'L', 'EH1', 'N']), 'glew': ('NN', ['G', 'L', 'UW1']), 'glib': ('NN', ['G', 'L', 'IH1', 'B']), 'glibly': ('RB', ['G', 'L', 'IH1', 'B', 'L', 'IY0']), 'glidden': ('NN', ['G', 'L', 'IH1', 'D', 'AH0', 'N']), 'glide': ('NN', ['G', 'L', 'AY1', 'D']), 'glided': ('VBN', ['G', 'L', 'AY1', 'D', 'IH0', 'D']), 'gliding': ('VBG', ['G', 'L', 'AY1', 'D', 'IH0', 'NG']), 'glider': ('NN', ['G', 'L', 'AY1', 'D', 'ER0']), 'glimmering': ('VBG', ['G', 'L', 'IH1', 'M', 'ER0', 'IH0', 'NG']), 'glimmer': ('NN', ['G', 'L', 'IH1', 'M', 'ER0']), 'glimpse': ('NN', ['G', 'L', 'IH1', 'M', 'P', 'S']), 'glimpsed': ('VBN', ['G', 'L', 'IH1', 'M', 'P', 'S', 'T']), 'glint': ('NN', ['G', 'L', 'IH1', 'N', 'T']), 'glinting': ('VBG', ['G', 'L', 'IH1', 'N', 'T', 'IH0', 'NG']), 'glistened': ('VBN', ['G', 'L', 'IH1', 'S', 'AH0', 'N', 'D']), 'glistening': ('VBG', ['G', 'L', 'IH1', 'S', 'AH0', 'N', 'IH0', 'NG']), 'glisten': ('NNS', ['G', 'L', 'IH1', 'S', 'AH0', 'N']), 'glittered': ('VBN', ['G', 'L', 'IH1', 'T', 'ER0', 'D']), 'glittering': ('VBG', ['G', 'L', 'IH1', 'T', 'ER0', 'IH0', 'NG']), 'glitter': ('NN', ['G', 'L', 'IH1', 'T', 'ER0']), 'gloam': ('NN', ['G', 'L', 'OW1', 'M']), 'gloaming': ('VBG', ['G', 'L', 'OW1', 'M', 'IH0', 'NG']), 'gloated': ('VBN', ['G', 'L', 'OW1', 'T', 'IH0', 'D']), 'gloating': ('VBG', ['G', 'L', 'OW1', 'T', 'IH0', 'NG']), 'gloat': ('NN', ['G', 'L', 'OW1', 'T']), 'globe': ('NN', ['G', 'L', 'OW1', 'B']), 'globular': ('NN', ['G', 'L', 'AA1', 'B', 'Y', 'AH0', 'L', 'ER0']), 'globulin': 
('NN', ['G', 'L', 'AA1', 'B', 'Y', 'AH0', 'L', 'IH0', 'N']), 'gloom': ('NN', ['G', 'L', 'UW1', 'M']), 'gloomily': ('RB', ['G', 'L', 'UW1', 'M', 'AH0', 'L', 'IY0']), 'gloomy': ('NN', ['G', 'L', 'UW1', 'M', 'IY0']), 'glore': ('NN', ['G', 'L', 'AO1', 'R']), 'gloria': ('NNS', ['G', 'L', 'AO1', 'R', 'IY0', 'AH0']), 'glorification': ('NN', ['G', 'L', 'AO2', 'R', 'AH0', 'F', 'IH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'glorified': ('VBN', ['G', 'L', 'AO1', 'R', 'AH0', 'F', 'AY2', 'D']), 'glorifying': ('VBG', ['G', 'L', 'AO1', 'R', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'glorify': ('NN', ['G', 'L', 'AO1', 'R', 'AH0', 'F', 'AY2']), 'glorioso': ('NN', ['G', 'L', 'AO0', 'R', 'IY0', 'OW1', 'S', 'OW0']), 'glorious': ('JJ', ['G', 'L', 'AO1', 'R', 'IY0', 'AH0', 'S']), 'glory': ('NN', ['G', 'L', 'AO1', 'R', 'IY0']), 'gloss': ('NN', ['G', 'L', 'AO1', 'S']), 'glossed': ('VBN', ['G', 'L', 'AO1', 'S', 'T']), 'glossary': ('JJ', ['G', 'L', 'AO1', 'S', 'ER0', 'IY0']), 'glosser': ('NN', ['G', 'L', 'AO1', 'S', 'ER0']), 'glossy': ('NN', ['G', 'L', 'AO1', 'S', 'IY0']), 'glottal': ('NN', ['G', 'L', 'AA1', 'T', 'AH0', 'L']), 'glottis': ('NN', ['G', 'L', 'AA1', 'T', 'AH0', 'S']), 'glove': ('NN', ['G', 'L', 'AH1', 'V']), 'gloved': ('VBN', ['G', 'L', 'AH1', 'V', 'D']), 'glover': ('NN', ['G', 'L', 'AH1', 'V', 'ER0']), 'glowed': ('NN', ['G', 'L', 'OW1', 'D']), 'glowing': ('VBG', ['G', 'L', 'OW1', 'IH0', 'NG']), 'glow': ('NN', ['G', 'L', 'OW1']), 'glowered': ('VBN', ['G', 'L', 'AW1', 'ER0', 'D']), 'glowering': ('VBG', ['G', 'L', 'AW1', 'ER0', 'IH0', 'NG']), 'glower': ('NN', ['G', 'L', 'AW1', 'ER0']), 'glowingly': ('RB', ['G', 'L', 'OW1', 'IH0', 'NG', 'L', 'IY0']), 'glucose': ('NN', ['G', 'L', 'UW1', 'K', 'OW2', 'S']), 'glucoside': ('NN', ['G', 'L', 'UW1', 'K', 'AH0', 'S', 'AY2', 'D']), 'glue': ('NN', ['G', 'L', 'UW1']), 'glued': ('VBN', ['G', 'L', 'UW1', 'D']), 'glum': ('NN', ['G', 'L', 'AH1', 'M']), 'glumly': ('NN', ['G', 'L', 'AH1', 'M', 'L', 'IY0']), 'glutted': ('VBN', ['G', 'L', 'AH1', 'T', 'IH0', 'D']), 
'glut': ('NN', ['G', 'L', 'AH1', 'T']), 'glutamic': ('NN', ['G', 'L', 'UW0', 'T', 'AE1', 'M', 'IH0', 'K']), 'glutaric': ('NN', ['G', 'L', 'UW0', 'T', 'AE1', 'R', 'IH0', 'K']), 'gluten': ('NNS', ['G', 'L', 'UW1', 'T', 'AH0', 'N']), 'gluttonous': ('JJ', ['G', 'L', 'AH1', 'T', 'AH0', 'N', 'AH0', 'S']), 'gluttony': ('NN', ['G', 'L', 'AH1', 'T', 'AH0', 'N', 'IY0']), 'glycerol': ('NN', ['G', 'L', 'IH1', 'S', 'ER0', 'OW2', 'L']), 'glycogen': ('NN', ['G', 'L', 'AY1', 'K', 'AH0', 'JH', 'IH0', 'N']), 'glycol': ('NN', ['G', 'L', 'AY1', 'K', 'AO2', 'L']), 'glycolic': ('NN', ['G', 'L', 'AY0', 'K', 'AO1', 'L', 'AH0', 'K']), 'glyn': ('NN', ['G', 'L', 'IH1', 'N']), 'gnarled': ('VBN', ['N', 'AA1', 'R', 'L', 'D']), 'gnarling': ('VBG', ['N', 'AA1', 'R', 'L', 'IH0', 'NG']), 'gnarl': ('NN', ['N', 'AA1', 'R', 'L']), 'gnarly': ('RB', ['N', 'AA1', 'R', 'L', 'IY0']), 'gnashing': ('VBG', ['N', 'AE1', 'SH', 'IH0', 'NG']), 'gnash': ('NN', ['N', 'AE1', 'SH']), 'gnat': ('NN', ['N', 'AE1', 'T']), 'gnawed': ('NN', ['N', 'AO1', 'D']), 'gnawing': ('NN', ['N', 'AO1', 'IH0', 'NG']), 'gnaw': ('NN', ['N', 'AO1']), 'gneiss': ('NN', ['N', 'AY1', 'S']), 'gnome': ('NN', ['N', 'OW1', 'M']), 'gnomic': ('NN', ['N', 'OW1', 'M', 'IH0', 'K']), 'gnomonic': ('NN', ['N', 'OW0', 'M', 'AA1', 'N', 'IH0', 'K']), 'gnosticism': ('NN', ['N', 'AA1', 'S', 'T', 'IH0', 'S', 'IH2', 'Z', 'AH0', 'M']), 'gnu': ('NN', ['N', 'UW1']), 'go': ('VB', ['G', 'OW1']), 'went': ('VBD', ['W', 'EH1', 'N', 'T']), 'gone': ('VBN', ['G', 'AO1', 'N']), 'going': ('VBG', ['G', 'OW1', 'IH0', 'NG']), 'goa': ('NN', ['G', 'OW1', 'AH0']), 'goad': ('NN', ['G', 'OW1', 'D']), 'goaded': ('VBN', ['G', 'OW1', 'D', 'IH0', 'D']), 'goading': ('VBG', ['G', 'OW1', 'D', 'IH0', 'NG']), 'goal': ('NN', ['G', 'OW1', 'L']), 'goar': ('NN', ['G', 'AO1', 'R']), 'goat': ('NN', ['G', 'OW1', 'T']), 'goatee': ('NN', ['G', 'OW1', 'T', 'IY1']), 'gob': ('NN', ['G', 'AA1', 'B']), 'gobbled': ('VBN', ['G', 'AA1', 'B', 'AH0', 'L', 'D']), 'gobbling': ('VBG', ['G', 'AA1', 'B', 'AH0', 
'L', 'IH0', 'NG']), 'gobble': ('JJ', ['G', 'AA1', 'B', 'AH0', 'L']), 'gobbler': ('NN', ['G', 'AA1', 'B', 'AH0', 'L', 'ER0']), 'goblet': ('NN', ['G', 'AA1', 'B', 'L', 'AH0', 'T']), 'goblin': ('NN', ['G', 'AA1', 'B', 'L', 'IH0', 'N']), 'god': ('NN', ['G', 'AA1', 'D']), 'goddess': ('NN', ['G', 'AA1', 'D', 'AH0', 'S']), 'godfather': ('NN', ['G', 'AA1', 'D', 'F', 'AA2', 'DH', 'ER0']), 'godhead': ('NN', ['G', 'AA1', 'D', 'HH', 'EH2', 'D']), 'godless': ('NN', ['G', 'AA1', 'D', 'L', 'AH0', 'S']), 'godly': ('RB', ['G', 'AA1', 'D', 'L', 'IY0']), 'godmother': ('NN', ['G', 'AA1', 'D', 'M', 'AH2', 'DH', 'ER0']), 'godown': ('NN', ['G', 'OW1', 'D', 'AW2', 'N']), 'godsend': ('NN', ['G', 'AA1', 'D', 'S', 'EH2', 'N', 'D']), 'godspeed': ('NN', ['G', 'AA1', 'D', 'S', 'P', 'IY0', 'D']), 'goen': ('NN', ['G', 'OW1', 'N']), 'goer': ('NN', ['G', 'OW1', 'ER0']), 'goff': ('NN', ['G', 'AO1', 'F']), 'goggle': ('NN', ['G', 'AA1', 'G', 'AH0', 'L']), 'goiter': ('NN', ['G', 'OY1', 'T', 'ER0']), 'gold': ('NN', ['G', 'OW1', 'L', 'D']), 'golde': ('NN', ['G', 'OW1', 'L', 'D']), 'goldcrest': ('NN', ['G', 'OW1', 'L', 'D', 'K', 'R', 'EH2', 'S', 'T']), 'golden': ('JJ', ['G', 'OW1', 'L', 'D', 'AH0', 'N']), 'goldfinch': ('NN', ['G', 'OW1', 'L', 'D', 'F', 'IH2', 'N', 'CH']), 'goldfish': ('NN', ['G', 'OW1', 'L', 'D', 'F', 'IH2', 'SH']), 'goldie': ('NN', ['G', 'OW1', 'L', 'D', 'IY0']), 'goldilocks': ('NNS', ['G', 'OW1', 'L', 'D', 'IY0', 'L', 'AO2', 'K', 'S']), 'goldin': ('NN', ['G', 'OW1', 'L', 'D', 'IH0', 'N']), 'golding': ('VBG', ['G', 'OW1', 'L', 'D', 'IH0', 'NG']), 'goldsmith': ('NN', ['G', 'OW1', 'L', 'D', 'S', 'M', 'IH2', 'TH']), 'golf': ('NN', ['G', 'AA1', 'L', 'F']), 'golfer': ('NN', ['G', 'AA1', 'L', 'F', 'ER0']), 'goll': ('NN', ['G', 'AA1', 'L']), 'gomer': ('NN', ['G', 'OW1', 'M', 'ER0']), 'gonads': ('NNS', ['G', 'OW1', 'N', 'AE0', 'D', 'Z']), 'gonad': ('NN', ['G', 'OW1', 'N', 'AE0', 'D']), 'gondola': ('NN', ['G', 'AA1', 'N', 'D', 'AH0', 'L', 'AH0']), 'gondolier': ('NN', ['G', 'AA2', 'N', 'D', 'AH0', 
'L', 'IH1', 'R']), 'gong': ('NN', ['G', 'AO1', 'NG']), 'goniometer': ('NN', ['G', 'OW2', 'N', 'IY0', 'AA1', 'M', 'AH0', 'T', 'ER0']), 'gonorrhea': ('NN', ['G', 'AA2', 'N', 'ER0', 'IY1', 'AH0']), 'good': ('JJ', ['G', 'UH1', 'D']), 'good-bye': ('NN', ['G', 'IH0', 'D', 'B', 'AY1']), 'goodly': ('RB', ['G', 'UH1', 'D', 'L', 'IY0']), 'goodman': ('NN', ['G', 'UH1', 'D', 'M', 'AH0', 'N']), 'good-naturedly': ('RB', ['G', 'UH1', 'D', 'N', 'EY1', 'CH', 'ER0', 'D', 'L', 'IY0']), 'goodness': ('NN', ['G', 'UH1', 'D', 'N', 'AH0', 'S']), 'goods': ('NNS', ['G', 'UH1', 'D', 'Z']), 'goodies': ('NNS', ['G', 'UH1', 'D', 'IY0', 'Z']), 'goody': ('NN', ['G', 'UH1', 'D', 'IY0']), 'guru': ('NN', ['G', 'UW1', 'R', 'UW2']), 'goose': ('NN', ['G', 'UW1', 'S']), 'gooseberries': ('NNS', ['G', 'UW1', 'S', 'B', 'EH2', 'R', 'IY0', 'Z']), 'gooseberry': ('NN', ['G', 'UW1', 'S', 'B', 'EH2', 'R', 'IY0']), 'goosefish': ('NN', ['G', 'UW1', 'S', 'F', 'IH2', 'SH']), 'goosefoot': ('NN', ['G', 'UW1', 'S', 'F', 'UH2', 'T']), 'gopher': ('NN', ['G', 'OW1', 'F', 'ER0']), 'goral': ('JJ', ['G', 'AO1', 'R', 'AH0', 'L']), 'gordian': ('JJ', ['G', 'AO1', 'R', 'D', 'IY0', 'AH0', 'N']), 'gore': ('NN', ['G', 'AO1', 'R']), 'gored': ('VBN', ['G', 'AO1', 'R', 'D']), 'goring': ('VBG', ['G', 'AO1', 'R', 'IH0', 'NG']), 'gorge': ('NN', ['G', 'AO1', 'R', 'JH']), 'gorgeous': ('JJ', ['G', 'AO1', 'R', 'JH', 'AH0', 'S']), 'gorgon': ('NN', ['G', 'AO1', 'R', 'G', 'AH0', 'N']), 'gorgonian': ('JJ', ['G', 'AO0', 'R', 'G', 'OW1', 'N', 'IY0', 'AH0', 'N']), 'gorilla': ('NN', ['G', 'ER0', 'IH1', 'L', 'AH0']), 'gory': ('NN', ['G', 'AO1', 'R', 'IY0']), 'goshawk': ('NN', ['G', 'AA1', 'S', 'HH', 'AO2', 'K']), 'gospel': ('NN', ['G', 'AA1', 'S', 'P', 'AH0', 'L']), 'goss': ('NN', ['G', 'AO1', 'S']), 'gossamer': ('NN', ['G', 'AA1', 'S', 'AH0', 'M', 'ER0']), 'gossip': ('NN', ['G', 'AA1', 'S', 'AH0', 'P']), 'gossiping': ('VBG', ['G', 'AA1', 'S', 'AH0', 'P', 'IH0', 'NG']), 'gossiper': ('NN', ['G', 'AA1', 'S', 'AH0', 'P', 'ER0']), 'gossipy': ('NN', ['G', 
'AA1', 'S', 'AH0', 'P', 'IY0']), 'goth': ('NN', ['G', 'AA1', 'TH']), 'gothic': ('JJ', ['G', 'AA1', 'TH', 'IH0', 'K']), 'gouge': ('NN', ['G', 'AW1', 'JH']), 'gouged': ('VBN', ['G', 'AW1', 'JH', 'D']), 'gouging': ('VBG', ['G', 'AW1', 'JH', 'IH0', 'NG']), 'gouger': ('NN', ['G', 'AW1', 'JH', 'ER0']), 'gourd': ('NN', ['G', 'AO1', 'R', 'D']), 'gourmet': ('NN', ['G', 'UH1', 'R', 'M', 'EY2']), 'gout': ('NN', ['G', 'AW1', 'T']), 'gouty': ('NN', ['G', 'AW1', 'T', 'IY0']), 'gove': ('NN', ['G', 'OW1', 'V']), 'governed': ('VBN', ['G', 'AH1', 'V', 'ER0', 'N', 'D']), 'governing': ('VBG', ['G', 'AH1', 'V', 'ER0', 'N', 'IH0', 'NG']), 'govern': ('NN', ['G', 'AH1', 'V', 'ER0', 'N']), 'governance': ('NN', ['G', 'AH1', 'V', 'ER0', 'N', 'AH0', 'N', 'S']), 'governess': ('NN', ['G', 'AH1', 'V', 'ER0', 'N', 'AH0', 'S']), 'government': ('NN', ['G', 'AH1', 'V', 'ER0', 'M', 'AH0', 'N', 'T']), 'governmental': ('NN', ['G', 'AH1', 'V', 'ER0', 'M', 'EH2', 'N', 'T', 'AH0', 'L']), 'governor': ('NN', ['G', 'AH1', 'V', 'ER0', 'N', 'ER0']), 'governorship': ('NN', ['G', 'AH1', 'V', 'ER0', 'N', 'ER0', 'SH', 'IH2', 'P']), 'gowan': ('NN', ['G', 'AW1', 'AH0', 'N']), 'gown': ('NN', ['G', 'AW1', 'N']), 'grab': ('NN', ['G', 'R', 'AE1', 'B']), 'grabbed': ('NNS', ['G', 'R', 'AE1', 'B', 'D']), 'grabbing': ('VBG', ['G', 'R', 'AE1', 'B', 'IH0', 'NG']), 'grabber': ('NN', ['G', 'R', 'AE1', 'B', 'ER0']), 'grace': ('NN', ['G', 'R', 'EY1', 'S']), 'graced': ('VBN', ['G', 'R', 'EY1', 'S', 'T']), 'gracing': ('VBG', ['G', 'R', 'EY1', 'S', 'IH0', 'NG']), 'graceful': ('NN', ['G', 'R', 'EY1', 'S', 'F', 'AH0', 'L']), 'graceless': ('NN', ['G', 'R', 'EY1', 'S', 'L', 'AH0', 'S']), 'gracile': ('NN', ['G', 'R', 'AE1', 'S', 'AH0', 'L']), 'gracious': ('JJ', ['G', 'R', 'EY1', 'SH', 'AH0', 'S']), 'graciously': ('RB', ['G', 'R', 'EY1', 'SH', 'AH0', 'S', 'L', 'IY0']), 'graciousness': ('NN', ['G', 'R', 'EY1', 'SH', 'AH0', 'S', 'N', 'AH0', 'S']), 'grackle': ('NN', ['G', 'R', 'AE1', 'K', 'AH0', 'L']), 'gradate': ('NN', ['G', 'R', 'EY1', 
'D', 'EY0', 'T']), 'gradation': ('NN', ['G', 'R', 'EY0', 'D', 'EY1', 'SH', 'AH0', 'N']), 'grade': ('NN', ['G', 'R', 'EY1', 'D']), 'graded': ('VBN', ['G', 'R', 'EY1', 'D', 'AH0', 'D']), 'grading': ('VBG', ['G', 'R', 'EY1', 'D', 'IH0', 'NG']), 'grader': ('NN', ['G', 'R', 'EY1', 'D', 'ER0']), 'gradient': ('NN', ['G', 'R', 'EY1', 'D', 'IY0', 'AH0', 'N', 'T']), 'gradual': ('JJ', ['G', 'R', 'AE1', 'JH', 'UW0', 'AH0', 'L']), 'gradually': ('RB', ['G', 'R', 'AE1', 'JH', 'UW0', 'AH0', 'L', 'IY0']), 'graduated': ('VBN', ['G', 'R', 'AE1', 'JH', 'UW0', 'EY2', 'T', 'IH0', 'D']), 'graduating': ('VBG', ['G', 'R', 'AE1', 'JH', 'AH0', 'W', 'EY2', 'T', 'IH0', 'NG']), 'graduate': ('NN', ['G', 'R', 'AE1', 'JH', 'AH0', 'W', 'AH0', 'T']), 'graduation': ('NN', ['G', 'R', 'AE2', 'JH', 'UW0', 'EY1', 'SH', 'AH0', 'N']), 'gradus': ('NN', ['G', 'R', 'EY1', 'D', 'AH0', 'S']), 'graf': ('NN', ['G', 'R', 'AE1', 'F']), 'graff': ('NN', ['G', 'R', 'AE1', 'F']), 'graffiti': ('NN', ['G', 'R', 'AH0', 'F', 'IY1', 'T', 'IY0']), 'graft': ('NN', ['G', 'R', 'AE1', 'F', 'T']), 'grafted': ('VBN', ['G', 'R', 'AE1', 'F', 'T', 'IH0', 'D']), 'grafting': ('VBG', ['G', 'R', 'AE1', 'F', 'T', 'IH0', 'NG']), 'grail': ('NN', ['G', 'R', 'EY1', 'L']), 'grain': ('NN', ['G', 'R', 'EY1', 'N']), 'grained': ('VBN', ['G', 'R', 'EY1', 'N', 'D']), 'grains': ('NNS', ['G', 'R', 'EY1', 'N', 'Z']), 'grainy': ('NN', ['G', 'R', 'EY1', 'N', 'IY0']), 'gram': ('NN', ['G', 'R', 'AE1', 'M']), 'gramercy': ('NN', ['G', 'R', 'AH0', 'M', 'ER1', 'S', 'IY0']), 'grammar': ('NN', ['G', 'R', 'AE1', 'M', 'ER0']), 'grammatical': ('JJ', ['G', 'R', 'AH0', 'M', 'AE1', 'T', 'AH0', 'K', 'AH0', 'L']), 'granade': ('NN', ['G', 'R', 'AH0', 'N', 'EY1', 'D']), 'granado': ('NN', ['G', 'R', 'AA0', 'N', 'AA1', 'D', 'OW0']), 'grand': ('JJ', ['G', 'R', 'AE1', 'N', 'D']), 'grandchild': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'CH', 'AY2', 'L', 'D']), 'granddaughter': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'AO2', 'T', 'ER0']), 'grandeur': ('NN', ['G', 'R', 'AE0', 'N', 'D', 
'UW1', 'R']), 'grandfather': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'F', 'AA2', 'DH', 'ER0']), 'grandfatherly': ('RB', ['G', 'R', 'AE1', 'N', 'D', 'F', 'AA2', 'DH', 'ER0', 'L', 'IY0']), 'grandiloquent': ('NN', ['G', 'R', 'AE0', 'N', 'D', 'IH1', 'L', 'AH0', 'K', 'W', 'AH0', 'N', 'T']), 'grandiose': ('NN', ['G', 'R', 'AE2', 'N', 'D', 'IY0', 'OW1', 'S']), 'grandly': ('RB', ['G', 'R', 'AE1', 'N', 'D', 'L', 'IY0']), 'grandma': ('NN', ['G', 'R', 'AE1', 'M', 'AA0']), 'grandmother': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'M', 'AH2', 'DH', 'ER0']), 'grandmotherly': ('RB', ['G', 'R', 'AE1', 'N', 'D', 'M', 'AH2', 'DH', 'ER0', 'L', 'IY0']), 'grandnephew': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'N', 'EH1', 'F', 'Y', 'UW0']), 'grandpa': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'P', 'AA2']), 'grandson': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'S', 'AH2', 'N']), 'granduncle': ('NN', ['G', 'R', 'AE1', 'N', 'D', 'AH1', 'NG', 'K', 'AH0', 'L']), 'grange': ('NN', ['G', 'R', 'EY1', 'N', 'JH']), 'granger': ('NN', ['G', 'R', 'EY1', 'N', 'JH', 'ER0']), 'granite': ('NN', ['G', 'R', 'AE1', 'N', 'AH0', 'T']), 'granitic': ('JJ', ['G', 'R', 'AH0', 'N', 'IH1', 'T', 'IH0', 'K']), 'granny': ('NN', ['G', 'R', 'AE1', 'N', 'IY0']), 'granted': ('VBN', ['G', 'R', 'AE1', 'N', 'T', 'AH0', 'D']), 'granting': ('VBG', ['G', 'R', 'AE1', 'N', 'T', 'IH0', 'NG']), 'grant': ('NN', ['G', 'R', 'AE1', 'N', 'T']), 'grantor': ('NN', ['G', 'R', 'AE1', 'N', 'T', 'ER0']), 'granular': ('NN', ['G', 'R', 'AE1', 'N', 'Y', 'AH0', 'L', 'ER0']), 'granulation': ('NN', ['G', 'R', 'AE2', 'N', 'Y', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'granule': ('NN', ['G', 'R', 'AE1', 'N', 'Y', 'AH0', 'L']), 'grape': ('NN', ['G', 'R', 'EY1', 'P']), 'grapeshot': ('NN', ['G', 'R', 'EY1', 'P', 'SH', 'AA2', 'T']), 'grapevine': ('NN', ['G', 'R', 'EY1', 'P', 'V', 'AY2', 'N']), 'graphic': ('JJ', ['G', 'R', 'AE1', 'F', 'IH0', 'K']), 'graphical': ('JJ', ['G', 'R', 'AE1', 'F', 'IH0', 'K', 'AH0', 'L']), 'graphically': ('RB', ['G', 'R', 'AE1', 'F', 'IH0', 'K', 'L', 'IY0']), 
'graphics': ('NNS', ['G', 'R', 'AE1', 'F', 'IH0', 'K', 'S']), 'graphite': ('NN', ['G', 'R', 'AE1', 'F', 'AY2', 'T']), 'graphology': ('NN', ['G', 'R', 'AH0', 'F', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'grappled': ('VBN', ['G', 'R', 'AE1', 'P', 'AH0', 'L', 'D']), 'grappling': ('VBG', ['G', 'R', 'AE1', 'P', 'L', 'IH0', 'NG']), 'grapple': ('NN', ['G', 'R', 'AE1', 'P', 'AH0', 'L']), 'grasp': ('NN', ['G', 'R', 'AE1', 'S', 'P']), 'grasping': ('VBG', ['G', 'R', 'AE1', 'S', 'P', 'IH0', 'NG']), 'grass': ('NN', ['G', 'R', 'AE1', 'S']), 'grassed': ('VBN', ['G', 'R', 'AE1', 'S', 'T']), 'grasshopper': ('NN', ['G', 'R', 'AE1', 'S', 'HH', 'AA2', 'P', 'ER0']), 'grassy': ('NN', ['G', 'R', 'AE1', 'S', 'IY0']), 'grate': ('NN', ['G', 'R', 'EY1', 'T']), 'grated': ('VBN', ['G', 'R', 'EY1', 'T', 'IH0', 'D']), 'grating': ('VBG', ['G', 'R', 'EY1', 'T', 'IH0', 'NG']), 'grateful': ('NN', ['G', 'R', 'EY1', 'T', 'F', 'AH0', 'L']), 'grater': ('NN', ['G', 'R', 'EY1', 'T', 'ER0']), 'gratification': ('NN', ['G', 'R', 'AE2', 'T', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'gratified': ('VBN', ['G', 'R', 'AE1', 'T', 'AH0', 'F', 'AY2', 'D']), 'gratifying': ('VBG', ['G', 'R', 'AE1', 'T', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'gratify': ('NN', ['G', 'R', 'AE1', 'T', 'AH0', 'F', 'AY2']), 'gratis': ('NN', ['G', 'R', 'AE1', 'T', 'AH0', 'S']), 'gratitude': ('NN', ['G', 'R', 'AE1', 'T', 'AH0', 'T', 'UW2', 'D']), 'gratuitous': ('JJ', ['G', 'R', 'AH0', 'T', 'UW1', 'AH0', 'T', 'AH0', 'S']), 'gratuity': ('NN', ['G', 'R', 'AH0', 'T', 'UW1', 'IH0', 'T', 'IY0']), 'grave': ('NN', ['G', 'R', 'EY1', 'V']), 'graven': ('NN', ['G', 'R', 'EY1', 'V', 'AH0', 'N']), 'gravel': ('NN', ['G', 'R', 'AE1', 'V', 'AH0', 'L']), 'gravelly': ('RB', ['G', 'R', 'AE1', 'V', 'AH0', 'L', 'IY0']), 'gravely': ('RB', ['G', 'R', 'EY1', 'V', 'L', 'IY0']), 'graver': ('NN', ['G', 'R', 'EY1', 'V', 'ER0']), 'graves': ('NNS', ['G', 'R', 'EY1', 'V', 'Z']), 'gravestone': ('NN', ['G', 'R', 'EY1', 'V', 'S', 'T', 'OW2', 'N']), 'graveyard': ('NN', ['G', 
'R', 'EY1', 'V', 'Y', 'AA2', 'R', 'D']), 'gravimeter': ('NN', ['G', 'R', 'AE1', 'V', 'AH0', 'M', 'IY2', 'T', 'ER0']), 'gravimetric': ('NN', ['G', 'R', 'AE2', 'V', 'AH0', 'M', 'EH1', 'T', 'R', 'IH0', 'K']), 'gravitated': ('VBN', ['G', 'R', 'AE1', 'V', 'AH0', 'T', 'EY2', 'T', 'IH0', 'D']), 'gravitating': ('VBG', ['G', 'R', 'AE1', 'V', 'IH0', 'T', 'EY2', 'T', 'IH0', 'NG']), 'gravitate': ('NN', ['G', 'R', 'AE1', 'V', 'IH0', 'T', 'EY2', 'T']), 'gravitation': ('NN', ['G', 'R', 'AE2', 'V', 'IH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'gravitational': ('NN', ['G', 'R', 'AE2', 'V', 'IH0', 'T', 'EY1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'gravity': ('NN', ['G', 'R', 'AE1', 'V', 'AH0', 'T', 'IY0']), 'gravies': ('NNS', ['G', 'R', 'EY1', 'V', 'IY0', 'Z']), 'gravy': ('NN', ['G', 'R', 'EY1', 'V', 'IY0']), 'gray': ('NN', ['G', 'R', 'EY1']), 'graybeard': ('NN', ['G', 'R', 'EY1', 'B', 'IY0', 'R', 'D']), 'grayish': ('NN', ['G', 'R', 'EY1', 'IH0', 'SH']), 'grayling': ('VBG', ['G', 'R', 'EY1', 'L', 'IH0', 'NG']), 'grazed': ('VBN', ['G', 'R', 'EY1', 'Z', 'D']), 'grazing': ('NN', ['G', 'R', 'EY1', 'Z', 'IH0', 'NG']), 'graze': ('NN', ['G', 'R', 'EY1', 'Z']), 'grazer': ('NN', ['G', 'R', 'EY1', 'Z', 'ER0']), 'grazier': ('NN', ['G', 'R', 'EY1', 'Z', 'IY0', 'ER0']), 'grease': ('NN', ['G', 'R', 'IY1', 'S']), 'greased': ('VBN', ['G', 'R', 'IY1', 'S', 'T']), 'greasing': ('VBG', ['G', 'R', 'IY1', 'S', 'IH0', 'NG']), 'greaser': ('NN', ['G', 'R', 'IY1', 'S', 'ER0']), 'greasy': ('NN', ['G', 'R', 'IY1', 'S', 'IY0']), 'great': ('JJ', ['G', 'R', 'EY1', 'T']), 'greatly': ('RB', ['G', 'R', 'EY1', 'T', 'L', 'IY0']), 'greatness': ('NN', ['G', 'R', 'EY1', 'T', 'N', 'AH0', 'S']), 'greave': ('NN', ['G', 'R', 'IY1', 'V']), 'greaves': ('NNS', ['G', 'R', 'IY1', 'V', 'Z']), 'grebe': ('NN', ['G', 'R', 'IY1', 'B']), 'grecian': ('JJ', ['G', 'R', 'IY1', 'SH', 'AH0', 'N']), 'greco-roman': ('NN', ['G', 'R', 'EH2', 'K', 'OW0', 'R', 'OW1', 'M', 'AH0', 'N']), 'greece': ('NN', ['G', 'R', 'IY1', 'S']), 'grice': ('NN', ['G', 'R', 
'AY1', 'S']), 'grise': ('NN', ['G', 'R', 'AY1', 'Z']), 'greed': ('NN', ['G', 'R', 'IY1', 'D']), 'greedily': ('RB', ['G', 'R', 'IY1', 'D', 'AH0', 'L', 'IY0']), 'greedy': ('NN', ['G', 'R', 'IY1', 'D', 'IY0']), 'greek': ('NN', ['G', 'R', 'IY1', 'K']), 'green': ('JJ', ['G', 'R', 'IY1', 'N']), 'greened': ('VBN', ['G', 'R', 'IY1', 'N', 'D']), 'greening': ('VBG', ['G', 'R', 'IY1', 'N', 'IH0', 'NG']), 'greenback': ('NN', ['G', 'R', 'IY1', 'N', 'B', 'AE2', 'K']), 'greenery': ('NN', ['G', 'R', 'IY1', 'N', 'ER0', 'IY0']), 'greenhouse': ('NN', ['G', 'R', 'IY1', 'N', 'HH', 'AW2', 'S']), 'greenish': ('NN', ['G', 'R', 'IY1', 'N', 'IH0', 'SH']), 'greenly': ('RB', ['G', 'R', 'IY1', 'N', 'L', 'IY0']), 'greenness': ('NN', ['G', 'R', 'IY1', 'N', 'N', 'AH0', 'S']), 'greenstone': ('NN', ['G', 'R', 'IY1', 'N', 'S', 'T', 'OW2', 'N']), 'greenwood': ('NN', ['G', 'R', 'IY1', 'N', 'W', 'UH2', 'D']), 'greet': ('NN', ['G', 'R', 'IY1', 'T']), 'greeted': ('VBN', ['G', 'R', 'IY1', 'T', 'AH0', 'D']), 'greeting': ('VBG', ['G', 'R', 'IY1', 'T', 'IH0', 'NG']), 'gregarious': ('JJ', ['G', 'R', 'AH0', 'G', 'EH1', 'R', 'IY0', 'AH0', 'S']), 'grego': ('NN', ['G', 'R', 'EH1', 'G', 'OW0']), 'gregorian': ('JJ', ['G', 'R', 'AH0', 'G', 'AO1', 'R', 'IY0', 'AH0', 'N']), 'grenade': ('NN', ['G', 'R', 'AH0', 'N', 'EY1', 'D']), 'grete': ('NN', ['G', 'R', 'IY1', 'T']), 'greve': ('NN', ['G', 'R', 'IY1', 'V']), 'grew': ('VBD', ['G', 'R', 'UW1']), 'gruesome': ('NN', ['G', 'R', 'UW1', 'S', 'AH0', 'M']), 'grey': ('NN', ['G', 'R', 'EY1']), 'greyhound': ('NN', ['G', 'R', 'EY1', 'HH', 'AW2', 'N', 'D']), 'greylag': ('NN', ['G', 'R', 'EY1', 'L', 'AE2', 'G']), 'gribble': ('JJ', ['G', 'R', 'IH1', 'B', 'AH0', 'L']), 'grid': ('NN', ['G', 'R', 'IH1', 'D']), 'gridiron': ('NN', ['G', 'R', 'IH1', 'D', 'AY2', 'ER0', 'N']), 'grief': ('NN', ['G', 'R', 'IY1', 'F']), 'griego': ('NN', ['G', 'R', 'IY1', 'G', 'OW0']), 'grievance': ('NN', ['G', 'R', 'IY1', 'V', 'AH0', 'N', 'S']), 'grieve': ('NN', ['G', 'R', 'IY1', 'V']), 'grieved': ('VBN', ['G', 
'R', 'IY1', 'V', 'D']), 'grieving': ('VBG', ['G', 'R', 'IY1', 'V', 'IH0', 'NG']), 'griever': ('NN', ['G', 'R', 'IY1', 'V', 'ER0']), 'grievous': ('JJ', ['G', 'R', 'IY1', 'V', 'AH0', 'S']), 'griff': ('NN', ['G', 'R', 'IH1', 'F']), 'griffin': ('NN', ['G', 'R', 'IH1', 'F', 'IH0', 'N']), 'griffon': ('NN', ['G', 'R', 'IH1', 'F', 'AH0', 'N']), 'grill': ('NN', ['G', 'R', 'IH1', 'L']), 'grilled': ('VBN', ['G', 'R', 'IH1', 'L', 'D']), 'grilling': ('VBG', ['G', 'R', 'IH1', 'L', 'IH0', 'NG']), 'grille': ('NN', ['G', 'R', 'IH1', 'L']), 'grim': ('NN', ['G', 'R', 'IH1', 'M']), 'grimace': ('NN', ['G', 'R', 'IH1', 'M', 'AH0', 'S']), 'grimaced': ('VBN', ['G', 'R', 'IH1', 'M', 'AH0', 'S', 'T']), 'grime': ('NN', ['G', 'R', 'AY1', 'M']), 'grimly': ('NN', ['G', 'R', 'IH1', 'M', 'L', 'IY0']), 'grimme': ('NN', ['G', 'R', 'IH1', 'M']), 'grimness': ('NN', ['G', 'R', 'IH1', 'M', 'N', 'AH0', 'S']), 'grimy': ('NN', ['G', 'R', 'AY1', 'M', 'IY0']), 'grin': ('NN', ['G', 'R', 'IH1', 'N']), 'grinned': ('VBN', ['G', 'R', 'IH1', 'N', 'D']), 'grinning': ('VBG', ['G', 'R', 'IH1', 'N', 'IH0', 'NG']), 'ground': ('NN', ['G', 'R', 'AW1', 'N', 'D']), 'grinding': ('VBG', ['G', 'R', 'AY1', 'N', 'D', 'IH0', 'NG']), 'grind': ('NN', ['G', 'R', 'AY1', 'N', 'D']), 'grinder': ('NN', ['G', 'R', 'AY1', 'N', 'D', 'ER0']), 'grindle': ('NN', ['G', 'R', 'IH1', 'N', 'D', 'AH0', 'L']), 'grindstone': ('NN', ['G', 'R', 'AY1', 'N', 'D', 'S', 'T', 'OW2', 'N']), 'grip': ('NN', ['G', 'R', 'IH1', 'P']), 'gripe': ('NN', ['G', 'R', 'AY1', 'P']), 'griped': ('NNS', ['G', 'R', 'AY1', 'P', 'T']), 'griping': ('VBG', ['G', 'R', 'AY1', 'P', 'IH0', 'NG']), 'grisly': ('RB', ['G', 'R', 'IH1', 'Z', 'L', 'IY0']), 'grist': ('NN', ['G', 'R', 'IH1', 'S', 'T']), 'gristle': ('NN', ['G', 'R', 'IH1', 'S', 'AH0', 'L']), 'grit': ('NN', ['G', 'R', 'IH1', 'T']), 'gritting': ('VBG', ['G', 'R', 'IH1', 'T', 'IH0', 'NG']), 'gritty': ('NN', ['G', 'R', 'IH1', 'T', 'IY0']), 'grizzle': ('NN', ['G', 'R', 'IH1', 'Z', 'AH0', 'L']), 'grizzled': ('VBN', ['G', 'R', 
'IH1', 'Z', 'AH0', 'L', 'D']), 'grizzly': ('NN', ['G', 'R', 'IH1', 'Z', 'L', 'IY0']), 'grizzlies': ('NNS', ['G', 'R', 'IH1', 'Z', 'L', 'IY0', 'Z']), 'groaned': ('VBN', ['G', 'R', 'OW1', 'N', 'D']), 'groaning': ('VBG', ['G', 'R', 'OW1', 'N', 'IH0', 'NG']), 'groan': ('NN', ['G', 'R', 'OW1', 'N']), 'groat': ('NN', ['G', 'R', 'OW1', 'T']), 'grocer': ('NN', ['G', 'R', 'OW1', 'S', 'ER0']), 'groceries': ('NNS', ['G', 'R', 'OW1', 'S', 'ER0', 'IY0', 'Z']), 'grocery': ('NN', ['G', 'R', 'OW1', 'S', 'ER0', 'IY0']), 'grog': ('NN', ['G', 'R', 'AA1', 'G']), 'groggy': ('NN', ['G', 'R', 'AA1', 'G', 'IY0']), 'groin': ('NN', ['G', 'R', 'OY1', 'N']), 'groined': ('VBN', ['G', 'R', 'OY1', 'N', 'D']), 'groom': ('NN', ['G', 'R', 'UW1', 'M']), 'groomed': ('VBN', ['G', 'R', 'UW1', 'M', 'D']), 'grooming': ('VBG', ['G', 'R', 'UW1', 'M', 'IH0', 'NG']), 'groomer': ('NN', ['G', 'R', 'UW1', 'M', 'ER0']), 'groove': ('NN', ['G', 'R', 'UW1', 'V']), 'groover': ('NN', ['G', 'R', 'UW1', 'V', 'ER0']), 'groped': ('NNS', ['G', 'R', 'OW1', 'P', 'T']), 'groping': ('VBG', ['G', 'R', 'OW1', 'P', 'IH0', 'NG']), 'grope': ('NN', ['G', 'R', 'OW1', 'P']), 'gros': ('NNS', ['G', 'R', 'OW1', 'S']), 'grosbeak': ('NN', ['G', 'R', 'OW1', 'S', 'B', 'IY2', 'K']), 'gross': ('JJ', ['G', 'R', 'OW1', 'S']), 'grossly': ('RB', ['G', 'R', 'OW1', 'S', 'L', 'IY0']), 'grote': ('NN', ['G', 'R', 'OW1', 'T']), 'grotesque': ('NN', ['G', 'R', 'OW0', 'T', 'EH1', 'S', 'K']), 'grotesquely': ('RB', ['G', 'R', 'OW0', 'T', 'EH1', 'S', 'K', 'L', 'IY0']), 'grotto': ('NN', ['G', 'R', 'AA1', 'T', 'OW2']), 'grounded': ('VBN', ['G', 'R', 'AW1', 'N', 'D', 'IH0', 'D']), 'grounding': ('VBG', ['G', 'R', 'AW1', 'N', 'D', 'IH0', 'NG']), 'groundless': ('NN', ['G', 'R', 'AW1', 'N', 'D', 'L', 'AH0', 'S']), 'groundling': ('VBG', ['G', 'R', 'AW1', 'N', 'D', 'L', 'IH0', 'NG']), 'groundnut': ('NN', ['G', 'R', 'AW1', 'N', 'D', 'N', 'AH2', 'T']), 'groundwork': ('NN', ['G', 'R', 'AW1', 'N', 'D', 'W', 'ER2', 'K']), 'group': ('NN', ['G', 'R', 'UW1', 'P']), 
'grouped': ('NNS', ['G', 'R', 'UW1', 'P', 'T']), 'grouping': ('VBG', ['G', 'R', 'UW1', 'P', 'IH0', 'NG']), 'grouper': ('NN', ['G', 'R', 'UW1', 'P', 'ER0']), 'grouse': ('NN', ['G', 'R', 'AW1', 'S']), 'grout': ('NN', ['G', 'R', 'AW1', 'T']), 'grouting': ('VBG', ['G', 'R', 'AW1', 'T', 'IH0', 'NG']), 'grove': ('NN', ['G', 'R', 'OW1', 'V']), 'groveling': ('VBG', ['G', 'R', 'AO1', 'V', 'AH0', 'L', 'IH0', 'NG']), 'grovel': ('NN', ['G', 'R', 'AA1', 'V', 'AH0', 'L']), 'growing': ('VBG', ['G', 'R', 'OW1', 'IH0', 'NG']), 'grow': ('NN', ['G', 'R', 'OW1']), 'grower': ('NN', ['G', 'R', 'OW1', 'ER0']), 'growled': ('VBN', ['G', 'R', 'AW1', 'L', 'D']), 'growling': ('VBG', ['G', 'R', 'OW1', 'L', 'IH0', 'NG']), 'growl': ('NN', ['G', 'R', 'AW1', 'L']), 'grown': ('NN', ['G', 'R', 'OW1', 'N']), 'growth': ('NN', ['G', 'R', 'OW1', 'TH']), 'grub': ('NN', ['G', 'R', 'AH1', 'B']), 'grubby': ('NN', ['G', 'R', 'AH1', 'B', 'IY0']), 'grudging': ('VBG', ['G', 'R', 'AH1', 'JH', 'IH0', 'NG']), 'grudge': ('NN', ['G', 'R', 'AH1', 'JH']), 'grudgingly': ('RB', ['G', 'R', 'AH1', 'JH', 'IH0', 'NG', 'L', 'IY0']), 'gruel': ('NN', ['G', 'R', 'UW1', 'IH0', 'L']), 'gruff': ('NN', ['G', 'R', 'AH1', 'F']), 'grum': ('NN', ['G', 'R', 'AH1', 'M']), 'grumbling': ('VBG', ['G', 'R', 'AH1', 'M', 'B', 'AH0', 'L', 'IH0', 'NG']), 'grumble': ('JJ', ['G', 'R', 'AH1', 'M', 'B', 'AH0', 'L']), 'grumpy': ('NN', ['G', 'R', 'AH1', 'M', 'P', 'IY0']), 'grunted': ('VBN', ['G', 'R', 'AH1', 'N', 'T', 'IH0', 'D']), 'grunting': ('VBG', ['G', 'R', 'AH1', 'N', 'T', 'IH0', 'NG']), 'grunt': ('NN', ['G', 'R', 'AH1', 'N', 'T']), 'guanaco': ('NN', ['G', 'W', 'AH0', 'N', 'AA1', 'K', 'OW2']), 'guano': ('NN', ['G', 'W', 'AA1', 'N', 'OW2']), 'guarantees': ('NNS', ['G', 'EH2', 'R', 'AH0', 'N', 'T', 'IY1', 'Z']), 'guarantee': ('NN', ['G', 'EH2', 'R', 'AH0', 'N', 'T', 'IY1']), 'guaranteed': ('NN', ['G', 'EH2', 'R', 'AH0', 'N', 'T', 'IY1', 'D']), 'guaranteeing': ('VBG', ['G', 'EH2', 'R', 'AH0', 'N', 'T', 'IY1', 'IH0', 'NG']), 'guarantor': ('NN', 
['G', 'EH2', 'R', 'AH0', 'N', 'T', 'AO1', 'R']), 'guaranty': ('NN', ['G', 'EH2', 'R', 'AH0', 'N', 'T', 'IY1']), 'guarded': ('VBN', ['G', 'AA1', 'R', 'D', 'AH0', 'D']), 'guard': ('NN', ['G', 'AA1', 'R', 'D']), 'guardfish': ('NN', ['G', 'AA1', 'R', 'D', 'F', 'IH2', 'SH']), 'guardian': ('NN', ['G', 'AA1', 'R', 'D', 'IY0', 'AH0', 'N']), 'guardianship': ('NN', ['G', 'AA1', 'R', 'D', 'IY0', 'AH0', 'N', 'SH', 'IH0', 'P']), 'guards': ('NNS', ['G', 'AA1', 'R', 'D', 'Z']), 'guardsmen': ('NNS', ['G', 'AA1', 'R', 'D', 'Z', 'M', 'IH0', 'N']), 'guardsman': ('NN', ['G', 'AA1', 'R', 'D', 'Z', 'M', 'AE2', 'N']), 'guava': ('NN', ['G', 'W', 'AA1', 'V', 'AH0']), 'gubernatorial': ('JJ', ['G', 'UW0', 'B', 'ER0', 'N', 'AH0', 'T', 'AO1', 'R', 'IY0', 'AH0', 'L']), 'gudgeon': ('NN', ['G', 'AH1', 'JH', 'AH0', 'N']), 'gue': ('NN', ['G', 'Y', 'UW1']), 'guerilla': ('NN', ['G', 'ER0', 'IH1', 'L', 'AH0']), 'guerrilla': ('NN', ['G', 'ER0', 'IH1', 'L', 'AH0']), 'guessed': ('VBN', ['G', 'EH1', 'S', 'T']), 'guessing': ('VBG', ['G', 'EH1', 'S', 'IH0', 'NG']), 'guess': ('NN', ['G', 'EH1', 'S']), 'guesser': ('NN', ['G', 'EH1', 'S', 'ER0']), 'guesswork': ('NN', ['G', 'EH1', 'S', 'W', 'ER2', 'K']), 'guest': ('NN', ['G', 'EH1', 'S', 'T']), 'guffaw': ('NN', ['G', 'AH0', 'F', 'AO1']), 'guidance': ('NN', ['G', 'AY1', 'D', 'AH0', 'N', 'S']), 'guided': ('VBN', ['G', 'AY1', 'D', 'AH0', 'D']), 'guiding': ('VBG', ['G', 'AY1', 'D', 'IH0', 'NG']), 'guidebook': ('NN', ['G', 'AY1', 'D', 'B', 'UH2', 'K']), 'guidepost': ('NN', ['G', 'AY1', 'D', 'P', 'OW2', 'S', 'T']), 'guider': ('NN', ['G', 'AY1', 'D', 'ER0']), 'guild': ('NN', ['G', 'IH1', 'L', 'D']), 'guilder': ('NN', ['G', 'IH1', 'L', 'D', 'ER0']), 'guildhall': ('NN', ['G', 'IH1', 'L', 'D', 'HH', 'AO2', 'L']), 'guile': ('NN', ['G', 'AY1', 'L']), 'guileless': ('NN', ['G', 'AY1', 'L', 'L', 'AH0', 'S']), 'guillotine': ('NN', ['G', 'IH1', 'L', 'AH0', 'T', 'IY2', 'N']), 'guilt': ('NN', ['G', 'IH1', 'L', 'T']), 'guiltless': ('NN', ['G', 'IH1', 'L', 'T', 'L', 'IH0', 'S']), 
'guilty': ('JJ', ['G', 'IH1', 'L', 'T', 'IY0']), 'guinea': ('NN', ['G', 'IH1', 'N', 'IY0']), 'guise': ('NN', ['G', 'AY1', 'Z']), 'guitar': ('NN', ['G', 'IH0', 'T', 'AA1', 'R']), 'gulas': ('NNS', ['G', 'Y', 'UW1', 'L', 'AH0', 'Z']), 'gula': ('NN', ['G', 'Y', 'UW1', 'L', 'AH0']), 'gulch': ('NN', ['G', 'AH1', 'L', 'CH']), 'gulf': ('NN', ['G', 'AH1', 'L', 'F']), 'gull': ('NN', ['G', 'AH1', 'L']), 'gullet': ('NN', ['G', 'AH1', 'L', 'AH0', 'T']), 'gullible': ('JJ', ['G', 'AH1', 'L', 'AH0', 'B', 'AH0', 'L']), 'gully': ('RB', ['G', 'AH1', 'L', 'IY0']), 'gullies': ('NNS', ['G', 'AH1', 'L', 'IY0', 'Z']), 'gulped': ('NNS', ['G', 'AH1', 'L', 'P', 'T']), 'gulping': ('VBG', ['G', 'AH1', 'L', 'P', 'IH0', 'NG']), 'gulp': ('NN', ['G', 'AH1', 'L', 'P']), 'gum': ('NN', ['G', 'AH1', 'M']), 'gummed': ('VBN', ['G', 'AH1', 'M', 'D']), 'gumbo': ('NN', ['G', 'AH1', 'M', 'B', 'OW0']), 'gummy': ('NN', ['G', 'AH1', 'M', 'IY0']), 'gump': ('NN', ['G', 'AH1', 'M', 'P']), 'gumption': ('NN', ['G', 'AH1', 'M', 'P', 'SH', 'AH0', 'N']), 'gunboat': ('NN', ['G', 'AH1', 'N', 'B', 'OW2', 'T']), 'guncotton': ('NN', ['G', 'AH1', 'N', 'K', 'AA1', 'T', 'AH0', 'N']), 'gunflint': ('NN', ['G', 'AH1', 'N', 'F', 'L', 'IH0', 'N', 'T']), 'gunner': ('NN', ['G', 'AH1', 'N', 'ER0']), 'gunnery': ('NN', ['G', 'AH1', 'N', 'ER0', 'IY0']), 'gunning': ('VBG', ['G', 'AH1', 'N', 'IH0', 'NG']), 'gunny': ('NN', ['G', 'AH1', 'N', 'IY0']), 'gunpowder': ('NN', ['G', 'AH1', 'N', 'P', 'AW2', 'D', 'ER0']), 'gunshot': ('NN', ['G', 'AH1', 'N', 'SH', 'AA2', 'T']), 'gurgling': ('VBG', ['G', 'ER1', 'G', 'AH0', 'L', 'IH0', 'NG']), 'gurgle': ('NN', ['G', 'ER1', 'G', 'AH0', 'L']), 'gurry': ('NN', ['G', 'ER1', 'IY0']), 'gushed': ('VBN', ['G', 'AH1', 'SH', 'T']), 'gushing': ('VBG', ['G', 'AH1', 'SH', 'IH0', 'NG']), 'gush': ('NN', ['G', 'AH1', 'SH']), 'gusher': ('NN', ['G', 'AH1', 'SH', 'ER0']), 'gust': ('NN', ['G', 'AH1', 'S', 'T']), 'gusto': ('NN', ['G', 'AH1', 'S', 'T', 'OW2']), 'gusty': ('NN', ['G', 'AH1', 'S', 'T', 'IY0']), 'gut': ('NN', 
['G', 'AH1', 'T']), 'gutted': ('VBN', ['G', 'AH1', 'T', 'IH0', 'D']), 'gutting': ('VBG', ['G', 'AH1', 'T', 'IH0', 'NG']), 'gutter': ('NN', ['G', 'AH1', 'T', 'ER0']), 'guttered': ('VBN', ['G', 'AH1', 'T', 'ER0', 'D']), 'guttering': ('VBG', ['G', 'AH1', 'T', 'ER0', 'IH0', 'NG']), 'guttural': ('JJ', ['G', 'AH1', 'T', 'ER0', 'AH0', 'L']), 'guy': ('NN', ['G', 'AY1']), 'guzzling': ('VBG', ['G', 'AH1', 'Z', 'AH0', 'L', 'IH0', 'NG']), 'guzzle': ('NN', ['G', 'AH1', 'Z', 'AH0', 'L']), 'guzzler': ('NN', ['G', 'AH1', 'Z', 'L', 'ER0']), 'gymnasiums': ('NNS', ['JH', 'IH0', 'M', 'N', 'EY1', 'Z', 'IY0', 'AH0', 'M', 'Z']), 'gymnasia': ('NN', ['JH', 'IH0', 'M', 'N', 'EY1', 'Z', 'IY0', 'AH0']), 'gymnasium': ('NN', ['JH', 'IH0', 'M', 'N', 'EY1', 'Z', 'IY0', 'AH0', 'M']), 'gymnast': ('NN', ['JH', 'IH1', 'M', 'N', 'AH0', 'S', 'T']), 'gymnastic': ('JJ', ['JH', 'IH0', 'M', 'N', 'AE1', 'S', 'T', 'IH0', 'K']), 'gymnastics': ('NNS', ['JH', 'IH0', 'M', 'N', 'AE1', 'S', 'T', 'IH0', 'K', 'S']), 'gyn': ('NN', ['G', 'IH1', 'N']), 'gynecological': ('JJ', ['G', 'AY2', 'N', 'AH0', 'K', 'AH0', 'L', 'AA1', 'JH', 'IH0', 'K', 'AH0', 'L']), 'gynecology': ('NN', ['G', 'AY2', 'N', 'AH0', 'K', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'gyp': ('NN', ['JH', 'IH1', 'P']), 'gypsum': ('NN', ['JH', 'IH1', 'P', 'S', 'AH0', 'M']), 'gypsies': ('NNS', ['JH', 'IH1', 'P', 'S', 'IY0', 'Z']), 'gypsy': ('NN', ['JH', 'IH1', 'P', 'S', 'IY0']), 'gyrate': ('NN', ['JH', 'AY1', 'R', 'EY2', 'T']), 'gyrated': ('VBN', ['JH', 'AY1', 'R', 'EY2', 'T', 'IH0', 'D']), 'gyrating': ('VBG', ['JH', 'AY1', 'R', 'EY2', 'T', 'IH0', 'NG']), 'gyration': ('NN', ['JH', 'AY0', 'R', 'EY1', 'SH', 'AH0', 'N']), 'gyroscope': ('NN', ['JH', 'AY1', 'R', 'AH0', 'S', 'K', 'OW2', 'P']), 'gyroscopic': ('NN', ['JH', 'AY2', 'R', 'AH0', 'S', 'K', 'AA1', 'P', 'IH0', 'K']), 'h': ('NN', ['EY1', 'CH']), 'ha': ('NN', ['HH', 'AA1']), 'haaf': ('NN', ['HH', 'AA1', 'F']), 'haak': ('NN', ['HH', 'AA1', 'K']), 'haar': ('NN', ['HH', 'AA1', 'R']), 'haberdashery': ('NN', ['HH', 'AE1', 
'B', 'ER0', 'D', 'AE2', 'SH', 'ER0', 'IY0']), 'habit': ('NN', ['HH', 'AE1', 'B', 'AH0', 'T']), 'habitable': ('JJ', ['HH', 'AE1', 'B', 'AH0', 'T', 'AH0', 'B', 'AH0', 'L']), 'habitat': ('NN', ['HH', 'AE1', 'B', 'AH0', 'T', 'AE2', 'T']), 'habitation': ('NN', ['HH', 'AE2', 'B', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'habitual': ('JJ', ['HH', 'AH0', 'B', 'IH1', 'CH', 'UW0', 'AH0', 'L']), 'hable': ('JJ', ['HH', 'EY1', 'B', 'AH0', 'L']), 'hacienda': ('NN', ['HH', 'AE2', 'S', 'IY0', 'EH1', 'N', 'D', 'AH0']), 'hack': ('NN', ['HH', 'AE1', 'K']), 'hacked': ('VBN', ['HH', 'AE1', 'K', 'T']), 'hacking': ('VBG', ['HH', 'AE1', 'K', 'IH0', 'NG']), 'hackberry': ('NN', ['HH', 'AE1', 'K', 'B', 'EH2', 'R', 'IY0']), 'hacker': ('NN', ['HH', 'AE1', 'K', 'ER0']), 'hackle': ('NN', ['HH', 'AE1', 'K', 'AH0', 'L']), 'hackman': ('NN', ['HH', 'AE1', 'K', 'M', 'AE2', 'N']), 'hackmatack': ('NN', ['HH', 'AE1', 'K', 'M', 'AH0', 'T', 'AE2', 'K']), 'hackney': ('NN', ['HH', 'AE1', 'K', 'N', 'IY0']), 'hackneyed': ('NN', ['HH', 'AE1', 'K', 'N', 'IY0', 'D']), 'had': ('VBD', ['HH', 'AE1', 'D']), 'haddock': ('NN', ['HH', 'AE1', 'D', 'AH0', 'K']), 'hade': ('NN', ['HH', 'EY1', 'D']), 'hades': ('NNS', ['HH', 'EY1', 'D', 'IY0', 'Z']), 'haft': ('NN', ['HH', 'AE1', 'F', 'T']), 'hag': ('NN', ['HH', 'AE1', 'G']), 'haggard': ('NN', ['HH', 'AE1', 'G', 'ER0', 'D']), 'haggled': ('VBN', ['HH', 'AE1', 'G', 'AH0', 'L', 'D']), 'haggling': ('VBG', ['HH', 'AE1', 'G', 'AH0', 'L', 'IH0', 'NG']), 'haggle': ('NN', ['HH', 'AE1', 'G', 'AH0', 'L']), 'hagiography': ('NN', ['HH', 'AE2', 'G', 'IY0', 'AA1', 'G', 'R', 'AH0', 'F', 'IY0']), 'hah': ('NN', ['HH', 'AA1']), 'haik': ('NN', ['HH', 'EY1', 'K']), 'hail': ('NN', ['HH', 'EY1', 'L']), 'halting': ('VBG', ['HH', 'AO1', 'L', 'T', 'IH0', 'NG']), 'hailstone': ('NN', ['HH', 'EY1', 'L', 'S', 'T', 'OW2', 'N']), 'hailstorm': ('NN', ['HH', 'EY1', 'L', 'S', 'T', 'AO2', 'R', 'M']), 'han': ('NN', ['HH', 'AA1', 'N']), 'hair': ('NN', ['HH', 'EH1', 'R']), 'hairdresser': ('NN', ['HH', 'EH1', 'R', 
'D', 'R', 'EH2', 'S', 'ER0']), 'haired': ('VBN', ['HH', 'EH1', 'R', 'D']), 'hairiness': ('NN', ['HH', 'EH1', 'R', 'IY0', 'N', 'AH0', 'S']), 'hairless': ('NN', ['HH', 'EH1', 'R', 'L', 'AH0', 'S']), 'hairy': ('NN', ['HH', 'EH1', 'R', 'IY0']), 'haitian': ('NN', ['HH', 'EY1', 'SH', 'AH0', 'N']), 'haye': ('NN', ['HH', 'EY1']), 'hake': ('NN', ['HH', 'EY1', 'K']), 'hakim': ('NN', ['HH', 'AA0', 'K', 'IY1', 'M']), 'halcyon': ('NN', ['HH', 'AE1', 'L', 'S', 'IY0', 'AH0', 'N']), 'hale': ('NN', ['HH', 'EY1', 'L']), 'half': ('NN', ['HH', 'AE1', 'F']), 'halves': ('NNS', ['HH', 'AE1', 'V', 'Z']), 'halfway': ('NN', ['HH', 'AE1', 'F', 'W', 'EY1']), 'halibut': ('NN', ['HH', 'AE1', 'L', 'AH0', 'B', 'AH0', 'T']), 'halite': ('NN', ['HH', 'AE1', 'L', 'AY0', 'T']), 'hall': ('NN', ['HH', 'AO1', 'L']), 'hallelujah': ('NN', ['HH', 'AE2', 'L', 'AH0', 'L', 'UW1', 'Y', 'AH0']), 'hallowed': ('VBN', ['HH', 'AE1', 'L', 'OW0', 'D']), 'hallow': ('NN', ['HH', 'AE1', 'L', 'OW0']), 'halloween': ('NN', ['HH', 'AE2', 'L', 'AH0', 'W', 'IY1', 'N']), 'hallucinate': ('NN', ['HH', 'AH0', 'L', 'UW1', 'S', 'AH0', 'N', 'EY0', 'T']), 'hallucination': ('NN', ['HH', 'AH0', 'L', 'UW2', 'S', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'hallucinatory': ('NN', ['HH', 'AH0', 'L', 'UW1', 'S', 'AH0', 'N', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'halm': ('NN', ['HH', 'AA1', 'M']), 'halos': ('NN', ['HH', 'EY1', 'L', 'OW0', 'Z']), 'halo': ('NN', ['HH', 'EY1', 'L', 'OW0']), 'halogen': ('NN', ['HH', 'AE1', 'L', 'AH0', 'JH', 'AH0', 'N']), 'halt': ('NN', ['HH', 'AO1', 'L', 'T']), 'halted': ('VBN', ['HH', 'AO1', 'L', 'T', 'AH0', 'D']), 'halter': ('NN', ['HH', 'AO1', 'L', 'T', 'ER0']), 'haltingly': ('RB', ['HH', 'AO1', 'L', 'T', 'IH0', 'NG', 'L', 'IY0']), 'halve': ('NN', ['HH', 'AE1', 'V']), 'halved': ('VBN', ['HH', 'AE1', 'V', 'D']), 'halving': ('VBG', ['HH', 'AE1', 'V', 'IH0', 'NG']), 'ham': ('NN', ['HH', 'AE1', 'M']), 'hamburg': ('NN', ['HH', 'AE1', 'M', 'B', 'ER0', 'G']), 'hamel': ('NN', ['HH', 'AE1', 'M', 'AH0', 'L']), 'hamlet': ('NN', 
['HH', 'AE1', 'M', 'L', 'AH0', 'T']), 'hammer': ('NN', ['HH', 'AE1', 'M', 'ER0']), 'hammered': ('VBN', ['HH', 'AE1', 'M', 'ER0', 'D']), 'hammering': ('VBG', ['HH', 'AE1', 'M', 'ER0', 'IH0', 'NG']), 'hammerman': ('NN', ['HH', 'AE1', 'M', 'ER0', 'M', 'AH0', 'N']), 'hammock': ('NN', ['HH', 'AE1', 'M', 'AH0', 'K']), 'hamper': ('NN', ['HH', 'AE1', 'M', 'P', 'ER0']), 'hampered': ('VBN', ['HH', 'AE1', 'M', 'P', 'ER0', 'D']), 'hampering': ('VBG', ['HH', 'AE1', 'M', 'P', 'ER0', 'IH0', 'NG']), 'hamster': ('NN', ['HH', 'AE1', 'M', 'S', 'T', 'ER0']), 'hamstring': ('VBG', ['HH', 'AE1', 'M', 'S', 'T', 'R', 'IH2', 'NG']), 'hamstrung': ('NN', ['HH', 'AE1', 'M', 'S', 'T', 'R', 'AH0', 'NG']), 'hance': ('NN', ['HH', 'AE1', 'N', 'S']), 'hand': ('NN', ['HH', 'AE1', 'N', 'D']), 'handed': ('VBN', ['HH', 'AE1', 'N', 'D', 'AH0', 'D']), 'handing': ('VBG', ['HH', 'AE1', 'N', 'D', 'IH0', 'NG']), 'handbill': ('NN', ['HH', 'AE1', 'N', 'D', 'B', 'IH2', 'L']), 'handbook': ('NN', ['HH', 'AE1', 'N', 'D', 'B', 'UH2', 'K']), 'handcraft': ('NN', ['HH', 'AE1', 'N', 'D', 'K', 'R', 'AE2', 'F', 'T']), 'handcuff': ('NN', ['HH', 'AE1', 'N', 'D', 'K', 'AH2', 'F']), 'handcuffed': ('NN', ['HH', 'AE1', 'N', 'D', 'K', 'AH2', 'F', 'T']), 'handcuffing': ('VBG', ['HH', 'AE1', 'N', 'D', 'K', 'AH2', 'F', 'IH0', 'NG']), 'hander': ('NN', ['HH', 'AE1', 'N', 'D', 'ER0']), 'handful': ('NN', ['HH', 'AE1', 'N', 'D', 'F', 'UH2', 'L']), 'handicap': ('NN', ['HH', 'AE1', 'N', 'D', 'IY0', 'K', 'AE2', 'P']), 'handicapped': ('NNS', ['HH', 'AE1', 'N', 'D', 'IY0', 'K', 'AE2', 'P', 'T']), 'handicapping': ('VBG', ['HH', 'AE1', 'N', 'D', 'IY0', 'K', 'AE2', 'P', 'IH0', 'NG']), 'handicapper': ('NN', ['HH', 'AE1', 'N', 'D', 'IY0', 'K', 'AE2', 'P', 'ER0']), 'handicraft': ('NN', ['HH', 'AE1', 'N', 'D', 'IY0', 'K', 'R', 'AE2', 'F', 'T']), 'handily': ('RB', ['HH', 'AE1', 'N', 'D', 'AH0', 'L', 'IY0']), 'handiwork': ('NN', ['HH', 'AE1', 'N', 'D', 'IY0', 'W', 'ER2', 'K']), 'handkerchief': ('NN', ['HH', 'AE1', 'NG', 'K', 'ER0', 'CH', 'IH0', 
'F']), 'handled': ('VBN', ['HH', 'AE1', 'N', 'D', 'AH0', 'L', 'D']), 'handling': ('VBG', ['HH', 'AE1', 'N', 'D', 'L', 'IH0', 'NG']), 'handle': ('NN', ['HH', 'AE1', 'N', 'D', 'AH0', 'L']), 'handmade': ('NN', ['HH', 'AE1', 'N', 'D', 'M', 'EY1', 'D']), 'handsaw': ('NN', ['HH', 'AE1', 'N', 'D', 'S', 'AO2']), 'handsome': ('NN', ['HH', 'AE1', 'N', 'S', 'AH0', 'M']), 'handsomely': ('RB', ['HH', 'AE1', 'N', 'S', 'AH0', 'M', 'L', 'IY0']), 'handwriting': ('VBG', ['HH', 'AE1', 'N', 'D', 'R', 'AY2', 'T', 'IH0', 'NG']), 'handy': ('NN', ['HH', 'AE1', 'N', 'D', 'IY0']), 'hanged': ('VBN', ['HH', 'AE1', 'NG', 'D']), 'hung': ('NN', ['HH', 'AH1', 'NG']), 'hanging': ('VBG', ['HH', 'AE1', 'NG', 'IH0', 'NG']), 'hang': ('NN', ['HH', 'AE1', 'NG']), 'hanger': ('NN', ['HH', 'AE1', 'NG', 'ER0']), 'hangman': ('NN', ['HH', 'AE1', 'NG', 'M', 'AH0', 'N']), 'hank': ('NN', ['HH', 'AE1', 'NG', 'K']), 'hankering': ('VBG', ['HH', 'AE1', 'NG', 'K', 'ER0', 'IH0', 'NG']), 'hanker': ('NN', ['HH', 'AE1', 'NG', 'K', 'ER0']), 'hanoverian': ('NN', ['HH', 'AE2', 'N', 'OW0', 'V', 'IH1', 'R', 'IY0', 'AH0', 'N']), 'hansard': ('NN', ['HH', 'AE1', 'N', 'S', 'ER0', 'D']), 'hanseatic': ('JJ', ['HH', 'AE2', 'N', 'S', 'IY0', 'AE1', 'T', 'IH0', 'K']), 'hansel': ('NN', ['HH', 'AE1', 'N', 'S', 'AH0', 'L']), 'hansom': ('NN', ['HH', 'AE1', 'N', 'S', 'AH0', 'M']), 'hap': ('NN', ['HH', 'AE1', 'P']), 'haphazard': ('NN', ['HH', 'AE0', 'P', 'HH', 'AE1', 'Z', 'ER0', 'D']), 'hapless': ('NN', ['HH', 'AE1', 'P', 'L', 'AH0', 'S']), 'happened': ('VBD', ['HH', 'AE1', 'P', 'AH0', 'N', 'D']), 'happening': ('VBG', ['HH', 'AE1', 'P', 'AH0', 'N', 'IH0', 'NG']), 'happen': ('VB', ['HH', 'AE1', 'P', 'AH0', 'N']), 'happily': ('RB', ['HH', 'AE1', 'P', 'AH0', 'L', 'IY0']), 'happiness': ('NN', ['HH', 'AE1', 'P', 'IY0', 'N', 'AH0', 'S']), 'happy': ('JJ', ['HH', 'AE1', 'P', 'IY0']), 'hara-kiri': ('NN', ['HH', 'AA1', 'R', 'IH0', 'K', 'IH1', 'R', 'IY0']), 'harangue': ('NN', ['HH', 'ER0', 'AE1', 'NG']), 'harangued': ('VBN', ['HH', 'ER0', 'AE1', 'NG', 
'D']), 'haranguing': ('VBG', ['HH', 'ER0', 'AE1', 'NG', 'IH0', 'NG']), 'harassed': ('VBN', ['HH', 'ER0', 'AE1', 'S', 'T']), 'harassing': ('VBG', ['HH', 'ER0', 'AE1', 'S', 'IH0', 'NG']), 'harass': ('NN', ['HH', 'ER0', 'AE1', 'S']), 'harasser': ('NN', ['HH', 'ER0', 'AE1', 'S', 'ER0']), 'harassment': ('NN', ['HH', 'ER0', 'AE1', 'S', 'M', 'AH0', 'N', 'T']), 'harbinger': ('NN', ['HH', 'AA1', 'R', 'B', 'IH0', 'N', 'JH', 'ER0']), 'harbor': ('NN', ['HH', 'AA1', 'R', 'B', 'ER0']), 'harbored': ('VBN', ['HH', 'AA1', 'R', 'B', 'ER0', 'D']), 'harboring': ('VBG', ['HH', 'AA1', 'R', 'B', 'ER0', 'IH0', 'NG']), 'hard': ('JJ', ['HH', 'AA1', 'R', 'D']), 'hardened': ('VBN', ['HH', 'AA1', 'R', 'D', 'AH0', 'N', 'D']), 'hardening': ('VBG', ['HH', 'AA1', 'R', 'D', 'AH0', 'N', 'IH0', 'NG']), 'harden': ('NN', ['HH', 'AA1', 'R', 'D', 'AH0', 'N']), 'hardener': ('NN', ['HH', 'AA1', 'R', 'D', 'AH0', 'N', 'ER0']), 'harder': ('NN', ['HH', 'AA1', 'R', 'D', 'ER0']), 'hardhead': ('NN', ['HH', 'AA1', 'R', 'D', 'HH', 'EH2', 'D']), 'hardly': ('RB', ['HH', 'AA1', 'R', 'D', 'L', 'IY0']), 'hardness': ('NN', ['HH', 'AA1', 'R', 'D', 'N', 'AH0', 'S']), 'hards': ('NNS', ['HH', 'AA1', 'R', 'D', 'Z']), 'hardship': ('NN', ['HH', 'AA1', 'R', 'D', 'SH', 'IH0', 'P']), 'hardware': ('NN', ['HH', 'AA1', 'R', 'D', 'W', 'EH2', 'R']), 'hardy': ('NN', ['HH', 'AA1', 'R', 'D', 'IY0']), 'hare': ('NN', ['HH', 'EH1', 'R']), 'harem': ('NN', ['HH', 'EH1', 'R', 'AH0', 'M']), 'hark': ('NN', ['HH', 'AA1', 'R', 'K']), 'harken': ('NN', ['HH', 'AA1', 'R', 'K', 'AH0', 'N']), 'harl': ('NN', ['HH', 'AA1', 'R', 'L']), 'harle': ('NN', ['HH', 'AA1', 'R', 'AH0', 'L']), 'harlequin': ('NN', ['HH', 'AA1', 'R', 'L', 'AH0', 'K', 'W', 'AH0', 'N']), 'harlot': ('NN', ['HH', 'AA1', 'R', 'L', 'AH0', 'T']), 'harm': ('NN', ['HH', 'AA1', 'R', 'M']), 'harmed': ('VBN', ['HH', 'AA1', 'R', 'M', 'D']), 'harming': ('VBG', ['HH', 'AA1', 'R', 'M', 'IH0', 'NG']), 'harmattan': ('NN', ['HH', 'AA2', 'R', 'M', 'AH0', 'T', 'AE1', 'N']), 'harmel': ('NN', ['HH', 'AA1', 
'R', 'M', 'AH0', 'L']), 'harmful': ('NN', ['HH', 'AA1', 'R', 'M', 'F', 'AH0', 'L']), 'harmless': ('NN', ['HH', 'AA1', 'R', 'M', 'L', 'AH0', 'S']), 'harmonic': ('NN', ['HH', 'AA0', 'R', 'M', 'AA1', 'N', 'IH0', 'K']), 'harmonica': ('NN', ['HH', 'AA0', 'R', 'M', 'AA1', 'N', 'IH0', 'K', 'AH0']), 'harmonics': ('NNS', ['HH', 'AA0', 'R', 'M', 'AA1', 'N', 'IH0', 'K', 'S']), 'harmonious': ('JJ', ['HH', 'AA0', 'R', 'M', 'OW1', 'N', 'IY0', 'AH0', 'S']), 'harmonium': ('NN', ['HH', 'AA0', 'R', 'M', 'OW1', 'N', 'IY0', 'AH0', 'M']), 'harmonization': ('NN', ['HH', 'AA2', 'R', 'M', 'AH0', 'N', 'IH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'harmonized': ('VBN', ['HH', 'AA1', 'R', 'M', 'AH0', 'N', 'AY2', 'Z', 'D']), 'harmonizing': ('VBG', ['HH', 'AA1', 'R', 'M', 'AH0', 'N', 'AY2', 'Z', 'IH0', 'NG']), 'harmonize': ('NN', ['HH', 'AA1', 'R', 'M', 'AH0', 'N', 'AY2', 'Z']), 'harmonies': ('NNS', ['HH', 'AA1', 'R', 'M', 'AH0', 'N', 'IY0', 'Z']), 'harmony': ('NN', ['HH', 'AA1', 'R', 'M', 'AH0', 'N', 'IY0']), 'harness': ('NN', ['HH', 'AA1', 'R', 'N', 'AH0', 'S']), 'harnessed': ('VBN', ['HH', 'AA1', 'R', 'N', 'AH0', 'S', 'T']), 'harnessing': ('VBG', ['HH', 'AA1', 'R', 'N', 'AH0', 'S', 'IH0', 'NG']), 'harp': ('NN', ['HH', 'AA1', 'R', 'P']), 'harped': ('NN', ['HH', 'AA1', 'R', 'P', 'T']), 'harping': ('VBG', ['HH', 'AA1', 'R', 'P', 'IH0', 'NG']), 'harper': ('NN', ['HH', 'AA1', 'R', 'P', 'ER0']), 'harpist': ('NN', ['HH', 'AA1', 'R', 'P', 'IH0', 'S', 'T']), 'harpoon': ('NN', ['HH', 'AA0', 'R', 'P', 'UW1', 'N']), 'harpsichord': ('NN', ['HH', 'AA1', 'R', 'P', 'S', 'AH0', 'K', 'AO2', 'R', 'D']), 'harquebus': ('NN', ['HH', 'AA1', 'R', 'K', 'W', 'AH0', 'B', 'AH0', 'S']), 'harre': ('NN', ['HH', 'AE1', 'R']), 'harrier': ('NN', ['HH', 'EH1', 'R', 'IY0', 'ER0']), 'harrow': ('NN', ['HH', 'AE1', 'R', 'OW0']), 'harrowing': ('VBG', ['HH', 'EH1', 'R', 'OW0', 'IH0', 'NG']), 'harrower': ('NN', ['HH', 'AE1', 'R', 'OW0', 'W', 'ER0']), 'harried': ('VBN', ['HH', 'EH1', 'R', 'IY0', 'D']), 'harry': ('NN', ['HH', 'EH1', 'R', 
'IY0']), 'harsh': ('NN', ['HH', 'AA1', 'R', 'SH']), 'harshly': ('RB', ['HH', 'AA1', 'R', 'SH', 'L', 'IY0']), 'harshness': ('NN', ['HH', 'AA1', 'R', 'SH', 'N', 'AH0', 'S']), 'hart': ('NN', ['HH', 'AA1', 'R', 'T']), 'harten': ('NN', ['HH', 'AA1', 'R', 'T', 'AH0', 'N']), 'hartford': ('NN', ['HH', 'AA1', 'R', 'T', 'F', 'ER0', 'D']), 'hartshorn': ('NN', ['HH', 'AA1', 'R', 'T', 'S', 'HH', 'AO2', 'R', 'N']), 'harvest': ('NN', ['HH', 'AA1', 'R', 'V', 'AH0', 'S', 'T']), 'harvested': ('VBN', ['HH', 'AA1', 'R', 'V', 'AH0', 'S', 'T', 'AH0', 'D']), 'harvesting': ('VBG', ['HH', 'AA1', 'R', 'V', 'AH0', 'S', 'T', 'IH0', 'NG']), 'harvester': ('NN', ['HH', 'AA1', 'R', 'V', 'AH0', 'S', 'T', 'ER0']), 'has': ('VBZ', ['HH', 'AE1', 'Z']), 'hase': ('NN', ['HH', 'EY1', 'Z']), 'hash': ('NN', ['HH', 'AE1', 'SH']), 'hashed': ('VBN', ['HH', 'AE1', 'SH', 'T']), 'hashing': ('VBG', ['HH', 'AE1', 'SH', 'IH0', 'NG']), 'hashish': ('NN', ['HH', 'AE1', 'SH', 'IH0', 'SH']), 'hast': ('NN', ['HH', 'AE1', 'S', 'T']), 'haste': ('NN', ['HH', 'EY1', 'S', 'T']), 'hasting': ('VBG', ['HH', 'EY1', 'S', 'T', 'IH0', 'NG']), 'hastened': ('VBN', ['HH', 'EY1', 'S', 'AH0', 'N', 'D']), 'hastening': ('VBG', ['HH', 'EY1', 'S', 'AH0', 'N', 'IH0', 'NG']), 'hasten': ('NN', ['HH', 'EY1', 'S', 'AH0', 'N']), 'hastily': ('RB', ['HH', 'EY1', 'S', 'T', 'AH0', 'L', 'IY0']), 'hastings': ('NNS', ['HH', 'EY1', 'S', 'T', 'IH0', 'NG', 'Z']), 'hasty': ('NN', ['HH', 'EY1', 'S', 'T', 'IY0']), 'hat': ('NN', ['HH', 'AE1', 'T']), 'hatched': ('VBN', ['HH', 'AE1', 'CH', 'T']), 'hatching': ('VBG', ['HH', 'AE1', 'CH', 'IH0', 'NG']), 'hatch': ('NN', ['HH', 'AE1', 'CH']), 'hatchel': ('NN', ['HH', 'AE1', 'CH', 'AH0', 'L']), 'hatcher': ('NN', ['HH', 'AE1', 'CH', 'ER0']), 'hatchery': ('NN', ['HH', 'AE1', 'CH', 'ER0', 'IY0']), 'hatchet': ('NN', ['HH', 'AE1', 'CH', 'AH0', 'T']), 'hated': ('VBN', ['HH', 'EY1', 'T', 'AH0', 'D']), 'hating': ('VBG', ['HH', 'EY1', 'T', 'IH0', 'NG']), 'hate': ('NN', ['HH', 'EY1', 'T']), 'hateful': ('NN', ['HH', 'EY1', 'T', 
'F', 'AH0', 'L']), 'hater': ('NN', ['HH', 'EY1', 'T', 'ER0']), 'hath': ('NN', ['HH', 'AE1', 'TH']), 'hatred': ('VBN', ['HH', 'EY1', 'T', 'R', 'AH0', 'D']), 'hatter': ('NN', ['HH', 'AE1', 'T', 'ER0']), 'haugh': ('NN', ['HH', 'AO1']), 'haught': ('NN', ['HH', 'AO1', 'T']), 'haughtily': ('RB', ['HH', 'AO1', 'T', 'IH0', 'L', 'IY0']), 'haughty': ('NN', ['HH', 'AO1', 'T', 'IY0']), 'hauled': ('VBN', ['HH', 'AO1', 'L', 'D']), 'hauling': ('VBG', ['HH', 'AO1', 'L', 'IH0', 'NG']), 'haul': ('NN', ['HH', 'AO1', 'L']), 'hauler': ('NN', ['HH', 'AO1', 'L', 'ER0']), 'hauls': ('NN', ['HH', 'AO1', 'L', 'Z']), 'haunted': ('VBN', ['HH', 'AO1', 'N', 'T', 'AH0', 'D']), 'haunting': ('VBG', ['HH', 'AO1', 'N', 'T', 'IH0', 'NG']), 'haunt': ('NN', ['HH', 'AO1', 'N', 'T']), 'hausen': ('NN', ['HH', 'AW1', 'Z', 'AH0', 'N']), 'haut': ('NN', ['HH', 'AO1', 'T']), 'havana': ('NN', ['HH', 'AH0', 'V', 'AE1', 'N', 'AH0']), 'having': ('VBG', ['HH', 'AE1', 'V', 'IH0', 'NG']), 'have': ('VB', ['HH', 'AE1', 'V']), 'havelock': ('NN', ['HH', 'AE1', 'V', 'L', 'AA2', 'K']), 'haven': ('NN', ['HH', 'EY1', 'V', 'AH0', 'N']), 'havener': ('NN', ['HH', 'AE1', 'V', 'IY0', 'N', 'ER0']), 'haver': ('NN', ['HH', 'EH1', 'V', 'ER0']), 'havoc': ('NN', ['HH', 'AE1', 'V', 'AH0', 'K']), 'haw': ('NN', ['HH', 'AO1']), 'hawing': ('VBG', ['HH', 'AO1', 'IH0', 'NG']), 'hawaiian': ('JJ', ['HH', 'AH0', 'W', 'AY1', 'AH0', 'N']), 'hawk': ('NN', ['HH', 'AO1', 'K']), 'hawked': ('VBN', ['HH', 'AO1', 'K', 'T']), 'hawking': ('VBG', ['HH', 'AO1', 'K', 'IH0', 'NG']), 'hawkbill': ('NN', ['HH', 'AO1', 'K', 'B', 'IH2', 'L']), 'hawker': ('NN', ['HH', 'AO1', 'K', 'ER0']), 'hawkey': ('NN', ['HH', 'AO1', 'K', 'IY2']), 'hawthorn': ('NN', ['HH', 'AO1', 'TH', 'AO2', 'R', 'N']), 'hay': ('NN', ['HH', 'EY1']), 'haycock': ('NN', ['HH', 'EY1', 'K', 'AA2', 'K']), 'hayfield': ('NN', ['HH', 'EY1', 'F', 'IY2', 'L', 'D']), 'hayloft': ('NN', ['HH', 'EY1', 'L', 'AO2', 'F', 'T']), 'haymaker': ('NN', ['HH', 'EY1', 'M', 'EY2', 'K', 'ER0']), 'haystack': ('NN', ['HH', 
'EY1', 'S', 'T', 'AE2', 'K']), 'hayward': ('NN', ['HH', 'EY1', 'W', 'ER0', 'D']), 'hazard': ('NN', ['HH', 'AE1', 'Z', 'ER0', 'D']), 'hazardous': ('JJ', ['HH', 'AE1', 'Z', 'ER0', 'D', 'AH0', 'S']), 'haze': ('NN', ['HH', 'EY1', 'Z']), 'hazing': ('VBG', ['HH', 'EY1', 'Z', 'IH0', 'NG']), 'hazel': ('NN', ['HH', 'EY1', 'Z', 'AH0', 'L']), 'hazelnut': ('NN', ['HH', 'EY1', 'Z', 'AH0', 'L', 'N', 'AH2', 'T']), 'hazle': ('NN', ['HH', 'EY1', 'Z', 'AH0', 'L']), 'hazy': ('NN', ['HH', 'EY1', 'Z', 'IY0']), 'he': ('PRP', ['HH', 'IY1']), 'head': ('NN', ['HH', 'EH1', 'D']), 'headed': ('VBN', ['HH', 'EH1', 'D', 'AH0', 'D']), 'heading': ('VBG', ['HH', 'EH1', 'D', 'IH0', 'NG']), 'headache': ('NN', ['HH', 'EH1', 'D', 'EY2', 'K']), 'headband': ('NN', ['HH', 'EH1', 'D', 'B', 'AE2', 'N', 'D']), 'headdress': ('NN', ['HH', 'EH1', 'D', 'R', 'EH2', 'S']), 'header': ('NN', ['HH', 'EH1', 'D', 'ER0']), 'headfirst': ('NN', ['HH', 'EH1', 'D', 'F', 'ER1', 'S', 'T']), 'headgear': ('NN', ['HH', 'EH1', 'D', 'G', 'IH2', 'R']), 'headless': ('NN', ['HH', 'EH1', 'D', 'L', 'AH0', 'S']), 'headlight': ('NN', ['HH', 'EH1', 'D', 'L', 'AY2', 'T']), 'headline': ('NN', ['HH', 'EH1', 'D', 'L', 'AY2', 'N']), 'headlong': ('NN', ['HH', 'EH1', 'D', 'L', 'AO2', 'NG']), 'headman': ('NN', ['HH', 'EH1', 'D', 'M', 'AH0', 'N']), 'headquarters': ('NNS', ['HH', 'EH1', 'D', 'K', 'W', 'AO2', 'R', 'T', 'ER0', 'Z']), 'headroom': ('NN', ['HH', 'EH1', 'D', 'R', 'UW2', 'M']), 'headship': ('NN', ['HH', 'EH1', 'D', 'SH', 'IH2', 'P']), 'headsman': ('NN', ['HH', 'EH1', 'D', 'Z', 'M', 'AH0', 'N']), 'headstone': ('NN', ['HH', 'EH1', 'D', 'S', 'T', 'OW2', 'N']), 'headstrong': ('NN', ['HH', 'EH1', 'D', 'S', 'T', 'R', 'AO2', 'NG']), 'headway': ('NN', ['HH', 'EH1', 'D', 'W', 'EY2']), 'heady': ('NN', ['HH', 'EH1', 'D', 'IY0']), 'heal': ('NN', ['HH', 'IY1', 'L']), 'healed': ('VBN', ['HH', 'IY1', 'L', 'D']), 'healing': ('VBG', ['HH', 'IY1', 'L', 'IH0', 'NG']), 'heald': ('NN', ['HH', 'IY1', 'L', 'D']), 'health': ('NN', ['HH', 'EH1', 'L', 'TH']), 
'healthful': ('NN', ['HH', 'EH1', 'L', 'TH', 'F', 'AH0', 'L']), 'healthiness': ('NN', ['HH', 'EH1', 'L', 'TH', 'IY0', 'N', 'AH0', 'S']), 'healthy': ('JJ', ['HH', 'EH1', 'L', 'TH', 'IY0']), 'heap': ('NN', ['HH', 'IY1', 'P']), 'heaped': ('NN', ['HH', 'IY1', 'P', 'T']), 'heaping': ('VBG', ['HH', 'IY1', 'P', 'IH0', 'NG']), 'heard': ('NN', ['HH', 'ER1', 'D']), 'hearing': ('NN', ['HH', 'IY1', 'R', 'IH0', 'NG']), 'hear': ('NN', ['HH', 'IY1', 'R']), 'hearer': ('NN', ['HH', 'IY1', 'R', 'ER0']), 'hearsay': ('NN', ['HH', 'IY1', 'R', 'S', 'EY2']), 'hearse': ('NN', ['HH', 'ER1', 'S']), 'heart': ('NN', ['HH', 'AA1', 'R', 'T']), 'heartache': ('NN', ['HH', 'AA1', 'R', 'T', 'EY2', 'K']), 'heartbreak': ('NN', ['HH', 'AA1', 'R', 'T', 'B', 'R', 'EY2', 'K']), 'heartbreaking': ('VBG', ['HH', 'AA1', 'R', 'T', 'B', 'R', 'EY2', 'K', 'IH0', 'NG']), 'heartbroken': ('NN', ['HH', 'AA1', 'R', 'T', 'B', 'R', 'OW2', 'K', 'AH0', 'N']), 'heartburn': ('NN', ['HH', 'AA1', 'R', 'T', 'B', 'ER2', 'N']), 'hearted': ('VBN', ['HH', 'AA1', 'R', 'T', 'AH0', 'D']), 'hearten': ('NN', ['HH', 'AA1', 'R', 'T', 'AH0', 'N']), 'heartfelt': ('NN', ['HH', 'AA1', 'R', 'T', 'F', 'EH2', 'L', 'T']), 'hearth': ('NN', ['HH', 'AA1', 'R', 'TH']), 'heartily': ('RB', ['HH', 'AA1', 'R', 'T', 'AH0', 'L', 'IY0']), 'heartless': ('NN', ['HH', 'AA1', 'R', 'T', 'L', 'AH0', 'S']), 'heartwood': ('NN', ['HH', 'AA1', 'R', 'T', 'W', 'UH2', 'D']), 'hearty': ('NN', ['HH', 'AA1', 'R', 'T', 'IY0']), 'heat': ('NN', ['HH', 'IY1', 'T']), 'heated': ('VBN', ['HH', 'IY1', 'T', 'AH0', 'D']), 'heating': ('NN', ['HH', 'IY1', 'T', 'IH0', 'NG']), 'heater': ('NN', ['HH', 'IY1', 'T', 'ER0']), 'heath': ('NN', ['HH', 'IY1', 'TH']), 'heathen': ('NN', ['HH', 'IY1', 'DH', 'AH0', 'N']), 'heather': ('NN', ['HH', 'EH1', 'DH', 'ER0']), 'heaved': ('VBN', ['HH', 'IY1', 'V', 'D']), 'hove': ('NN', ['HH', 'OW1', 'V']), 'hoven': ('NN', ['HH', 'OW1', 'V', 'AH0', 'N']), 'heaving': ('VBG', ['HH', 'IY1', 'V', 'IH0', 'NG']), 'heave': ('NN', ['HH', 'IY1', 'V']), 'heaven': 
('NN', ['HH', 'EH1', 'V', 'AH0', 'N']), 'heavenly': ('RB', ['HH', 'EH1', 'V', 'AH0', 'N', 'L', 'IY0']), 'heaves': ('NNS', ['HH', 'IY1', 'V', 'Z']), 'heavily': ('RB', ['HH', 'EH1', 'V', 'AH0', 'L', 'IY0']), 'heavy': ('JJ', ['HH', 'EH1', 'V', 'IY0']), 'hebe': ('NN', ['HH', 'IY1', 'B']), 'hebrew': ('NN', ['HH', 'IY1', 'B', 'R', 'UW0']), 'heck': ('NN', ['HH', 'EH1', 'K']), 'heckle': ('NN', ['HH', 'EH1', 'K', 'AH0', 'L']), 'hectare': ('NN', ['HH', 'EH1', 'K', 'T', 'AA2', 'R']), 'hectic': ('JJ', ['HH', 'EH1', 'K', 'T', 'IH0', 'K']), 'hectograph': ('NN', ['HH', 'EH1', 'K', 'T', 'AH0', 'G', 'R', 'AE2', 'F']), 'hector': ('NN', ['HH', 'EH1', 'K', 'T', 'ER0']), 'hectoring': ('VBG', ['HH', 'EH1', 'K', 'T', 'ER0', 'IH0', 'NG']), 'hedge': ('NN', ['HH', 'EH1', 'JH']), 'hedged': ('VBN', ['HH', 'EH1', 'JH', 'D']), 'hedging': ('VBG', ['HH', 'EH1', 'JH', 'IH0', 'NG']), 'hedgehog': ('NN', ['HH', 'EH1', 'JH', 'HH', 'AA2', 'G']), 'hedger': ('NN', ['HH', 'EH1', 'JH', 'ER0']), 'hedonic': ('NN', ['HH', 'AH0', 'D', 'AA1', 'N', 'IH0', 'K']), 'hedonistic': ('JJ', ['HH', 'IY2', 'D', 'AH0', 'N', 'IH1', 'S', 'T', 'IH0', 'K']), 'heeded': ('VBD', ['HH', 'IY1', 'D', 'AH0', 'D']), 'heeding': ('VBG', ['HH', 'IY1', 'D', 'IH0', 'NG']), 'heed': ('NN', ['HH', 'IY1', 'D']), 'heel': ('NN', ['HH', 'IY1', 'L']), 'heeled': ('VBN', ['HH', 'IY1', 'L', 'D']), 'heeling': ('VBG', ['HH', 'IY1', 'L', 'IH0', 'NG']), 'heer': ('NN', ['HH', 'IY1', 'ER0']), 'heft': ('NN', ['HH', 'EH1', 'F', 'T']), 'hefty': ('NN', ['HH', 'EH1', 'F', 'T', 'IY0']), 'hegelian': ('NN', ['HH', 'IY0', 'JH', 'IY1', 'L', 'IY0', 'AH0', 'N']), 'hegemonic': ('NN', ['HH', 'EH2', 'G', 'AH0', 'M', 'AA1', 'N', 'IH0', 'K']), 'hegemony': ('NN', ['HH', 'IY0', 'JH', 'EH1', 'M', 'AH0', 'N', 'IY0']), 'hegge': ('NN', ['HH', 'EH1', 'G']), 'heifer': ('NN', ['HH', 'AY1', 'F', 'ER0']), 'height': ('NN', ['HH', 'AY1', 'T']), 'heightened': ('VBN', ['HH', 'AY1', 'T', 'AH0', 'N', 'D']), 'heightening': ('VBG', ['HH', 'AY1', 'T', 'AH0', 'N', 'IH0', 'NG']), 'heighten': 
('NN', ['HH', 'AY1', 'T', 'AH0', 'N']), 'heinous': ('JJ', ['HH', 'EY1', 'N', 'AH0', 'S']), 'heir': ('NN', ['EH1', 'R']), 'heiress': ('NN', ['EH1', 'R', 'AH0', 'S']), 'heirloom': ('NN', ['EH1', 'R', 'L', 'UW2', 'M']), 'held': ('NN', ['HH', 'EH1', 'L', 'D']), 'helena': ('NN', ['HH', 'EH1', 'L', 'AH0', 'N', 'AH0']), 'helical': ('JJ', ['HH', 'EH1', 'L', 'IH0', 'K', 'AH0', 'L']), 'helicon': ('NN', ['HH', 'EH1', 'L', 'IH0', 'K', 'AA2', 'N']), 'heliotrope': ('NN', ['HH', 'IY1', 'L', 'IY0', 'AH0', 'T', 'R', 'OW2', 'P']), 'helium': ('NN', ['HH', 'IY1', 'L', 'IY0', 'AH0', 'M']), 'helix': ('NN', ['HH', 'IY1', 'L', 'IH0', 'K', 'S']), 'hell': ('NN', ['HH', 'EH1', 'L']), 'hellenic': ('NN', ['HH', 'AH0', 'L', 'EH1', 'N', 'IH0', 'K']), 'hellenism': ('NN', ['HH', 'EH1', 'L', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'hellenistic': ('JJ', ['HH', 'EH2', 'L', 'AH0', 'N', 'IH1', 'S', 'T', 'IH0', 'K']), 'hellenize': ('NN', ['HH', 'EH1', 'L', 'AH0', 'N', 'AY2', 'Z']), 'hellier': ('NN', ['HH', 'EH1', 'L', 'Y', 'ER0']), 'hellish': ('NN', ['HH', 'EH1', 'L', 'IH0', 'SH']), 'hello': ('NN', ['HH', 'AH0', 'L', 'OW1']), 'helm': ('NN', ['HH', 'EH1', 'L', 'M']), 'helming': ('VBG', ['HH', 'EH1', 'L', 'M', 'IH0', 'NG']), 'helmet': ('NN', ['HH', 'EH1', 'L', 'M', 'AH0', 'T']), 'helmeted': ('VBN', ['HH', 'EH1', 'L', 'M', 'AH0', 'T', 'IH0', 'D']), 'helminth': ('NN', ['HH', 'EH1', 'L', 'M', 'IH0', 'N', 'TH']), 'helmsman': ('NN', ['HH', 'EH1', 'L', 'M', 'Z', 'M', 'AE2', 'N']), 'helot': ('NN', ['HH', 'EH1', 'L', 'AH0', 'T']), 'helotism': ('NN', ['HH', 'EH1', 'L', 'AH0', 'T', 'IH2', 'Z', 'AH0', 'M']), 'helotry': ('NN', ['HH', 'EH1', 'L', 'AH0', 'T', 'R', 'IY0']), 'helped': ('VBD', ['HH', 'EH1', 'L', 'P', 'T']), 'helping': ('VBG', ['HH', 'EH1', 'L', 'P', 'IH0', 'NG']), 'help': ('NN', ['HH', 'EH1', 'L', 'P']), 'helper': ('NN', ['HH', 'EH1', 'L', 'P', 'ER0']), 'helpful': ('NN', ['HH', 'EH1', 'L', 'P', 'F', 'AH0', 'L']), 'helpless': ('NN', ['HH', 'EH1', 'L', 'P', 'L', 'AH0', 'S']), 'hem': ('NN', ['HH', 'EH1', 
'M']), 'hemmed': ('VBN', ['HH', 'EH1', 'M', 'D']), 'hemming': ('VBG', ['HH', 'EH1', 'M', 'IH0', 'NG']), 'hematite': ('NN', ['HH', 'EH1', 'M', 'AH0', 'T', 'AY2', 'T']), 'hematology': ('NN', ['HH', 'EH2', 'M', 'AH0', 'T', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'hemiplegia': ('NN', ['HH', 'EH2', 'M', 'AH0', 'P', 'L', 'IY1', 'JH', 'IY0', 'AH0']), 'hemisphere': ('RB', ['HH', 'EH1', 'M', 'IH0', 'S', 'F', 'IH2', 'R']), 'hemispheric': ('NN', ['HH', 'EH2', 'M', 'AH0', 'S', 'F', 'IH1', 'R', 'IH0', 'K']), 'hemlock': ('NN', ['HH', 'EH1', 'M', 'L', 'AA2', 'K']), 'hemmer': ('NN', ['HH', 'EH1', 'M', 'ER0']), 'hemoglobin': ('NN', ['HH', 'IY2', 'M', 'AH0', 'G', 'L', 'OW1', 'B', 'AH0', 'N']), 'hemophilia': ('NN', ['HH', 'IY2', 'M', 'AH0', 'F', 'IY1', 'L', 'IY0', 'AH0']), 'hemorrhage': ('NN', ['HH', 'EH1', 'M', 'ER0', 'IH0', 'JH']), 'hemorrhagic': ('NN', ['HH', 'EH2', 'M', 'ER0', 'AE1', 'G', 'IH0', 'K']), 'hemorrhoids': ('NNS', ['HH', 'EH1', 'M', 'ER0', 'OY2', 'D', 'Z']), 'hemp': ('NN', ['HH', 'EH1', 'M', 'P']), 'hempen': ('NN', ['HH', 'EH1', 'M', 'P', 'AH0', 'N']), 'hen': ('NN', ['HH', 'EH1', 'N']), 'henbane': ('NN', ['HH', 'EH1', 'N', 'B', 'EY2', 'N']), 'hence': ('NN', ['HH', 'EH1', 'N', 'S']), 'henceforth': ('NN', ['HH', 'EH1', 'N', 'S', 'F', 'AO1', 'R', 'TH']), 'henchman': ('NN', ['HH', 'EH1', 'N', 'CH', 'M', 'AH0', 'N']), 'hendy': ('NN', ['HH', 'EH1', 'N', 'D', 'IY0']), 'heng': ('NN', ['HH', 'EH1', 'NG']), 'henhouse': ('NN', ['HH', 'EH1', 'N', 'HH', 'AW2', 'S']), 'henna': ('NN', ['HH', 'EH1', 'N', 'AH0']), 'hennes': ('NNS', ['HH', 'EH1', 'N', 'Z']), 'henpecked': ('VBN', ['HH', 'EH1', 'N', 'P', 'EH2', 'K', 'T']), 'henpeck': ('NN', ['HH', 'EH1', 'N', 'P', 'EH2', 'K']), 'henry': ('NN', ['HH', 'EH1', 'N', 'R', 'IY0']), 'hep': ('NN', ['HH', 'EH1', 'P']), 'hepatic': ('JJ', ['HH', 'AH0', 'P', 'AE1', 'T', 'IH0', 'K']), 'hepatitis': ('NN', ['HH', 'EH2', 'P', 'AH0', 'T', 'AY1', 'T', 'AH0', 'S']), 'hepper': ('NN', ['HH', 'EH1', 'P', 'ER0']), 'her': ('PRP$', ['HH', 'ER0']), 'here': ('RB', ['HH', 
'IY1', 'R']), 'herald': ('NN', ['HH', 'EH1', 'R', 'AH0', 'L', 'D']), 'heralded': ('VBD', ['HH', 'EH1', 'R', 'AH0', 'L', 'D', 'IH0', 'D']), 'heralding': ('VBG', ['HH', 'EH1', 'R', 'AH0', 'L', 'D', 'IH0', 'NG']), 'heraldic': ('NN', ['HH', 'EH0', 'R', 'AE1', 'L', 'D', 'IH0', 'K']), 'heraldry': ('NN', ['HH', 'EH1', 'R', 'AH0', 'L', 'D', 'R', 'IY0']), 'herb': ('NN', ['ER1', 'B']), 'herbaceous': ('JJ', ['ER0', 'B', 'EY1', 'SH', 'AH0', 'S']), 'herbal': ('NN', ['ER1', 'B', 'AH0', 'L']), 'herbalist': ('NN', ['ER1', 'B', 'AH0', 'L', 'AH0', 'S', 'T']), 'herbariums': ('NNS', ['HH', 'ER0', 'B', 'EH1', 'R', 'IY0', 'AH0', 'M', 'Z']), 'herbarium': ('NN', ['HH', 'ER0', 'B', 'EH1', 'R', 'IY0', 'AH0', 'M']), 'herber': ('NN', ['HH', 'ER1', 'B', 'ER0']), 'herbivore': ('NN', ['HH', 'ER1', 'B', 'IH0', 'V', 'AO2', 'R']), 'herbivorous': ('JJ', ['HH', 'ER0', 'B', 'IH1', 'V', 'ER0', 'AH0', 'S']), 'herculean': ('NN', ['HH', 'ER0', 'K', 'Y', 'UW1', 'L', 'IY0', 'AH0', 'N']), 'hercules': ('NNS', ['HH', 'ER1', 'K', 'Y', 'AH0', 'L', 'IY2', 'Z']), 'herd': ('NN', ['HH', 'ER1', 'D']), 'herded': ('VBD', ['HH', 'ER1', 'D', 'IH0', 'D']), 'herding': ('VBG', ['HH', 'ER1', 'D', 'IH0', 'NG']), 'herder': ('NN', ['HH', 'EH1', 'R', 'D', 'ER0']), 'herdman': ('NN', ['HH', 'ER1', 'D', 'M', 'AH0', 'N']), 'women': ('NNS', ['W', 'IH1', 'M', 'AH0', 'N']), 'hereabouts': ('NNS', ['HH', 'IH1', 'R', 'AH0', 'B', 'AW2', 'T', 'S']), 'hereafter': ('NN', ['HH', 'IH0', 'R', 'AE1', 'F', 'T', 'ER0']), 'hereby': ('NN', ['HH', 'IH0', 'R', 'B', 'AY1']), 'hereditary': ('NN', ['HH', 'ER0', 'EH1', 'D', 'AH0', 'T', 'EH2', 'R', 'IY0']), 'heredity': ('NN', ['HH', 'ER0', 'EH1', 'D', 'AH0', 'T', 'IY0']), 'hereford': ('NN', ['HH', 'EH1', 'R', 'AH0', 'F', 'ER0', 'D']), 'herein': ('NN', ['HH', 'IH0', 'R', 'IH1', 'N']), 'heresy': ('NN', ['HH', 'EH1', 'R', 'AH0', 'S', 'IY0']), 'heretic': ('JJ', ['HH', 'EH1', 'R', 'AH0', 'T', 'IH0', 'K']), 'heretical': ('JJ', ['HH', 'ER0', 'EH1', 'T', 'IH0', 'K', 'AH0', 'L']), 'heretofore': ('NN', ['HH', 'IH2', 
'R', 'T', 'AH0', 'F', 'AO1', 'R']), 'herewith': ('NN', ['HH', 'IH1', 'R', 'W', 'IH1', 'TH']), 'heritable': ('JJ', ['HH', 'EH1', 'R', 'AH0', 'T', 'AH0', 'B', 'AH0', 'L']), 'heritage': ('NN', ['HH', 'EH1', 'R', 'AH0', 'T', 'AH0', 'JH']), 'herl': ('NN', ['HH', 'ER1', 'L']), 'herling': ('VBG', ['HH', 'ER1', 'L', 'IH0', 'NG']), 'hermaphrodite': ('NN', ['HH', 'ER0', 'M', 'AE1', 'F', 'R', 'AH0', 'D', 'AY2', 'T']), 'hermaphroditic': ('JJ', ['HH', 'ER0', 'M', 'AE2', 'F', 'R', 'AH0', 'D', 'IH1', 'T', 'IH0', 'K']), 'hermes': ('NNS', ['HH', 'ER1', 'M', 'IY0', 'Z']), 'hermetically': ('RB', ['HH', 'ER0', 'M', 'EH1', 'T', 'IH0', 'K', 'AH0', 'L', 'IY0']), 'hermit': ('NN', ['HH', 'ER1', 'M', 'AH0', 'T']), 'hermitage': ('NN', ['HH', 'ER1', 'M', 'AH0', 'T', 'AH0', 'JH']), 'hern': ('NN', ['HH', 'ER1', 'N']), 'herne': ('NN', ['HH', 'ER1', 'N']), 'hernia': ('NN', ['HH', 'ER1', 'N', 'IY0', 'AH0']), 'heroes': ('NNS', ['HH', 'IH1', 'R', 'OW0', 'Z']), 'hero': ('NN', ['HH', 'IH1', 'R', 'OW0']), 'heroic': ('NN', ['HH', 'IH0', 'R', 'OW1', 'IH0', 'K']), 'heroine': ('NN', ['HH', 'EH1', 'R', 'OW0', 'AH0', 'N']), 'heroism': ('NN', ['HH', 'EH1', 'R', 'OW0', 'IH2', 'Z', 'AH0', 'M']), 'heron': ('NN', ['HH', 'EH1', 'R', 'AH0', 'N']), 'herpes': ('NNS', ['HH', 'ER1', 'P', 'IY0', 'Z']), 'herr': ('NN', ['HH', 'EH1', 'R']), 'herring': ('VBG', ['HH', 'EH1', 'R', 'IH0', 'NG']), 'hers': ('NNS', ['HH', 'ER0', 'Z']), 'herschel': ('NN', ['HH', 'ER1', 'SH', 'AH0', 'L']), 'herself': ('NN', ['HH', 'ER0', 'S', 'EH1', 'L', 'F']), 'hert': ('NN', ['HH', 'ER1', 'T']), 'hesitancy': ('NN', ['HH', 'EH1', 'Z', 'IH0', 'T', 'AH0', 'N', 'S', 'IY0']), 'hesitant': ('NN', ['HH', 'EH1', 'Z', 'IH0', 'T', 'AH0', 'N', 'T']), 'hesitantly': ('RB', ['HH', 'EH1', 'Z', 'IH0', 'T', 'AH0', 'N', 'T', 'L', 'IY0']), 'hesitated': ('VBN', ['HH', 'EH1', 'Z', 'IH0', 'T', 'EY2', 'T', 'IH0', 'D']), 'hesitating': ('VBG', ['HH', 'EH1', 'Z', 'AH0', 'T', 'EY2', 'T', 'IH0', 'NG']), 'hesitate': ('NN', ['HH', 'EH1', 'Z', 'AH0', 'T', 'EY2', 'T']), 
'hesitation': ('NN', ['HH', 'EH2', 'Z', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'hesper': ('NN', ['HH', 'EH1', 'S', 'P', 'ER0']), 'hessian': ('NN', ['HH', 'EH1', 'SH', 'AH0', 'N']), 'hessite': ('NN', ['HH', 'EH1', 'S', 'AY0', 'T']), 'heterocercal': ('JJ', ['HH', 'EH2', 'T', 'ER0', 'OW0', 'S', 'ER1', 'K', 'AH0', 'L']), 'heterodox': ('NN', ['HH', 'EH2', 'T', 'ER0', 'AH0', 'D', 'AA2', 'K', 'S']), 'heterodoxy': ('NN', ['HH', 'EH1', 'T', 'ER0', 'AH0', 'D', 'AA2', 'K', 'S', 'IY0']), 'heterogeneity': ('NN', ['HH', 'EH2', 'T', 'ER0', 'AH0', 'JH', 'IH0', 'N', 'IY1', 'AH0', 'T', 'IY0']), 'heterogeneous': ('JJ', ['HH', 'EH2', 'T', 'ER0', 'AH0', 'JH', 'IY1', 'N', 'Y', 'AH0', 'S']), 'heterosis': ('NN', ['HH', 'EH2', 'T', 'ER0', 'OW1', 'S', 'AH0', 'S']), 'heterosporous': ('JJ', ['HH', 'EH2', 'T', 'ER0', 'AA1', 'S', 'P', 'ER0', 'AH0', 'S']), 'hetman': ('NN', ['HH', 'EH1', 'T', 'M', 'AH0', 'N']), 'heuristic': ('JJ', ['HH', 'Y', 'UH0', 'R', 'IH1', 'S', 'T', 'IH0', 'K']), 'hewn': ('NN', ['HH', 'Y', 'UW1', 'N']), 'hewing': ('VBG', ['HH', 'Y', 'UW1', 'IH0', 'NG']), 'hew': ('NN', ['HH', 'Y', 'UW1']), 'hewe': ('NN', ['HH', 'Y', 'UW1']), 'hewer': ('NN', ['HH', 'Y', 'UW1', 'ER0']), 'hexagon': ('NN', ['HH', 'EH1', 'K', 'S', 'AH0', 'G', 'AA2', 'N']), 'hexagonal': ('NN', ['HH', 'EH0', 'K', 'S', 'AE1', 'G', 'AH0', 'N', 'AH0', 'L']), 'hexane': ('NN', ['HH', 'EH1', 'K', 'S', 'EY0', 'N']), 'hey': ('NN', ['HH', 'EY1']), 'heyday': ('NN', ['HH', 'EY1', 'D', 'EY2']), 'heyne': ('NN', ['HH', 'EY1', 'N']), 'hiatus': ('NN', ['HH', 'AY0', 'EY1', 'T', 'AH0', 'S']), 'hibernate': ('NN', ['HH', 'AY1', 'B', 'ER0', 'N', 'EY2', 'T']), 'hibernation': ('NN', ['HH', 'AY2', 'B', 'ER0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'hiccough': ('NN', ['HH', 'IH1', 'K', 'AH0', 'P']), 'hickory': ('NN', ['HH', 'IH1', 'K', 'ER0', 'IY0']), 'hid': ('NN', ['HH', 'IH1', 'D']), 'hidalgo': ('NN', ['HH', 'AH0', 'D', 'AE1', 'L', 'G', 'OW2']), 'hidden': ('NN', ['HH', 'IH1', 'D', 'AH0', 'N']), 'hiding': ('NN', ['HH', 'AY1', 'D', 'IH0', 'NG']), 
'hide': ('NN', ['HH', 'AY1', 'D']), 'hidebound': ('NN', ['HH', 'AY1', 'D', 'B', 'AW2', 'N', 'D']), 'hideous': ('JJ', ['HH', 'IH1', 'D', 'IY0', 'AH0', 'S']), 'hider': ('NN', ['HH', 'AY1', 'D', 'ER0']), 'hiems': ('NNS', ['HH', 'IY1', 'M', 'Z']), 'hierarchical': ('JJ', ['HH', 'AY2', 'R', 'AA1', 'R', 'K', 'AH0', 'K', 'AH0', 'L']), 'hierarchies': ('NNS', ['HH', 'AY1', 'R', 'AA2', 'R', 'K', 'IY0', 'Z']), 'hierarchy': ('NN', ['HH', 'AY1', 'ER0', 'AA2', 'R', 'K', 'IY0']), 'hieroglyph': ('NN', ['HH', 'AY2', 'R', 'OW0', 'G', 'L', 'IH1', 'F']), 'hieroglyphic': ('JJ', ['HH', 'AY2', 'R', 'OW0', 'G', 'L', 'IH1', 'F', 'IH0', 'K']), 'high': ('JJ', ['HH', 'AY1']), 'highflier': ('NN', ['HH', 'AY1', 'F', 'L', 'AY2', 'ER0']), 'highflying': ('VBG', ['HH', 'AY1', 'F', 'L', 'AY2', 'IH0', 'NG']), 'highland': ('NN', ['HH', 'AY1', 'L', 'AH0', 'N', 'D']), 'highlander': ('NN', ['HH', 'AY1', 'L', 'AE2', 'N', 'D', 'ER0']), 'highly': ('RB', ['HH', 'AY1', 'L', 'IY0']), 'highness': ('NN', ['HH', 'AY1', 'N', 'AH0', 'S']), 'high-spirited': ('JJ', ['HH', 'AY1', 'S', 'P', 'IH1', 'R', 'IH0', 'D', 'IH0', 'D']), 'hight': ('NN', ['HH', 'AY1', 'T']), 'hot': ('JJ', ['HH', 'AA1', 'T']), 'hote': ('NN', ['HH', 'OW1', 'T']), 'hoten': ('NN', ['HH', 'OW1', 'T', 'AH0', 'N']), 'highway': ('NN', ['HH', 'AY1', 'W', 'EY2']), 'hilarious': ('JJ', ['HH', 'IH0', 'L', 'EH1', 'R', 'IY0', 'AH0', 'S']), 'hilarity': ('NN', ['HH', 'IH0', 'L', 'EH1', 'R', 'AH0', 'T', 'IY0']), 'hile': ('NN', ['HH', 'AY1', 'L']), 'hill': ('NN', ['HH', 'IH1', 'L']), 'hilling': ('VBG', ['HH', 'IH1', 'L', 'IH0', 'NG']), 'hillock': ('NN', ['HH', 'IH1', 'L', 'AH0', 'K']), 'hillside': ('NN', ['HH', 'IH1', 'L', 'S', 'AY2', 'D']), 'hilltop': ('NN', ['HH', 'IH1', 'L', 'T', 'AA2', 'P']), 'hilly': ('RB', ['HH', 'IH1', 'L', 'IY0']), 'hilt': ('NN', ['HH', 'IH1', 'L', 'T']), 'him': ('PRP', ['HH', 'IH1', 'M']), 'himalayan': ('NN', ['HH', 'IH2', 'M', 'AH0', 'L', 'EY1', 'AH0', 'N']), 'himself': ('PRP', ['HH', 'IH0', 'M', 'S', 'EH1', 'L', 'F']), 'hind': ('NN', 
['HH', 'AY1', 'N', 'D']), 'hinder': ('NN', ['HH', 'IH1', 'N', 'D', 'ER0']), 'hindered': ('VBN', ['HH', 'IH1', 'N', 'D', 'ER0', 'D']), 'hindering': ('VBG', ['HH', 'IH1', 'N', 'D', 'ER0', 'IH0', 'NG']), 'hinderer': ('NN', ['HH', 'IH1', 'N', 'D', 'ER0', 'ER0']), 'hindi': ('NN', ['HH', 'IH1', 'N', 'D', 'IY0']), 'hindus': ('NN', ['HH', 'IH1', 'N', 'D', 'UW2', 'Z']), 'hindu': ('NN', ['HH', 'IH1', 'N', 'D', 'UW2']), 'hinduism': ('NN', ['HH', 'IH1', 'N', 'JH', 'UW0', 'IH2', 'Z', 'AH0', 'M']), 'hindrance': ('NN', ['HH', 'IH1', 'N', 'D', 'R', 'AH0', 'N', 'S']), 'hine': ('NN', ['HH', 'AY1', 'N']), 'hinge': ('NN', ['HH', 'IH1', 'N', 'JH']), 'hinged': ('VBN', ['HH', 'IH1', 'N', 'JH', 'D']), 'hink': ('NN', ['HH', 'IH1', 'NG', 'K']), 'hinny': ('NN', ['HH', 'IH1', 'N', 'IY0']), 'hinted': ('VBN', ['HH', 'IH1', 'N', 'T', 'AH0', 'D']), 'hinting': ('VBG', ['HH', 'IH1', 'N', 'T', 'IH0', 'NG']), 'hint': ('NN', ['HH', 'IH1', 'N', 'T']), 'hip': ('NN', ['HH', 'IH1', 'P']), 'hipps': ('NN', ['HH', 'IH1', 'P', 'S']), 'hippe': ('NN', ['HH', 'IH1', 'P']), 'hippocampus': ('NN', ['HH', 'IH2', 'P', 'OW0', 'K', 'AE1', 'M', 'P', 'AH0', 'S']), 'hippocrates': ('NNS', ['HH', 'IH1', 'P', 'AH0', 'K', 'R', 'EY2', 'T', 'S']), 'hippocratic': ('JJ', ['HH', 'IH0', 'P', 'AH0', 'K', 'R', 'AE1', 'T', 'IH0', 'K']), 'hippodrome': ('NN', ['HH', 'IH1', 'P', 'AH0', 'D', 'R', 'OW2', 'M']), 'hippopotamuses': ('NNS', ['HH', 'IH2', 'P', 'AH0', 'P', 'AA1', 'T', 'AH0', 'M', 'AH0', 'S', 'IH0', 'Z']), 'hippopotamus': ('NN', ['HH', 'IH2', 'P', 'AH0', 'P', 'AA1', 'T', 'AH0', 'M', 'AH0', 'S']), 'hire': ('NN', ['HH', 'AY1', 'ER0']), 'hired': ('VBN', ['HH', 'AY1', 'ER0', 'D']), 'hiring': ('VBG', ['HH', 'AY1', 'R', 'IH0', 'NG']), 'hires': ('NNS', ['HH', 'AY1', 'ER0', 'Z']), 'hirsute': ('NN', ['HH', 'ER0', 'S', 'UW1', 'T']), 'his': ('PRP$', ['HH', 'IH1', 'Z']), 'hispanic': ('JJ', ['HH', 'IH0', 'S', 'P', 'AE1', 'N', 'IH0', 'K']), 'hissed': ('VBN', ['HH', 'IH1', 'S', 'T']), 'hissing': ('VBG', ['HH', 'IH1', 'S', 'IH0', 'NG']), 'hiss': 
('NN', ['HH', 'IH1', 'S']), 'histology': ('NN', ['HH', 'IH0', 'S', 'T', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'historian': ('NN', ['HH', 'IH0', 'S', 'T', 'AO1', 'R', 'IY0', 'AH0', 'N']), 'historic': ('NN', ['HH', 'IH0', 'S', 'T', 'AO1', 'R', 'IH0', 'K']), 'historical': ('JJ', ['HH', 'IH0', 'S', 'T', 'AO1', 'R', 'IH0', 'K', 'AH0', 'L']), 'historically': ('RB', ['HH', 'IH0', 'S', 'T', 'AO1', 'R', 'IH0', 'K', 'AH0', 'L', 'IY0']), 'historiography': ('NN', ['HH', 'IH0', 'S', 'T', 'AO2', 'R', 'IY0', 'AA1', 'G', 'R', 'AH0', 'F', 'IY0']), 'histories': ('NNS', ['HH', 'IH1', 'S', 'T', 'ER0', 'IY0', 'Z']), 'history': ('NN', ['HH', 'IH1', 'S', 'T', 'ER0', 'IY0']), 'histrionic': ('NN', ['HH', 'IH2', 'S', 'T', 'R', 'IY0', 'AA1', 'N', 'IH0', 'K']), 'hit': ('NN', ['HH', 'IH1', 'T']), 'hitting': ('VBG', ['HH', 'IH1', 'T', 'IH0', 'NG']), 'hitch': ('NN', ['HH', 'IH1', 'CH']), 'hitched': ('VBN', ['HH', 'IH1', 'CH', 'T']), 'hitching': ('VBG', ['HH', 'IH1', 'CH', 'IH0', 'NG']), 'hither': ('NN', ['HH', 'IH1', 'DH', 'ER0']), 'hitherto': ('NN', ['HH', 'IH1', 'DH', 'ER2', 'T', 'UW1']), 'hitter': ('NN', ['HH', 'IH1', 'T', 'ER0']), 'hive': ('NN', ['HH', 'AY1', 'V']), 'hives': ('NNS', ['HH', 'AY1', 'V', 'Z']), 'ho': ('NN', ['HH', 'OW1']), 'hoar': ('NN', ['HH', 'AO1', 'R']), 'hoard': ('NN', ['HH', 'AO1', 'R', 'D']), 'hoarded': ('VBD', ['HH', 'AO1', 'R', 'D', 'IH0', 'D']), 'hoarding': ('VBG', ['HH', 'AO1', 'R', 'D', 'IH0', 'NG']), 'hoarse': ('NN', ['HH', 'AO1', 'R', 'S']), 'hoarseness': ('NN', ['HH', 'AO1', 'R', 'S', 'N', 'AH0', 'S']), 'hoary': ('NN', ['HH', 'AO1', 'R', 'IY0']), 'hoax': ('NN', ['HH', 'OW1', 'K', 'S']), 'hob': ('NN', ['HH', 'AA1', 'B']), 'hobbled': ('VBN', ['HH', 'AA1', 'B', 'AH0', 'L', 'D']), 'hobbling': ('VBG', ['HH', 'AA1', 'B', 'AH0', 'L', 'IH0', 'NG']), 'hobble': ('JJ', ['HH', 'AA1', 'B', 'AH0', 'L']), 'hobbies': ('NNS', ['HH', 'AA1', 'B', 'IY0', 'Z']), 'hobby': ('NN', ['HH', 'AA1', 'B', 'IY0']), 'hobnail': ('NN', ['HH', 'AA1', 'B', 'N', 'EY2', 'L']), 'hobnob': ('NN', ['HH', 
'AA1', 'B', 'N', 'AA2', 'B']), 'hock': ('NN', ['HH', 'AA1', 'K']), 'hough': ('NN', ['HH', 'AH1', 'F']), 'hockey': ('NN', ['HH', 'AA1', 'K', 'IY0']), 'hocus': ('NN', ['HH', 'OW1', 'K', 'AH0', 'S']), 'hodgepodge': ('NN', ['HH', 'AA1', 'JH', 'P', 'AA2', 'JH']), 'hoe': ('NN', ['HH', 'OW1']), 'hoeing': ('VBG', ['HH', 'OW1', 'IH0', 'NG']), 'hog': ('NN', ['HH', 'AA1', 'G']), 'hogging': ('VBG', ['HH', 'AO1', 'G', 'IH0', 'NG']), 'hogwash': ('NN', ['HH', 'AA1', 'G', 'W', 'AA2', 'SH']), 'hoisted': ('VBN', ['HH', 'OY1', 'S', 'T', 'AH0', 'D']), 'hoisting': ('VBG', ['HH', 'OY1', 'S', 'T', 'IH0', 'NG']), 'hoist': ('NN', ['HH', 'OY1', 'S', 'T']), 'hoit': ('NN', ['HH', 'OY1', 'T']), 'hold': ('NN', ['HH', 'OW1', 'L', 'D']), 'holding': ('VBG', ['HH', 'OW1', 'L', 'D', 'IH0', 'NG']), 'holden': ('NN', ['HH', 'OW1', 'L', 'D', 'AH0', 'N']), 'holder': ('NN', ['HH', 'OW1', 'L', 'D', 'ER0']), 'holdfast': ('NN', ['HH', 'OW1', 'L', 'F', 'AE2', 'S', 'T']), 'hole': ('NN', ['HH', 'OW1', 'L']), 'holiday': ('NN', ['HH', 'AA1', 'L', 'AH0', 'D', 'EY2']), 'holiness': ('NN', ['HH', 'OW1', 'L', 'IY0', 'N', 'AH0', 'S']), 'holing': ('VBG', ['HH', 'OW1', 'L', 'IH0', 'NG']), 'holland': ('NN', ['HH', 'AA1', 'L', 'AH0', 'N', 'D']), 'hollander': ('NN', ['HH', 'AA1', 'L', 'AH0', 'N', 'D', 'ER0']), 'hollands': ('NNS', ['HH', 'AA1', 'L', 'AH0', 'N', 'D', 'Z']), 'hollo': ('NN', ['HH', 'AA1', 'L', 'OW2']), 'hollow': ('NN', ['HH', 'AA1', 'L', 'OW0']), 'hollowed': ('VBN', ['HH', 'AA1', 'L', 'OW0', 'D']), 'hollowing': ('VBG', ['HH', 'AA1', 'L', 'OW0', 'IH0', 'NG']), 'holly': ('RB', ['HH', 'AA1', 'L', 'IY0']), 'hollyhock': ('NN', ['HH', 'AA1', 'L', 'IY0', 'HH', 'AA2', 'K']), 'holm': ('NN', ['HH', 'OW1', 'M']), 'holmium': ('NN', ['HH', 'OW1', 'L', 'M', 'IY0', 'AH0', 'M']), 'holocaust': ('NN', ['HH', 'AA1', 'L', 'AH0', 'K', 'AO2', 'S', 'T']), 'holographic': ('JJ', ['HH', 'AA2', 'L', 'AH0', 'G', 'R', 'AE1', 'F', 'IH0', 'K']), 'holster': ('NN', ['HH', 'OW1', 'L', 'S', 'T', 'ER0']), 'holt': ('NN', ['HH', 'OW1', 'L', 'T']), 
'holy': ('NN', ['HH', 'OW1', 'L', 'IY0']), 'homage': ('NN', ['AA1', 'M', 'AH0', 'JH']), 'home': ('NN', ['HH', 'OW1', 'M']), 'homeless': ('NN', ['HH', 'OW1', 'M', 'L', 'AH0', 'S']), 'homelike': ('NN', ['HH', 'OW1', 'M', 'L', 'AY2', 'K']), 'homely': ('RB', ['HH', 'OW1', 'M', 'L', 'IY0']), 'homemade': ('NN', ['HH', 'OW1', 'M', 'M', 'EY1', 'D']), 'homeopathic': ('JJ', ['HH', 'OW2', 'M', 'IY0', 'OW0', 'P', 'AE1', 'TH', 'AH0', 'K']), 'homeopathy': ('NN', ['HH', 'OW2', 'M', 'IY0', 'OW0', 'P', 'AE1', 'TH', 'IY0']), 'homer': ('NN', ['HH', 'OW1', 'M', 'ER0']), 'homeric': ('NN', ['HH', 'OW0', 'M', 'EH1', 'R', 'IH0', 'K']), 'homesick': ('NN', ['HH', 'OW1', 'M', 'S', 'IH2', 'K']), 'homespun': ('NN', ['HH', 'OW1', 'M', 'S', 'P', 'AH2', 'N']), 'homestead': ('NN', ['HH', 'OW1', 'M', 'S', 'T', 'EH2', 'D']), 'homesteader': ('NN', ['HH', 'OW1', 'M', 'S', 'T', 'EH0', 'D', 'ER0']), 'homeward': ('NN', ['HH', 'OW1', 'M', 'W', 'ER0', 'D']), 'homicidal': ('NN', ['HH', 'AA2', 'M', 'AH0', 'S', 'AY1', 'D', 'AH0', 'L']), 'homicide': ('NN', ['HH', 'AA1', 'M', 'AH0', 'S', 'AY2', 'D']), 'homiletic': ('JJ', ['HH', 'AA2', 'M', 'AH0', 'L', 'EH1', 'T', 'IH0', 'K']), 'homilies': ('NNS', ['HH', 'OW1', 'M', 'AH0', 'L', 'IY0', 'Z']), 'homily': ('RB', ['HH', 'AA1', 'M', 'AH0', 'L', 'IY0']), 'homing': ('VBG', ['HH', 'OW1', 'M', 'IH0', 'NG']), 'hominy': ('NN', ['HH', 'AA1', 'M', 'IH0', 'N', 'IY0']), 'homogeneity': ('NN', ['HH', 'AA2', 'M', 'AH0', 'JH', 'AH0', 'N', 'IY1', 'AH0', 'T', 'IY0']), 'homogeneous': ('JJ', ['HH', 'OW2', 'M', 'AH0', 'JH', 'IY1', 'N', 'IY0', 'AH0', 'S']), 'homogenous': ('JJ', ['HH', 'AH0', 'M', 'AA1', 'JH', 'AH0', 'N', 'AH0', 'S']), 'homogeny': ('NN', ['HH', 'OW0', 'M', 'AA1', 'JH', 'AH0', 'N', 'IY0']), 'homologous': ('JJ', ['HH', 'AH0', 'M', 'AA1', 'L', 'AH0', 'G', 'AH0', 'S']), 'homonym': ('NN', ['HH', 'AO1', 'M', 'AH0', 'N', 'IH0', 'M']), 'homophone': ('NN', ['HH', 'AO1', 'M', 'AH0', 'F', 'OW2', 'N']), 'hone': ('NN', ['HH', 'OW1', 'N']), 'honed': ('VBN', ['HH', 'OW1', 'N', 'D']), 
'honing': ('VBG', ['HH', 'OW1', 'N', 'IH0', 'NG']), 'honest': ('NN', ['AA1', 'N', 'AH0', 'S', 'T']), 'honestly': ('RB', ['AA1', 'N', 'AH0', 'S', 'T', 'L', 'IY0']), 'honesty': ('NN', ['AA1', 'N', 'AH0', 'S', 'T', 'IY0']), 'honey': ('NN', ['HH', 'AH1', 'N', 'IY0']), 'honeybee': ('NN', ['HH', 'AH1', 'N', 'IY0', 'B', 'IY2']), 'honeycomb': ('NN', ['HH', 'AH1', 'N', 'IY0', 'K', 'OW2', 'M']), 'honeydew': ('NN', ['HH', 'AH1', 'N', 'IY0', 'D', 'UW2']), 'honeymoon': ('NN', ['HH', 'AH1', 'N', 'IY0', 'M', 'UW2', 'N']), 'honeysuckle': ('NN', ['HH', 'AH1', 'N', 'IY0', 'S', 'AH2', 'K', 'AH0', 'L']), 'hong': ('NN', ['HH', 'AO1', 'NG']), 'honk': ('NN', ['HH', 'AA1', 'NG', 'K']), 'honor': ('NN', ['AA1', 'N', 'ER0']), 'honored': ('VBN', ['AA1', 'N', 'ER0', 'D']), 'honoring': ('VBG', ['AA1', 'N', 'ER0', 'IH0', 'NG']), 'honorable': ('JJ', ['AA1', 'N', 'ER0', 'AH0', 'B', 'AH0', 'L']), 'honorably': ('RB', ['AA1', 'N', 'ER0', 'AH0', 'B', 'L', 'IY0']), 'honorarium': ('NN', ['AA2', 'N', 'ER0', 'EH1', 'R', 'IY0', 'AH0', 'M']), 'honorary': ('NN', ['AA1', 'N', 'ER0', 'EH2', 'R', 'IY0']), 'honorific': ('NN', ['AA2', 'N', 'ER0', 'IH1', 'F', 'IH0', 'K']), 'hoo': ('NN', ['HH', 'UW1']), 'hood': ('NN', ['HH', 'UH1', 'D']), 'hooded': ('VBD', ['HH', 'UH1', 'D', 'IH0', 'D']), 'hoodlum': ('NN', ['HH', 'UH1', 'D', 'L', 'AH0', 'M']), 'hoodwink': ('NN', ['HH', 'UH1', 'D', 'W', 'IH2', 'NG', 'K']), 'hoofs': ('NN', ['HH', 'UH1', 'F', 'S']), 'hooves': ('NNS', ['HH', 'UH1', 'V', 'Z']), 'hoof': ('NN', ['HH', 'UW1', 'F']), 'hoofed': ('NN', ['HH', 'UH1', 'F', 'T']), 'hook': ('NN', ['HH', 'UH1', 'K']), 'hooked': ('NNS', ['HH', 'UH1', 'K', 'T']), 'hooking': ('VBG', ['HH', 'UH1', 'K', 'IH0', 'NG']), 'hooker': ('NN', ['HH', 'UH1', 'K', 'ER0']), 'hooky': ('NN', ['HH', 'UH1', 'K', 'IY0']), 'hoop': ('NN', ['HH', 'UW1', 'P']), 'hooper': ('NN', ['HH', 'UW1', 'P', 'ER0']), 'hoopoe': ('NN', ['HH', 'UW1', 'P', 'UW2']), 'hoosier': ('NN', ['HH', 'UW1', 'ZH', 'ER0']), 'hooted': ('VBN', ['HH', 'UW1', 'T', 'IH0', 'D']), 'hoot': 
('NN', ['HH', 'UW1', 'T']), 'hooven': ('NN', ['HH', 'UW1', 'V', 'AH0', 'N']), 'hopped': ('NN', ['HH', 'AA1', 'P', 'T']), 'hopping': ('VBG', ['HH', 'AA1', 'P', 'IH0', 'NG']), 'hop': ('NN', ['HH', 'AA1', 'P']), 'hope': ('NN', ['HH', 'OW1', 'P']), 'hoped': ('VBD', ['HH', 'OW1', 'P', 'T']), 'hoping': ('VBG', ['HH', 'OW1', 'P', 'IH0', 'NG']), 'hopeful': ('NN', ['HH', 'OW1', 'P', 'F', 'AH0', 'L']), 'hopeless': ('NN', ['HH', 'OW1', 'P', 'L', 'AH0', 'S']), 'hopper': ('NN', ['HH', 'AA1', 'P', 'ER0']), 'hopple': ('NN', ['HH', 'AA1', 'P', 'AH0', 'L']), 'hopscotch': ('NN', ['HH', 'AA1', 'P', 'S', 'K', 'AA2', 'CH']), 'horatian': ('NN', ['HH', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'horde': ('NN', ['HH', 'AO1', 'R', 'D']), 'horehound': ('NN', ['HH', 'AA1', 'R', 'HH', 'AW2', 'N', 'D']), 'horizon': ('NN', ['HH', 'ER0', 'AY1', 'Z', 'AH0', 'N']), 'horizontal': ('NN', ['HH', 'AO2', 'R', 'AH0', 'Z', 'AA1', 'N', 'T', 'AH0', 'L']), 'horizontally': ('RB', ['HH', 'AO2', 'R', 'IH0', 'Z', 'AA1', 'N', 'T', 'AH0', 'L', 'IY0']), 'horn': ('NN', ['HH', 'AO1', 'R', 'N']), 'hornbeak': ('NN', ['HH', 'AO1', 'R', 'N', 'B', 'AH0', 'K']), 'hornbill': ('NN', ['HH', 'AO1', 'R', 'N', 'B', 'IH2', 'L']), 'hornblende': ('NN', ['HH', 'AO1', 'R', 'N', 'B', 'L', 'EH2', 'N', 'D']), 'hornblower': ('NN', ['HH', 'AO1', 'R', 'N', 'B', 'L', 'OW2', 'ER0']), 'hornbook': ('NN', ['HH', 'AO1', 'R', 'N', 'B', 'UH2', 'K']), 'horned': ('VBN', ['HH', 'AO1', 'R', 'N', 'D']), 'horner': ('NN', ['HH', 'AO1', 'R', 'N', 'ER0']), 'hornet': ('NN', ['HH', 'AO1', 'R', 'N', 'IH0', 'T']), 'horning': ('VBG', ['HH', 'AO1', 'R', 'N', 'IH0', 'NG']), 'hornless': ('NN', ['HH', 'AO1', 'R', 'N', 'L', 'AH0', 'S']), 'hornpipe': ('NN', ['HH', 'AO1', 'R', 'N', 'P', 'AY0', 'P']), 'horny': ('NN', ['HH', 'AO1', 'R', 'N', 'IY0']), 'horoscope': ('NN', ['HH', 'AO1', 'R', 'AH0', 'S', 'K', 'OW2', 'P']), 'horrendous': ('JJ', ['HH', 'AO2', 'R', 'EH1', 'N', 'D', 'AH0', 'S']), 'horrible': ('JJ', ['HH', 'AO1', 'R', 'AH0', 'B', 'AH0', 'L']), 'horribly': ('RB', ['HH', 
'AO1', 'R', 'AH0', 'B', 'L', 'IY0']), 'horrid': ('NN', ['HH', 'AO1', 'R', 'AH0', 'D']), 'horrific': ('NN', ['HH', 'AO0', 'R', 'IH1', 'F', 'IH0', 'K']), 'horrified': ('VBN', ['HH', 'AO1', 'R', 'AH0', 'F', 'AY2', 'D']), 'horrifying': ('VBG', ['HH', 'AO1', 'R', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'horrify': ('NN', ['HH', 'AO1', 'R', 'AH0', 'F', 'AY2']), 'horror': ('NN', ['HH', 'AO1', 'R', 'ER0']), 'horse': ('NN', ['HH', 'AO1', 'R', 'S']), 'horsed': ('VBN', ['HH', 'AO1', 'R', 'S', 'T']), 'horsing': ('VBG', ['HH', 'AO1', 'R', 'S', 'IH0', 'NG']), 'horseback': ('NN', ['HH', 'AO1', 'R', 'S', 'B', 'AE2', 'K']), 'horseflesh': ('NN', ['HH', 'AO1', 'R', 'S', 'F', 'L', 'EH2', 'SH']), 'horsehead': ('NN', ['HH', 'AO1', 'R', 'S', 'HH', 'EH2', 'D']), 'horsemen': ('NNS', ['HH', 'AO1', 'R', 'S', 'M', 'AH0', 'N']), 'horseman': ('NN', ['HH', 'AO1', 'R', 'S', 'M', 'AH0', 'N']), 'horsemanship': ('NN', ['HH', 'AO1', 'R', 'S', 'M', 'AH0', 'N', 'SH', 'IH0', 'P']), 'horseplay': ('NN', ['HH', 'AO1', 'R', 'S', 'P', 'L', 'EY2']), 'horseshoe': ('NN', ['HH', 'AO1', 'R', 'S', 'SH', 'UW2']), 'horsetail': ('NN', ['HH', 'AO1', 'R', 'S', 'T', 'EY2', 'L']), 'horticultural': ('JJ', ['HH', 'AO2', 'R', 'T', 'AH0', 'K', 'AH1', 'L', 'CH', 'ER0', 'AH0', 'L']), 'horticulture': ('NN', ['HH', 'AO1', 'R', 'T', 'IH0', 'K', 'AH2', 'L', 'CH', 'ER0']), 'horticulturist': ('NN', ['HH', 'AO2', 'R', 'T', 'IH0', 'K', 'AH1', 'L', 'CH', 'ER0', 'IH0', 'S', 'T']), 'hose': ('NN', ['HH', 'OW1', 'Z']), 'hosiery': ('NN', ['HH', 'OW1', 'ZH', 'ER0', 'IY0']), 'hospice': ('NN', ['HH', 'AA1', 'S', 'P', 'AH0', 'S']), 'hospitable': ('JJ', ['HH', 'AA1', 'S', 'P', 'IH1', 'T', 'AH0', 'B', 'AH0', 'L']), 'hospital': ('NN', ['HH', 'AA1', 'S', 'P', 'IH2', 'T', 'AH0', 'L']), 'hospitality': ('NN', ['HH', 'AA2', 'S', 'P', 'AH0', 'T', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'hospitalize': ('NN', ['HH', 'AO1', 'S', 'P', 'IH2', 'T', 'AH0', 'L', 'AY2', 'Z']), 'host': ('NN', ['HH', 'OW1', 'S', 'T']), 'hostage': ('NN', ['HH', 'AA1', 'S', 'T', 'IH0', 'JH']), 
'hostel': ('NN', ['HH', 'AA1', 'S', 'T', 'AH0', 'L']), 'hostess': ('NN', ['HH', 'OW1', 'S', 'T', 'AH0', 'S']), 'hostile': ('NN', ['HH', 'AA1', 'S', 'T', 'AH0', 'L']), 'hostilities': ('NNS', ['HH', 'AA0', 'S', 'T', 'IH1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'hostility': ('NN', ['HH', 'AA0', 'S', 'T', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'hosting': ('VBG', ['HH', 'OW1', 'S', 'T', 'IH0', 'NG']), 'hostler': ('NN', ['HH', 'AA1', 'S', 'L', 'ER0']), 'hotbed': ('NN', ['HH', 'AA1', 'T', 'B', 'EH2', 'D']), 'hotel': ('NN', ['HH', 'OW0', 'T', 'EH1', 'L']), 'hothouse': ('NN', ['HH', 'AA1', 'T', 'HH', 'AW2', 'S']), 'hotly': ('RB', ['HH', 'AA1', 'T', 'L', 'IY0']), 'hound': ('NN', ['HH', 'AW1', 'N', 'D']), 'hounded': ('VBD', ['HH', 'AW1', 'N', 'D', 'IH0', 'D']), 'hounding': ('VBG', ['HH', 'AW1', 'N', 'D', 'IH0', 'NG']), 'houp': ('NN', ['HH', 'UW1', 'P']), 'hour': ('NN', ['AW1', 'ER0']), 'hourglass': ('NN', ['AW1', 'ER0', 'G', 'L', 'AE2', 'S']), 'hourly': ('RB', ['AW1', 'R', 'L', 'IY0']), 'hours': ('NNS', ['AW1', 'ER0', 'Z']), 'houses': ('NNS', ['HH', 'AW1', 'S', 'AH0', 'Z']), 'house': ('NN', ['HH', 'AW1', 'S']), 'housed': ('VBN', ['HH', 'AW1', 'Z', 'D']), 'housing': ('NN', ['HH', 'AW1', 'Z', 'IH0', 'NG']), 'household': ('NN', ['HH', 'AW1', 'S', 'HH', 'OW2', 'L', 'D']), 'householder': ('NN', ['HH', 'AW1', 'S', 'HH', 'OW2', 'L', 'D', 'ER0']), 'housekeeper': ('NN', ['HH', 'AW1', 'S', 'K', 'IY2', 'P', 'ER0']), 'housekeeping': ('VBG', ['HH', 'AW1', 'S', 'K', 'IY2', 'P', 'IH0', 'NG']), 'housel': ('NN', ['HH', 'AW1', 'S', 'AH0', 'L']), 'housewarming': ('VBG', ['HH', 'AW1', 'S', 'W', 'AA2', 'M', 'IH0', 'NG']), 'housewife': ('NN', ['HH', 'AW1', 'S', 'W', 'AY2', 'F']), 'housework': ('NN', ['HH', 'AW1', 'S', 'W', 'ER2', 'K']), 'housewright': ('NN', ['HH', 'AW1', 'S', 'R', 'AY2', 'T']), 'hovel': ('NN', ['HH', 'AH1', 'V', 'AH0', 'L']), 'hover': ('NN', ['HH', 'AH1', 'V', 'ER0']), 'hovered': ('VBD', ['HH', 'AH1', 'V', 'ER0', 'D']), 'hovering': ('VBG', ['HH', 'AH1', 'V', 'ER0', 'IH0', 'NG']), 'how': 
('WRB', ['HH', 'AW1']), 'howdy': ('NN', ['HH', 'AW1', 'D', 'IY0']), 'howell': ('NN', ['HH', 'AW1', 'AH0', 'L']), 'however': ('RB', ['HH', 'AW2', 'EH1', 'V', 'ER0']), 'howitzer': ('NN', ['HH', 'AW1', 'AH0', 'T', 'S', 'ER0']), 'howled': ('VBN', ['HH', 'AW1', 'L', 'D']), 'howling': ('VBG', ['HH', 'AW1', 'L', 'IH0', 'NG']), 'howl': ('NN', ['HH', 'AW1', 'L']), 'howler': ('NN', ['HH', 'AW1', 'L', 'ER0']), 'hoy': ('NN', ['HH', 'OY1']), 'hub': ('NN', ['HH', 'AH1', 'B']), 'hubbub': ('NN', ['HH', 'AH1', 'B', 'AH0', 'B']), 'hubby': ('NN', ['HH', 'AH1', 'B', 'IY0']), 'hubner': ('NN', ['HH', 'AH1', 'B', 'N', 'ER0']), 'huch': ('JJ', ['HH', 'AH1', 'CH']), 'huck': ('NN', ['HH', 'AH1', 'K']), 'huckle': ('NN', ['HH', 'AH1', 'K', 'AH0', 'L']), 'huckleberry': ('NN', ['HH', 'AH1', 'K', 'AH0', 'L', 'B', 'EH2', 'R', 'IY0']), 'huckster': ('NN', ['HH', 'AH1', 'K', 'S', 'T', 'ER0']), 'hud': ('NN', ['HH', 'AH1', 'D']), 'huddled': ('VBD', ['HH', 'AH1', 'D', 'AH0', 'L', 'D']), 'huddling': ('VBG', ['HH', 'AH1', 'D', 'AH0', 'L', 'IH0', 'NG']), 'huddle': ('NN', ['HH', 'AH1', 'D', 'AH0', 'L']), 'hue': ('NN', ['HH', 'Y', 'UW1']), 'hued': ('VBN', ['HH', 'Y', 'UW1', 'D']), 'huffed': ('NN', ['HH', 'AH1', 'F', 'T']), 'huffing': ('VBG', ['HH', 'AH1', 'F', 'IH0', 'NG']), 'huff': ('NN', ['HH', 'AH1', 'F']), 'huffer': ('NN', ['HH', 'AH1', 'F', 'ER0']), 'huffy': ('NN', ['HH', 'AH1', 'F', 'IY0']), 'hugged': ('VBN', ['HH', 'AH1', 'G', 'D']), 'hugging': ('VBG', ['HH', 'AH1', 'G', 'IH0', 'NG']), 'hug': ('NN', ['HH', 'AH1', 'G']), 'huge': ('JJ', ['HH', 'Y', 'UW1', 'JH']), 'hugger': ('NN', ['HH', 'AH1', 'G', 'ER0']), 'huguenot': ('NN', ['HH', 'Y', 'UW1', 'G', 'AH0', 'N', 'AA2', 'T']), 'hulk': ('NN', ['HH', 'AH1', 'L', 'K']), 'hulking': ('VBG', ['HH', 'AH1', 'L', 'K', 'IH0', 'NG']), 'hull': ('NN', ['HH', 'AH1', 'L']), 'hulled': ('VBN', ['HH', 'AH1', 'L', 'D']), 'hullabaloo': ('NN', ['HH', 'AH2', 'L', 'AH0', 'B', 'AH0', 'L', 'UW1']), 'humming': ('VBG', ['HH', 'AH1', 'M', 'IH0', 'NG']), 'hum': ('NN', ['HH', 'AH1', 
'M']), 'human': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N']), 'humane': ('NN', ['HH', 'Y', 'UW0', 'M', 'EY1', 'N']), 'humanism': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'humanist': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'IH0', 'S', 'T']), 'humanistic': ('JJ', ['HH', 'Y', 'UW2', 'M', 'AH0', 'N', 'IH1', 'S', 'T', 'IH0', 'K']), 'humanitarian': ('JJ', ['HH', 'Y', 'UW2', 'M', 'AE2', 'N', 'AH0', 'T', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'humanities': ('NNS', ['HH', 'Y', 'UW0', 'M', 'AE1', 'N', 'IH0', 'T', 'IY0', 'Z']), 'humanity': ('NN', ['HH', 'Y', 'UW0', 'M', 'AE1', 'N', 'IH0', 'T', 'IY0']), 'humanized': ('VBN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'AY2', 'Z', 'D']), 'humanizing': ('VBG', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'AY2', 'Z', 'IH0', 'NG']), 'humanize': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'AY2', 'Z']), 'humankind': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'K', 'AY2', 'N', 'D']), 'humanly': ('RB', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'L', 'IY0']), 'humanness': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'N', 'N', 'AH0', 'S']), 'humble': ('JJ', ['HH', 'AH1', 'M', 'B', 'AH0', 'L']), 'humbled': ('VBN', ['HH', 'AH1', 'M', 'B', 'AH0', 'L', 'D']), 'humbling': ('VBG', ['HH', 'AH1', 'M', 'B', 'AH0', 'L', 'IH0', 'NG']), 'humbler': ('NN', ['HH', 'AH1', 'M', 'B', 'AH0', 'L', 'ER0']), 'humbles': ('NNS', ['HH', 'AH1', 'M', 'B', 'AH0', 'L', 'Z']), 'humbly': ('RB', ['HH', 'AH1', 'M', 'B', 'L', 'IY0']), 'humbug': ('NN', ['HH', 'AH1', 'M', 'B', 'AH2', 'G']), 'humdrum': ('NN', ['HH', 'AH1', 'M', 'D', 'R', 'AH2', 'M']), 'humerus': ('NN', ['HH', 'Y', 'UW1', 'M', 'ER0', 'AH0', 'S']), 'humid': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'D']), 'humidity': ('NN', ['HH', 'Y', 'UW0', 'M', 'IH1', 'D', 'AH0', 'T', 'IY0']), 'humiliated': ('VBN', ['HH', 'Y', 'UW0', 'M', 'IH1', 'L', 'IY0', 'EY2', 'T', 'IH0', 'D']), 'humiliating': ('VBG', ['HH', 'Y', 'UW0', 'M', 'IH1', 'L', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'humiliate': ('NN', ['HH', 'Y', 'UW0', 'M', 'IH1', 'L', 'IY0', 
'EY2', 'T']), 'humiliation': ('NN', ['HH', 'Y', 'UW0', 'M', 'IH2', 'L', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'humility': ('NN', ['HH', 'Y', 'UW0', 'M', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'hummel': ('NN', ['HH', 'AH1', 'M', 'AH0', 'L']), 'hummer': ('NN', ['HH', 'AH1', 'M', 'ER0']), 'humor': ('NN', ['HH', 'Y', 'UW1', 'M', 'ER0']), 'humored': ('VBN', ['HH', 'Y', 'UW1', 'M', 'ER0', 'D']), 'humoral': ('JJ', ['HH', 'Y', 'UW1', 'M', 'ER0', 'AH0', 'L']), 'humorist': ('NN', ['HH', 'Y', 'UW1', 'M', 'ER0', 'AH0', 'S', 'T']), 'humorless': ('NN', ['HH', 'Y', 'UW1', 'M', 'ER0', 'L', 'AH0', 'S']), 'humorous': ('JJ', ['HH', 'Y', 'UW1', 'M', 'ER0', 'AH0', 'S']), 'humorously': ('RB', ['HH', 'Y', 'UW1', 'M', 'ER0', 'AH0', 'S', 'L', 'IY0']), 'hump': ('NN', ['HH', 'AH1', 'M', 'P']), 'humpback': ('NN', ['HH', 'AH1', 'M', 'P', 'B', 'AE2', 'K']), 'humped': ('NN', ['HH', 'AH1', 'M', 'P', 'T']), 'humph': ('NN', ['HH', 'AH1', 'M', 'F']), 'humulin': ('NN', ['HH', 'Y', 'UW2', 'M', 'Y', 'UW1', 'L', 'IH0', 'N']), 'humus': ('NN', ['HH', 'Y', 'UW1', 'M', 'AH0', 'S']), 'hun': ('NN', ['HH', 'AH1', 'N']), 'hunch': ('NN', ['HH', 'AH1', 'N', 'CH']), 'hunched': ('VBN', ['HH', 'AH1', 'N', 'CH', 'T']), 'hunchback': ('NN', ['HH', 'AH1', 'N', 'CH', 'B', 'AE2', 'K']), 'hundred': ('VBN', ['HH', 'AH1', 'N', 'D', 'R', 'AH0', 'D']), 'hundredth': ('NN', ['HH', 'AH1', 'N', 'D', 'R', 'AH0', 'D', 'TH']), 'hundredweight': ('NN', ['HH', 'AH1', 'N', 'D', 'R', 'AH0', 'D', 'W', 'EY2', 'T']), 'hungarian': ('NN', ['HH', 'AH0', 'NG', 'G', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'hungary': ('NN', ['HH', 'AH1', 'NG', 'G', 'ER0', 'IY0']), 'hunger': ('NN', ['HH', 'AH1', 'NG', 'G', 'ER0']), 'hungering': ('VBG', ['HH', 'AH1', 'NG', 'G', 'ER0', 'IH0', 'NG']), 'hungrily': ('RB', ['HH', 'AH1', 'NG', 'G', 'R', 'AH0', 'L', 'IY0']), 'hungry': ('NN', ['HH', 'AH1', 'NG', 'G', 'R', 'IY0']), 'hunk': ('NN', ['HH', 'AH1', 'NG', 'K']), 'hunker': ('NN', ['HH', 'AH1', 'NG', 'K', 'ER0']), 'hunks': ('NNS', ['HH', 'AH1', 'NG', 'K', 'S']), 'hunted': ('VBN', 
['HH', 'AH1', 'N', 'T', 'AH0', 'D']), 'hunting': ('VBG', ['HH', 'AH1', 'N', 'T', 'IH0', 'NG']), 'hunt': ('NN', ['HH', 'AH1', 'N', 'T']), 'hunte': ('NN', ['HH', 'AH1', 'N', 'T']), 'hunter': ('NN', ['HH', 'AH1', 'N', 'T', 'ER0']), 'huntress': ('NN', ['HH', 'AH1', 'N', 'T', 'R', 'IH0', 'S']), 'huntsman': ('NN', ['HH', 'AH1', 'N', 'T', 'S', 'M', 'AH0', 'N']), 'hurdle': ('NN', ['HH', 'ER1', 'D', 'AH0', 'L']), 'hurled': ('VBN', ['HH', 'ER1', 'L', 'D']), 'hurling': ('VBG', ['HH', 'ER1', 'L', 'IH0', 'NG']), 'hurl': ('NN', ['HH', 'ER1', 'L']), 'hurly': ('RB', ['HH', 'ER1', 'L', 'IY0']), 'hurrah': ('NN', ['HH', 'UH0', 'R', 'AA1']), 'hurricane': ('NN', ['HH', 'ER1', 'AH0', 'K', 'EY2', 'N']), 'hurried': ('VBN', ['HH', 'ER1', 'IY0', 'D']), 'hurries': ('NNS', ['HH', 'ER1', 'IY0', 'Z']), 'hurrying': ('VBG', ['HH', 'ER1', 'IY0', 'IH0', 'NG']), 'hurry': ('NN', ['HH', 'ER1', 'IY0']), 'hurst': ('NN', ['HH', 'ER1', 'S', 'T']), 'hurt': ('NN', ['HH', 'ER1', 'T']), 'hurting': ('VBG', ['HH', 'ER1', 'T', 'IH0', 'NG']), 'hurtful': ('NN', ['HH', 'ER1', 'T', 'F', 'AH0', 'L']), 'hurtling': ('VBG', ['HH', 'ER1', 'T', 'L', 'IH0', 'NG']), 'hurtle': ('NN', ['HH', 'ER1', 'T', 'AH0', 'L']), 'husband': ('NN', ['HH', 'AH1', 'Z', 'B', 'AH0', 'N', 'D']), 'husbandry': ('NN', ['HH', 'AH1', 'Z', 'B', 'AH0', 'N', 'D', 'R', 'IY0']), 'hushed': ('VBN', ['HH', 'AH1', 'SH', 'T']), 'hush': ('NN', ['HH', 'AH1', 'SH']), 'husk': ('NN', ['HH', 'AH1', 'S', 'K']), 'husky': ('NN', ['HH', 'AH1', 'S', 'K', 'IY0']), 'hussar': ('NN', ['HH', 'AH1', 'S', 'ER0']), 'hussite': ('NN', ['HH', 'AH1', 'S', 'AY2', 'T']), 'hussy': ('NN', ['HH', 'AH1', 'S', 'IY0']), 'hustings': ('NNS', ['HH', 'AH1', 'S', 'T', 'IH0', 'NG', 'Z']), 'hustled': ('VBN', ['HH', 'AH1', 'S', 'AH0', 'L', 'D']), 'hustling': ('VBG', ['HH', 'AH1', 'S', 'AH0', 'L', 'IH0', 'NG']), 'hustle': ('NN', ['HH', 'AH1', 'S', 'AH0', 'L']), 'hut': ('NN', ['HH', 'AH1', 'T']), 'hutch': ('NN', ['HH', 'AH1', 'CH']), 'hy': ('NN', ['HH', 'AY1']), 'hyacinth': ('NN', ['HH', 'AY1', 
'AH0', 'S', 'IH2', 'N', 'TH']), 'hyades': ('NNS', ['HH', 'AY1', 'AH0', 'D', 'IY2', 'Z']), 'hybrid': ('NN', ['HH', 'AY1', 'B', 'R', 'AH0', 'D']), 'hybridization': ('NN', ['HH', 'AY2', 'B', 'R', 'AH0', 'D', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'hybridize': ('NN', ['HH', 'AY1', 'B', 'R', 'AH0', 'D', 'AY2', 'Z']), 'hydras': ('NNS', ['HH', 'AY1', 'D', 'R', 'AH0', 'Z']), 'hydra': ('NN', ['HH', 'AY1', 'D', 'R', 'AH0']), 'hydrant': ('NN', ['HH', 'AY1', 'D', 'R', 'AH0', 'N', 'T']), 'hydrate': ('NN', ['HH', 'AY1', 'D', 'R', 'EY2', 'T']), 'hydrated': ('VBN', ['HH', 'AY1', 'D', 'R', 'EY2', 'T', 'AH0', 'D']), 'hydration': ('NN', ['HH', 'AY0', 'D', 'R', 'EY1', 'SH', 'AH0', 'N']), 'hydraulic': ('NN', ['HH', 'AY0', 'D', 'R', 'AO1', 'L', 'IH0', 'K']), 'hydraulics': ('NNS', ['HH', 'AY0', 'D', 'R', 'AO1', 'L', 'IH0', 'K', 'S']), 'hydrazine': ('NN', ['HH', 'AY1', 'D', 'R', 'AH0', 'Z', 'IY2', 'N']), 'hydride': ('NN', ['HH', 'AY1', 'D', 'R', 'AY2', 'D']), 'hydrocarbon': ('NN', ['HH', 'AY2', 'D', 'R', 'OW0', 'K', 'AA1', 'R', 'B', 'AH0', 'N']), 'hydrogen': ('NN', ['HH', 'AY1', 'D', 'R', 'AH0', 'JH', 'AH0', 'N']), 'hydrogenated': ('VBN', ['HH', 'AY1', 'D', 'R', 'AH0', 'JH', 'AH0', 'N', 'EY2', 'T', 'IH0', 'D']), 'hydrogenating': ('VBG', ['HH', 'AY1', 'D', 'R', 'AH0', 'JH', 'AH0', 'N', 'EY2', 'T', 'IH0', 'NG']), 'hydrogenate': ('NN', ['HH', 'AY1', 'D', 'R', 'AH0', 'JH', 'AH0', 'N', 'EY2', 'T']), 'hydrogenation': ('NN', ['HH', 'AY2', 'D', 'R', 'AA2', 'JH', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'hydrographic': ('JJ', ['HH', 'AY2', 'D', 'R', 'AH0', 'G', 'R', 'AE1', 'F', 'IH0', 'K']), 'hydrometer': ('NN', ['HH', 'AY0', 'D', 'R', 'AA1', 'M', 'AH0', 'T', 'ER0']), 'hydrotherapy': ('NN', ['HH', 'AY2', 'D', 'R', 'OW0', 'TH', 'EH1', 'R', 'AH0', 'P', 'IY0']), 'hydrothermal': ('NN', ['HH', 'AY2', 'D', 'R', 'OW0', 'TH', 'ER1', 'M', 'AH0', 'L']), 'hydrous': ('JJ', ['HH', 'AY1', 'D', 'R', 'AH0', 'S']), 'hydroxide': ('NN', ['HH', 'AY0', 'D', 'R', 'AA1', 'K', 'S', 'AY0', 'D']), 'hye': ('NN', ['HH', 
'AY0']), 'hyenas': ('NNS', ['HH', 'AY0', 'IY1', 'N', 'AH0', 'Z']), 'hyena': ('NN', ['HH', 'AY0', 'IY1', 'N', 'AH0']), 'hygeia': ('NN', ['HH', 'AY2', 'JH', 'EY1', 'AH0']), 'hygiene': ('NN', ['HH', 'AY1', 'JH', 'IY2', 'N']), 'hygienist': ('NN', ['HH', 'AY2', 'G', 'IY1', 'N', 'IH0', 'S', 'T']), 'hygrometer': ('NN', ['HH', 'AY0', 'G', 'R', 'AA1', 'M', 'AH0', 'T', 'ER0']), 'hymen': ('NNS', ['HH', 'AY1', 'M', 'AH0', 'N']), 'hymn': ('NN', ['HH', 'IH1', 'M']), 'hymnal': ('NN', ['HH', 'IH1', 'M', 'N', 'AH0', 'L']), 'hymnology': ('NN', ['HH', 'IH0', 'M', 'N', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'hyperbola': ('NN', ['HH', 'AY0', 'P', 'ER1', 'B', 'AH0', 'L', 'AH0']), 'hyperbole': ('NN', ['HH', 'AY0', 'P', 'ER1', 'B', 'AH0', 'L', 'IY2']), 'hyperbolic': ('NN', ['HH', 'AY2', 'P', 'ER0', 'B', 'AA1', 'L', 'IH0', 'K']), 'hyperborean': ('NN', ['HH', 'AY2', 'P', 'ER0', 'B', 'AO1', 'R', 'IY0', 'AH0', 'N']), 'hypercritical': ('JJ', ['HH', 'AY2', 'P', 'ER0', 'K', 'R', 'IH1', 'T', 'IH0', 'K', 'AH0', 'L']), 'hyperion': ('NN', ['HH', 'AY0', 'P', 'IH1', 'R', 'IY0', 'AH0', 'N']), 'hyperkinetic': ('JJ', ['HH', 'AY2', 'P', 'ER0', 'K', 'IH0', 'N', 'EH1', 'T', 'IH0', 'K']), 'hyperopia': ('NN', ['HH', 'AY2', 'P', 'ER0', 'OW1', 'P', 'IY0', 'AH0']), 'hyperplasia': ('NN', ['HH', 'AY2', 'P', 'ER0', 'P', 'L', 'EY1', 'ZH', 'AH0']), 'hypertrophic': ('JJ', ['HH', 'AY2', 'P', 'ER0', 'T', 'R', 'OW1', 'F', 'IH0', 'K']), 'hypertrophy': ('NN', ['HH', 'AY1', 'P', 'ER0', 'T', 'R', 'OW2', 'F', 'IY0']), 'hyphae': ('NN', ['HH', 'AY1', 'F', 'IY2']), 'hyphen': ('NN', ['HH', 'AY1', 'F', 'AH0', 'N']), 'hyphenated': ('VBN', ['HH', 'AY1', 'F', 'AH0', 'N', 'EY2', 'T', 'IH0', 'D']), 'hypnosis': ('NN', ['HH', 'IH0', 'P', 'N', 'OW1', 'S', 'AH0', 'S']), 'hypnotic': ('JJ', ['HH', 'IH0', 'P', 'N', 'AA1', 'T', 'IH0', 'K']), 'hypnotism': ('NN', ['HH', 'IH1', 'P', 'N', 'AH0', 'T', 'IH2', 'Z', 'AH0', 'M']), 'hypnotized': ('VBN', ['HH', 'IH1', 'P', 'N', 'AH0', 'T', 'AY2', 'Z', 'D']), 'hypnotize': ('NN', ['HH', 'IH1', 'P', 'N', 'AH0', 
'T', 'AY2', 'Z']), 'hypo': ('NN', ['HH', 'AY1', 'P', 'OW0']), 'hypochondria': ('NN', ['HH', 'AY2', 'P', 'AH0', 'K', 'AA1', 'N', 'D', 'R', 'IY0', 'AH0']), 'hypochondriac': ('NN', ['HH', 'AY2', 'P', 'AH0', 'K', 'AA1', 'N', 'D', 'R', 'IY0', 'AE0', 'K']), 'hypocrisy': ('NN', ['HH', 'IH0', 'P', 'AA1', 'K', 'R', 'AH0', 'S', 'IY0']), 'hypocrite': ('NN', ['HH', 'IH1', 'P', 'AH0', 'K', 'R', 'IH2', 'T']), 'hypocritical': ('JJ', ['HH', 'IH2', 'P', 'AH0', 'K', 'R', 'IH1', 'T', 'IH0', 'K', 'AH0', 'L']), 'hypodermic': ('NN', ['HH', 'AY2', 'P', 'AH0', 'D', 'ER1', 'M', 'IH0', 'K']), 'hypotheses': ('NNS', ['HH', 'AY0', 'P', 'AA1', 'TH', 'AH0', 'S', 'IY2', 'Z']), 'hypothesis': ('NN', ['HH', 'AY0', 'P', 'AA1', 'TH', 'AH0', 'S', 'AH0', 'S']), 'hypothetical': ('JJ', ['HH', 'AY2', 'P', 'AH0', 'TH', 'EH1', 'T', 'AH0', 'K', 'AH0', 'L']), 'hyson': ('NN', ['HH', 'AY1', 'S', 'AH0', 'N']), 'hyssop': ('NN', ['HH', 'IH1', 'S', 'AH0', 'P']), 'hysteria': ('NN', ['HH', 'IH0', 'S', 'T', 'EH1', 'R', 'IY0', 'AH0']), 'hysteric': ('NN', ['HH', 'IH2', 'S', 'T', 'EH1', 'R', 'IH0', 'K']), 'hysterical': ('JJ', ['HH', 'IH0', 'S', 'T', 'EH1', 'R', 'IH0', 'K', 'AH0', 'L']), 'hysterics': ('NNS', ['HH', 'IH2', 'S', 'T', 'EH1', 'R', 'IH0', 'K', 'S']), 'i': ('NN', ['AY1']), 'we': ('PRP', ['W', 'IY1']), 'our': ('PRP$', ['AW1', 'ER0']), 'ours': ('NNS', ['AW1', 'ER0', 'Z']), 'us': ('PRP', ['AH1', 'S']), 'ianthina': ('NN', ['IY0', 'AH0', 'N', 'TH', 'IY1', 'N', 'AH0']), 'iberian': ('JJ', ['AY0', 'B', 'IH1', 'R', 'IY0', 'AH0', 'N']), 'ibex': ('NN', ['AY1', 'B', 'EH0', 'K', 'S']), 'ibis': ('NN', ['AY1', 'B', 'AH0', 'S']), 'j': ('NN', ['JH', 'EY1']), 'jab': ('NN', ['JH', 'AE1', 'B']), 'jacinth': ('NN', ['JH', 'AE1', 'S', 'IH0', 'N', 'TH']), 'jack': ('NN', ['JH', 'AE1', 'K']), 'jackal': ('NN', ['JH', 'AE1', 'K', 'AH0', 'L']), 'jackass': ('NN', ['JH', 'AE1', 'K', 'AE2', 'S']), 'jacket': ('NN', ['JH', 'AE1', 'K', 'AH0', 'T']), 'jacketed': ('VBN', ['JH', 'AE1', 'K', 'AH0', 'T', 'IH0', 'D']), 'jackman': ('NN', ['JH', 'AE1', 
'K', 'M', 'AH0', 'N']), 'jacob': ('NN', ['JH', 'EY1', 'K', 'AH0', 'B']), 'jacobean': ('NN', ['JH', 'AE2', 'K', 'AH0', 'B', 'IY1', 'AH0', 'N']), 'jacobin': ('NN', ['JH', 'AE1', 'K', 'AH0', 'B', 'AH0', 'N']), 'jacobine': ('NN', ['JH', 'AE1', 'K', 'AH0', 'B', 'AY2', 'N']), 'jacobite': ('NN', ['JH', 'AE1', 'K', 'AH0', 'B', 'AY2', 'T']), 'jacobus': ('NN', ['JH', 'AH0', 'K', 'OW1', 'B', 'AH0', 'S']), 'jade': ('NN', ['JH', 'EY1', 'D']), 'jaded': ('VBD', ['JH', 'EY1', 'D', 'AH0', 'D']), 'jadeite': ('NN', ['JH', 'EY1', 'D', 'AY2', 'T']), 'jaeger': ('NN', ['JH', 'EH1', 'G', 'ER0']), 'jagged': ('NN', ['JH', 'AE1', 'G', 'D']), 'jager': ('NN', ['JH', 'EY1', 'G', 'ER0']), 'jagger': ('NN', ['JH', 'AE1', 'G', 'ER0']), 'jaguar': ('NN', ['JH', 'AE1', 'G', 'W', 'AA2', 'R']), 'jail': ('NN', ['JH', 'EY1', 'L']), 'jailer': ('NN', ['JH', 'EY1', 'L', 'ER0']), 'jain': ('NN', ['JH', 'AY1', 'N']), 'jainism': ('NN', ['JH', 'EY1', 'N', 'IH0', 'Z', 'AH0', 'M']), 'jakes': ('NNS', ['JH', 'EY1', 'K', 'S']), 'jakie': ('NN', ['JH', 'AE1', 'K', 'IY0']), 'jam': ('NN', ['JH', 'AE1', 'M']), 'jammed': ('NN', ['JH', 'AE1', 'M', 'D']), 'jamming': ('NN', ['JH', 'AE1', 'M', 'IH0', 'NG']), 'jamaica': ('NN', ['JH', 'AH0', 'M', 'EY1', 'K', 'AH0']), 'jamaican': ('NN', ['JH', 'AH0', 'M', 'EY1', 'K', 'AH0', 'N']), 'jamb': ('NN', ['JH', 'AE1', 'M']), 'jan': ('NN', ['JH', 'AE1', 'N']), 'jane': ('NN', ['JH', 'EY1', 'N']), 'jangled': ('NNS', ['JH', 'AE1', 'NG', 'G', 'AH0', 'L', 'D']), 'jangle': ('NN', ['JH', 'AE1', 'NG', 'G', 'AH0', 'L']), 'janitor': ('NN', ['JH', 'AE1', 'N', 'AH0', 'T', 'ER0']), 'jansenism': ('NN', ['JH', 'AE1', 'N', 'S', 'IH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'jansenist': ('NN', ['JH', 'AE1', 'N', 'S', 'AH0', 'N', 'AH0', 'S', 'T']), 'janthina': ('NN', ['Y', 'AA0', 'N', 'TH', 'IY1', 'N', 'AH0']), 'january': ('NN', ['JH', 'AE1', 'N', 'Y', 'UW0', 'EH2', 'R', 'IY0']), 'janus': ('NN', ['JH', 'EY1', 'N', 'AH0', 'S']), 'japan': ('NN', ['JH', 'AH0', 'P', 'AE1', 'N']), 'japanese': ('JJ', ['JH', 'AE2', 'P', 
'AH0', 'N', 'IY1', 'Z']), 'japonica': ('NN', ['JH', 'AH0', 'P', 'AA1', 'N', 'AH0', 'K', 'AH0']), 'jar': ('NN', ['JH', 'AA1', 'R']), 'jarred': ('NN', ['JH', 'AA1', 'R', 'D']), 'jarring': ('NN', ['JH', 'AA1', 'R', 'IH0', 'NG']), 'jargon': ('NN', ['JH', 'AA1', 'R', 'G', 'AH0', 'N']), 'jasmine': ('NN', ['JH', 'AE1', 'Z', 'M', 'AH0', 'N']), 'jasper': ('NN', ['JH', 'AE1', 'S', 'P', 'ER0']), 'jaundice': ('NN', ['JH', 'AO1', 'N', 'D', 'AH0', 'S']), 'jaundiced': ('NNS', ['JH', 'AO1', 'N', 'D', 'IH0', 'S', 'T']), 'jaunt': ('NN', ['JH', 'AO1', 'N', 'T']), 'jauntily': ('RB', ['JH', 'AO1', 'N', 'T', 'AH0', 'L', 'IY0']), 'jaunty': ('NN', ['JH', 'AO1', 'N', 'T', 'IY0']), 'java': ('NN', ['JH', 'AA1', 'V', 'AH0']), 'javanese': ('NN', ['JH', 'AA2', 'V', 'AH0', 'N', 'IY1', 'Z']), 'javelin': ('NN', ['JH', 'AE1', 'V', 'AH0', 'L', 'AH0', 'N']), 'jaw': ('NN', ['JH', 'AO1']), 'jawed': ('NN', ['JH', 'AO1', 'D']), 'jawbone': ('NN', ['JH', 'AO1', 'B', 'OW2', 'N']), 'jay': ('NN', ['JH', 'EY1']), 'jayhawker': ('NN', ['JH', 'EY1', 'HH', 'AO0', 'K', 'ER0']), 'jealous': ('JJ', ['JH', 'EH1', 'L', 'AH0', 'S']), 'jealously': ('RB', ['JH', 'EH1', 'L', 'AH0', 'S', 'L', 'IY0']), 'jealousness': ('NN', ['JH', 'EH1', 'L', 'AH0', 'S', 'N', 'IH0', 'S']), 'jealousies': ('NNS', ['JH', 'EH1', 'L', 'AH0', 'S', 'IY0', 'Z']), 'jealousy': ('NN', ['JH', 'EH1', 'L', 'AH0', 'S', 'IY0']), 'jean': ('NN', ['JH', 'IY1', 'N']), 'jee': ('NN', ['JH', 'IY1']), 'jeer': ('NN', ['JH', 'IH1', 'R']), 'jeered': ('NN', ['JH', 'IH1', 'R', 'D']), 'jeering': ('NN', ['JH', 'IY1', 'R', 'IH0', 'NG']), 'jeers': ('NNS', ['JH', 'IY1', 'R', 'Z']), 'jeffersonian': ('NN', ['JH', 'EH2', 'F', 'ER0', 'S', 'OW1', 'N', 'IY0', 'AH0', 'N']), 'jehovah': ('NN', ['JH', 'AH0', 'HH', 'OW1', 'V', 'AH0']), 'jell': ('NN', ['JH', 'EH1', 'L']), 'jellied': ('NNS', ['JH', 'EH1', 'L', 'IY0', 'D']), 'jellies': ('NNS', ['JH', 'EH1', 'L', 'IY0', 'Z']), 'jelly': ('RB', ['JH', 'EH1', 'L', 'IY0']), 'jellyfish': ('NN', ['JH', 'EH1', 'L', 'IY0', 'F', 'IH2', 'SH']), 
'jenkins': ('NNS', ['JH', 'EH1', 'NG', 'K', 'AH0', 'N', 'Z']), 'jenny': ('NN', ['JH', 'EH1', 'N', 'IY0']), 'jeopardized': ('NN', ['JH', 'EH1', 'P', 'ER0', 'D', 'AY2', 'Z', 'D']), 'jeopardizing': ('NN', ['JH', 'EH1', 'P', 'ER0', 'D', 'AY2', 'Z', 'IH0', 'NG']), 'jeopardize': ('NN', ['JH', 'EH1', 'P', 'ER0', 'D', 'AY2', 'Z']), 'jeopardy': ('NN', ['JH', 'EH1', 'P', 'ER0', 'D', 'IY0']), 'jeremiad': ('NN', ['JH', 'EH2', 'R', 'AH0', 'M', 'AY1', 'AH0', 'D']), 'jerk': ('NN', ['JH', 'ER1', 'K']), 'jerked': ('NNS', ['JH', 'ER1', 'K', 'T']), 'jerking': ('NN', ['JH', 'ER1', 'K', 'IH0', 'NG']), 'jerky': ('NN', ['JH', 'ER1', 'K', 'IY0']), 'jerseys': ('NNS', ['JH', 'ER1', 'Z', 'IY0', 'Z']), 'jersey': ('NN', ['JH', 'ER1', 'Z', 'IY0']), 'jerusalem': ('NN', ['JH', 'ER0', 'UW1', 'S', 'AH0', 'L', 'AH0', 'M']), 'jess': ('NN', ['JH', 'EH1', 'S']), 'jessamine': ('NN', ['JH', 'EH1', 'S', 'AH0', 'M', 'IH0', 'N']), 'jesse': ('NN', ['JH', 'EH1', 'S', 'IY0']), 'jest': ('NN', ['JH', 'EH1', 'S', 'T']), 'jester': ('NN', ['JH', 'EH1', 'S', 'T', 'ER0']), 'jesuit': ('NN', ['JH', 'EH1', 'ZH', 'UW0', 'IH0', 'T']), 'jesus': ('NN', ['JH', 'IY1', 'Z', 'AH0', 'S']), 'jet': ('NN', ['JH', 'EH1', 'T']), 'jetted': ('VBN', ['JH', 'EH1', 'T', 'IH0', 'D']), 'jetting': ('NN', ['JH', 'EH1', 'T', 'IH0', 'NG']), 'jetson': ('NN', ['JH', 'EH1', 'T', 'S', 'AH0', 'N']), 'jetter': ('NN', ['JH', 'EH1', 'T', 'ER0']), 'jettison': ('NN', ['JH', 'EH1', 'T', 'IH0', 'S', 'AH0', 'N']), 'jetton': ('NN', ['JH', 'EH1', 'T', 'AH0', 'N']), 'jetty': ('NN', ['JH', 'EH1', 'T', 'IY0']), 'jew': ('NN', ['JH', 'UW1']), 'jewel': ('NN', ['JH', 'UW1', 'AH0', 'L']), 'jeweler': ('NN', ['JH', 'UW1', 'AH0', 'L', 'ER0']), 'jewelry': ('NN', ['JH', 'UW1', 'AH0', 'L', 'R', 'IY0']), 'jewess': ('NN', ['JH', 'UW1', 'AH0', 'S']), 'jewfish': ('NN', ['JH', 'UW1', 'F', 'IH2', 'SH']), 'jewish': ('NN', ['JH', 'UW1', 'IH0', 'SH']), 'jewry': ('NN', ['JH', 'UW1', 'R', 'IY0']), 'jib': ('NN', ['JH', 'IH1', 'B']), 'jibe': ('NN', ['JH', 'AY1', 'B']), 'jiffy': ('NN', 
['JH', 'IH1', 'F', 'IY0']), 'jig': ('NN', ['JH', 'IH1', 'G']), 'jiggle': ('NN', ['JH', 'IH1', 'G', 'AH0', 'L']), 'jill': ('NN', ['JH', 'IH1', 'L']), 'jilt': ('NN', ['JH', 'IH1', 'L', 'T']), 'jilted': ('VBN', ['JH', 'IH1', 'L', 'T', 'IH0', 'D']), 'jimmy': ('NN', ['JH', 'IH1', 'M', 'IY0']), 'jin': ('NN', ['JH', 'IH1', 'N']), 'jingle': ('NN', ['JH', 'IH1', 'NG', 'G', 'AH0', 'L']), 'jingoism': ('NN', ['JH', 'IH1', 'NG', 'G', 'OW2', 'IH0', 'Z', 'AH0', 'M']), 'joes': ('NNS', ['JH', 'OW1', 'Z']), 'jo': ('NN', ['JH', 'OW1']), 'job': ('NN', ['JH', 'AA1', 'B']), 'jobber': ('NN', ['JH', 'AA1', 'B', 'ER0']), 'jockeys': ('NNS', ['JH', 'AA1', 'K', 'IY0', 'Z']), 'jockey': ('NN', ['JH', 'AA1', 'K', 'IY0']), 'jockeying': ('NN', ['JH', 'AA1', 'K', 'IY0', 'IH0', 'NG']), 'jocular': ('NN', ['JH', 'AA1', 'K', 'Y', 'AH0', 'L', 'ER0']), 'joe': ('NN', ['JH', 'OW1']), 'jogged': ('NN', ['JH', 'AA1', 'G', 'D']), 'jogging': ('NN', ['JH', 'AA1', 'G', 'IH0', 'NG']), 'jog': ('NN', ['JH', 'AA1', 'G']), 'jogger': ('NN', ['JH', 'AA1', 'G', 'ER0']), 'johannes': ('NNS', ['JH', 'OW0', 'HH', 'AE1', 'N', 'AH0', 'S']), 'john': ('NN', ['JH', 'AA1', 'N']), 'johnny': ('NN', ['JH', 'AA1', 'N', 'IY0']), 'joined': ('VBD', ['JH', 'OY1', 'N', 'D']), 'joining': ('VBG', ['JH', 'OY1', 'N', 'IH0', 'NG']), 'join': ('NN', ['JH', 'OY1', 'N']), 'joiner': ('NN', ['JH', 'OY1', 'N', 'ER0']), 'joinery': ('NN', ['JH', 'OY1', 'N', 'ER0', 'IY0']), 'joint': ('NN', ['JH', 'OY1', 'N', 'T']), 'jointed': ('VBN', ['JH', 'OY1', 'N', 'T', 'AH0', 'D']), 'jointly': ('RB', ['JH', 'OY1', 'N', 'T', 'L', 'IY0']), 'joist': ('NN', ['JH', 'OY1', 'S', 'T']), 'joke': ('NN', ['JH', 'OW1', 'K']), 'joked': ('NNS', ['JH', 'OW1', 'K', 'T']), 'joking': ('NN', ['JH', 'OW1', 'K', 'IH0', 'NG']), 'joker': ('NN', ['JH', 'OW1', 'K', 'ER0']), 'jokingly': ('RB', ['JH', 'OW1', 'K', 'IH0', 'NG', 'L', 'IY0']), 'jolly': ('RB', ['JH', 'AA1', 'L', 'IY0']), 'jolted': ('VBN', ['JH', 'OW1', 'L', 'T', 'IH0', 'D']), 'jolt': ('NN', ['JH', 'OW1', 'L', 'T']), 'jonah': 
('NN', ['JH', 'OW1', 'N', 'AH0']), 'jordan': ('NN', ['JH', 'AO1', 'R', 'D', 'AH0', 'N']), 'jorden': ('NN', ['JH', 'AO1', 'R', 'D', 'AH0', 'N']), 'joseph': ('NN', ['JH', 'OW1', 'S', 'AH0', 'F']), 'joss': ('NN', ['JH', 'AO1', 'S']), 'jostled': ('NNS', ['JH', 'AA1', 'S', 'AH0', 'L', 'D']), 'jostling': ('NN', ['JH', 'AA1', 'S', 'AH0', 'L', 'IH0', 'NG']), 'jostle': ('NN', ['JH', 'AA1', 'S', 'AH0', 'L']), 'jot': ('NN', ['JH', 'AA1', 'T']), 'jotted': ('VBN', ['JH', 'AA1', 'T', 'IH0', 'D']), 'joule': ('NN', ['JH', 'UW1', 'L']), 'journal': ('NN', ['JH', 'ER1', 'N', 'AH0', 'L']), 'journalism': ('NN', ['JH', 'ER1', 'N', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'journalist': ('NN', ['JH', 'ER1', 'N', 'AH0', 'L', 'AH0', 'S', 'T']), 'journalistic': ('JJ', ['JH', 'ER2', 'N', 'AH0', 'L', 'IH1', 'S', 'T', 'IH0', 'K']), 'journeys': ('NNS', ['JH', 'ER1', 'N', 'IY0', 'Z']), 'journey': ('NN', ['JH', 'ER1', 'N', 'IY0']), 'journeyed': ('NN', ['JH', 'ER1', 'N', 'IY0', 'D']), 'journeying': ('NN', ['JH', 'ER1', 'N', 'IY0', 'IH0', 'NG']), 'journeyman': ('NN', ['JH', 'ER1', 'N', 'IY0', 'M', 'AE2', 'N']), 'joust': ('NN', ['JH', 'AW1', 'S', 'T']), 'jovial': ('NN', ['JH', 'OW1', 'V', 'IY0', 'AH0', 'L']), 'jovian': ('NN', ['JH', 'OW1', 'V', 'IY0', 'AH0', 'N']), 'jowl': ('NN', ['JH', 'AW1', 'L']), 'joy': ('NN', ['JH', 'OY1']), 'joying': ('NN', ['JH', 'OY1', 'IH0', 'NG']), 'joyful': ('NN', ['JH', 'OY1', 'F', 'AH0', 'L']), 'joyous': ('JJ', ['JH', 'OY1', 'AH0', 'S']), 'juba': ('NN', ['JH', 'UW1', 'B', 'AH0']), 'jubilant': ('NN', ['JH', 'UW1', 'B', 'AH0', 'L', 'AH0', 'N', 'T']), 'jubilation': ('NN', ['JH', 'UW2', 'B', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'jubilee': ('NN', ['JH', 'UW1', 'B', 'AH0', 'L', 'IY2']), 'judaism': ('NN', ['JH', 'UW1', 'D', 'EY0', 'IH2', 'Z', 'AH0', 'M']), 'judas': ('NN', ['JH', 'UW1', 'D', 'AH0', 'S']), 'judge': ('NN', ['JH', 'AH1', 'JH']), 'judged': ('NN', ['JH', 'AH1', 'JH', 'D']), 'judging': ('NN', ['JH', 'AH1', 'JH', 'IH0', 'NG']), 'judgeship': ('NN', ['JH', 'AH1', 'JH', 
'SH', 'IH2', 'P']), 'judgment': ('NN', ['JH', 'AH1', 'JH', 'M', 'AH0', 'N', 'T']), 'judicial': ('NN', ['JH', 'UW0', 'D', 'IH1', 'SH', 'AH0', 'L']), 'judicially': ('RB', ['JH', 'UW0', 'D', 'IH1', 'SH', 'AH0', 'L', 'IY0']), 'judiciary': ('NN', ['JH', 'UW0', 'D', 'IH1', 'SH', 'IY0', 'EH2', 'R', 'IY0']), 'judicious': ('JJ', ['JH', 'UW0', 'D', 'IH1', 'SH', 'AH0', 'S']), 'judiciously': ('RB', ['JH', 'UW0', 'D', 'IH1', 'SH', 'IH0', 'S', 'L', 'IY0']), 'jug': ('NN', ['JH', 'AH1', 'G']), 'juge': ('NN', ['JH', 'UW1', 'JH']), 'juggernaut': ('NN', ['JH', 'AH1', 'G', 'ER0', 'N', 'AO2', 'T']), 'juggled': ('NNS', ['JH', 'AH1', 'G', 'AH0', 'L', 'D']), 'juggling': ('NN', ['JH', 'AH1', 'G', 'AH0', 'L', 'IH0', 'NG']), 'juggle': ('NN', ['JH', 'AH1', 'G', 'AH0', 'L']), 'juggler': ('NN', ['JH', 'AH1', 'G', 'AH0', 'L', 'ER0']), 'jugular': ('NN', ['JH', 'UW1', 'G', 'Y', 'AH0', 'L', 'ER0']), 'juice': ('NN', ['JH', 'UW1', 'S']), 'juicy': ('NN', ['JH', 'UW1', 'S', 'IY0']), 'juke': ('NN', ['JH', 'UW1', 'K']), 'julep': ('NN', ['JH', 'UW1', 'L', 'AH0', 'P']), 'julian': ('NN', ['JH', 'UW1', 'L', 'IY0', 'AH0', 'N']), 'juli': ('NN', ['JH', 'UW1', 'L', 'IY0']), 'july': ('NN', ['JH', 'UW2', 'L', 'AY1']), 'jumbled': ('NNS', ['JH', 'AH1', 'M', 'B', 'AH0', 'L', 'D']), 'jumble': ('JJ', ['JH', 'AH1', 'M', 'B', 'AH0', 'L']), 'jump': ('NN', ['JH', 'AH1', 'M', 'P']), 'jumped': ('NN', ['JH', 'AH1', 'M', 'P', 'T']), 'jumping': ('NN', ['JH', 'AH1', 'M', 'P', 'IH0', 'NG']), 'jumper': ('NN', ['JH', 'AH1', 'M', 'P', 'ER0']), 'junco': ('NN', ['JH', 'AH1', 'NG', 'K', 'OW0']), 'junction': ('NN', ['JH', 'AH1', 'NG', 'K', 'SH', 'AH0', 'N']), 'juncture': ('NN', ['JH', 'AH1', 'NG', 'K', 'CH', 'ER0']), 'june': ('NN', ['JH', 'UW1', 'N']), 'jungle': ('NN', ['JH', 'AH1', 'NG', 'G', 'AH0', 'L']), 'junior': ('NN', ['JH', 'UW1', 'N', 'Y', 'ER0']), 'juniper': ('NN', ['JH', 'UW1', 'N', 'AH0', 'P', 'ER0']), 'junk': ('NN', ['JH', 'AH1', 'NG', 'K']), 'junker': ('NN', ['JH', 'AH1', 'NG', 'K', 'ER0']), 'junket': ('NN', ['JH', 'AH1', 
'NG', 'K', 'IH0', 'T']), 'junta': ('NN', ['HH', 'UH1', 'N', 'T', 'AH0']), 'jupiter': ('NN', ['JH', 'UW1', 'P', 'AH0', 'T', 'ER0']), 'jura': ('NN', ['JH', 'UH1', 'R', 'AH0']), 'jurassic': ('NN', ['JH', 'UH0', 'R', 'AE1', 'S', 'IH0', 'K']), 'jurisdiction': ('NN', ['JH', 'UH2', 'R', 'AH0', 'S', 'D', 'IH1', 'K', 'SH', 'AH0', 'N']), 'jurisdictional': ('NN', ['JH', 'UH2', 'R', 'AH0', 'S', 'D', 'IH1', 'K', 'SH', 'AH0', 'N', 'AH0', 'L']), 'jurisprudence': ('NN', ['JH', 'UH2', 'R', 'AH0', 'S', 'P', 'R', 'UW1', 'D', 'AH0', 'N', 'S']), 'jurisprudential': ('NN', ['JH', 'UH2', 'R', 'AH0', 'S', 'P', 'R', 'UW2', 'D', 'EH1', 'N', 'SH', 'AH0', 'L']), 'jurist': ('NN', ['JH', 'UH1', 'R', 'AH0', 'S', 'T']), 'juror': ('NN', ['JH', 'UH1', 'R', 'ER0']), 'jury': ('NN', ['JH', 'UH1', 'R', 'IY0']), 'juries': ('NNS', ['JH', 'UH1', 'R', 'IY0', 'Z']), 'just': ('RB', ['JH', 'AH1', 'S', 'T']), 'justice': ('NN', ['JH', 'AH1', 'S', 'T', 'AH0', 'S']), 'justifiable': ('JJ', ['JH', 'AH1', 'S', 'T', 'AH0', 'F', 'AY2', 'AH0', 'B', 'AH0', 'L']), 'justification': ('NN', ['JH', 'AH2', 'S', 'T', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'justified': ('JJ', ['JH', 'AH1', 'S', 'T', 'AH0', 'F', 'AY2', 'D']), 'justifying': ('VBG', ['JH', 'AH1', 'S', 'T', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'justify': ('NN', ['JH', 'AH1', 'S', 'T', 'AH0', 'F', 'AY2']), 'justly': ('RB', ['JH', 'AH1', 'S', 'T', 'L', 'IY0']), 'jutting': ('NN', ['JH', 'AH1', 'T', 'IH0', 'NG']), 'jut': ('NN', ['JH', 'AH1', 'T']), 'jute': ('NN', ['JH', 'UW1', 'T']), 'juvenile': ('NN', ['JH', 'UW1', 'V', 'AH0', 'N', 'AH0', 'L']), 'juxtapose': ('NN', ['JH', 'AH2', 'K', 'S', 'T', 'AH0', 'P', 'OW1', 'Z']), 'juxtaposition': ('NN', ['JH', 'AH2', 'K', 'S', 'T', 'AH0', 'P', 'AH0', 'Z', 'IH1', 'SH', 'AH0', 'N']), 'k': ('NN', ['K', 'EY1']), 'kage': ('NN', ['K', 'EY1', 'JH']), 'kail': ('NN', ['K', 'EY1', 'L']), 'kain': ('NN', ['K', 'EY1', 'N']), 'kaiser': ('NN', ['K', 'AY1', 'Z', 'ER0']), 'kalan': ('NN', ['K', 'EY1', 'L', 'AH0', 'N']), 'kale': ('NN', 
['K', 'EY1', 'L']), 'kaleidoscope': ('NN', ['K', 'AH0', 'L', 'AY1', 'D', 'AH0', 'S', 'K', 'OW2', 'P']), 'kam': ('NN', ['K', 'AE1', 'M']), 'kama': ('NN', ['K', 'AA1', 'M', 'AH0']), 'kan': ('NN', ['K', 'AE1', 'N']), 'kangaroo': ('NN', ['K', 'AE2', 'NG', 'G', 'ER0', 'UW1']), 'kansas': ('NN', ['K', 'AE1', 'N', 'Z', 'AH0', 'S']), 'kantian': ('NN', ['K', 'AE1', 'N', 'T', 'IY0', 'AH0', 'N']), 'kaolin': ('NN', ['K', 'AW1', 'L', 'IH0', 'N']), 'karma': ('NN', ['K', 'AA1', 'R', 'M', 'AH0']), 'karn': ('NN', ['K', 'AA1', 'R', 'N']), 'kat': ('NN', ['K', 'AE1', 'T']), 'kate': ('NN', ['K', 'EY1', 'T']), 'katydid': ('NN', ['K', 'EY1', 'T', 'IY0', 'D', 'IH2', 'D']), 'kayak': ('NN', ['K', 'AY1', 'AE0', 'K']), 'kayaker': ('NN', ['K', 'AY1', 'AE0', 'K', 'ER0']), 'keck': ('NN', ['K', 'EH1', 'K']), 'kee': ('NN', ['K', 'IY1']), 'keech': ('NN', ['K', 'IY1', 'CH']), 'keel': ('NN', ['K', 'IY1', 'L']), 'keeling': ('VBG', ['K', 'IY1', 'L', 'IH0', 'NG']), 'keeler': ('NN', ['K', 'IY1', 'L', 'ER0']), 'men': ('NNS', ['M', 'EH1', 'N']), 'keels': ('NNS', ['K', 'IY1', 'L', 'Z']), 'keelson': ('NN', ['K', 'EH1', 'L', 'S', 'AH0', 'N']), 'keen': ('NN', ['K', 'IY1', 'N']), 'keener': ('NN', ['K', 'IY1', 'N', 'ER0']), 'keenly': ('RB', ['K', 'IY1', 'N', 'L', 'IY0']), 'kept': ('NN', ['K', 'EH1', 'P', 'T']), 'keeping': ('VBG', ['K', 'IY1', 'P', 'IH0', 'NG']), 'keep': ('VB', ['K', 'IY1', 'P']), 'keeper': ('NN', ['K', 'IY1', 'P', 'ER0']), 'keepsake': ('VB', ['K', 'IY1', 'P', 'S', 'EY2', 'K']), 'keever': ('NN', ['K', 'IY1', 'V', 'ER0']), 'keg': ('NN', ['K', 'EH1', 'G']), 'keir': ('NN', ['K', 'IY1', 'R']), 'kell': ('NN', ['K', 'EH1', 'L']), 'kelp': ('NN', ['K', 'EH1', 'L', 'P']), 'kelson': ('NN', ['K', 'EH1', 'L', 'S', 'AH0', 'N']), 'kelter': ('NN', ['K', 'EH1', 'L', 'T', 'ER0']), 'kemp': ('NN', ['K', 'EH1', 'M', 'P']), 'kempe': ('NN', ['K', 'EH1', 'M', 'P']), 'ken': ('VB', ['K', 'EH1', 'N']), 'kenning': ('VBG', ['K', 'EH1', 'N', 'IH0', 'NG']), 'kendal': ('NN', ['K', 'EH1', 'N', 'D', 'AH0', 'L']), 'kennel': 
('NNS', ['K', 'EH1', 'N', 'AH0', 'L']), 'keno': ('NN', ['K', 'IY1', 'N', 'OW0']), 'kentucky': ('NN', ['K', 'AH0', 'N', 'T', 'AH1', 'K', 'IY0']), 'keratin': ('NN', ['K', 'EH1', 'R', 'AH0', 'T', 'AH0', 'N']), 'kerb': ('NN', ['K', 'ER1', 'B']), 'kercher': ('NN', ['K', 'ER1', 'K', 'ER0']), 'kerchiefs': ('NNS', ['K', 'ER1', 'CH', 'AH0', 'F', 'S']), 'kerchief': ('NN', ['K', 'ER1', 'CH', 'AH0', 'F']), 'kerl': ('NN', ['K', 'ER1', 'L']), 'kern': ('NN', ['K', 'ER1', 'N']), 'kernel': ('NNS', ['K', 'ER1', 'N', 'AH0', 'L']), 'kerosene': ('NN', ['K', 'EH1', 'R', 'AH0', 'S', 'IY2', 'N']), 'kersey': ('NN', ['K', 'ER1', 'S', 'IY0']), 'kess': ('NN', ['K', 'EH1', 'S']), 'ketch': ('NN', ['K', 'EH1', 'CH']), 'ketchup': ('NN', ['K', 'EH1', 'CH', 'AH0', 'P']), 'ketone': ('NN', ['K', 'IY1', 'T', 'OW0', 'N']), 'kettle': ('NN', ['K', 'EH1', 'T', 'AH0', 'L']), 'kevin': ('NN', ['K', 'EH1', 'V', 'IH0', 'N']), 'kever': ('NN', ['K', 'EH1', 'V', 'ER0']), 'key': ('NN', ['K', 'IY1']), 'keying': ('VBG', ['K', 'IY1', 'IH0', 'NG']), 'keyboard': ('NN', ['K', 'IY1', 'B', 'AO2', 'R', 'D']), 'keyed': ('NNS', ['K', 'IY1', 'D']), 'keyhole': ('NN', ['K', 'IY1', 'HH', 'OW2', 'L']), 'keynote': ('NN', ['K', 'IY1', 'N', 'OW2', 'T']), 'keystone': ('NN', ['K', 'IY1', 'S', 'T', 'OW2', 'N']), 'khan': ('NN', ['K', 'AA1', 'N']), 'khanate': ('NN', ['K', 'AA1', 'N', 'EY0', 'T']), 'kibble': ('JJ', ['K', 'IH1', 'B', 'AH0', 'L']), 'kicking': ('VBG', ['K', 'IH1', 'K', 'IH0', 'NG']), 'kick': ('NN', ['K', 'IH1', 'K']), 'kicker': ('NN', ['K', 'IH1', 'K', 'ER0']), 'kid': ('NN', ['K', 'IH1', 'D']), 'kidded': ('VBN', ['K', 'IH1', 'D', 'IH0', 'D']), 'kidding': ('VBG', ['K', 'IH1', 'D', 'IH0', 'NG']), 'kidde': ('NN', ['K', 'IH1', 'D']), 'kiddle': ('VB', ['K', 'IH1', 'D', 'AH0', 'L']), 'kiddy': ('NN', ['K', 'IH1', 'D', 'IY0']), 'kidnaped': ('VBD', ['K', 'IH1', 'D', 'N', 'AE2', 'P', 'T']), 'kidnapped': ('VBD', ['K', 'IH1', 'D', 'N', 'AE2', 'P', 'T']), 'kidnaping': ('VBG', ['K', 'IH1', 'D', 'N', 'AE2', 'P', 'IH0', 'NG']), 
'kidnapping': ('NN', ['K', 'IH1', 'D', 'N', 'AE2', 'P', 'IH0', 'NG']), 'kidnap': ('NN', ['K', 'IH1', 'D', 'N', 'AE2', 'P']), 'kidnapper': ('NN', ['K', 'IH1', 'D', 'N', 'AE2', 'P', 'ER0']), 'kidneys': ('NNS', ['K', 'IH1', 'D', 'N', 'IY0', 'Z']), 'kidney': ('NN', ['K', 'IH1', 'D', 'N', 'IY0']), 'kier': ('NN', ['K', 'IH1', 'R']), 'kill': ('NN', ['K', 'IH1', 'L']), 'killed': ('VBN', ['K', 'IH1', 'L', 'D']), 'killing': ('VBG', ['K', 'IH1', 'L', 'IH0', 'NG']), 'killer': ('NN', ['K', 'IH1', 'L', 'ER0']), 'killifish': ('VB', ['K', 'IH1', 'L', 'IH0', 'F', 'IH0', 'SH']), 'kiln': ('NN', ['K', 'IH1', 'L', 'N']), 'kilos': ('NN', ['K', 'IY1', 'L', 'OW2', 'Z']), 'kilo': ('NN', ['K', 'IH1', 'L', 'OW2']), 'kilogram': ('NN', ['K', 'IH1', 'L', 'AH0', 'G', 'R', 'AE2', 'M']), 'l': ('NN', ['EH1', 'L']), 'la': ('NN', ['L', 'AA1']), 'laas': ('NN', ['L', 'AA1', 'Z']), 'lab': ('NN', ['L', 'AE1', 'B']), 'label': ('NN', ['L', 'EY1', 'B', 'AH0', 'L']), 'labeled': ('VBN', ['L', 'EY1', 'B', 'AH0', 'L', 'D']), 'labelled': ('VBN', ['L', 'EY1', 'B', 'AH0', 'L', 'D']), 'labeling': ('VBG', ['L', 'EY1', 'B', 'AH0', 'L', 'IH0', 'NG']), 'labella': ('NN', ['L', 'AH0', 'B', 'EH1', 'L', 'AH0']), 'labor': ('NN', ['L', 'EY1', 'B', 'ER0']), 'labored': ('VBN', ['L', 'EY1', 'B', 'ER0', 'D']), 'laboring': ('VBG', ['L', 'EY1', 'B', 'ER0', 'IH0', 'NG']), 'laboratories': ('NNS', ['L', 'AE1', 'B', 'R', 'AH0', 'T', 'AO2', 'R', 'IY0', 'Z']), 'laboratory': ('NN', ['L', 'AE1', 'B', 'R', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'laborer': ('NN', ['L', 'EY1', 'B', 'ER0', 'ER0']), 'laborious': ('JJ', ['L', 'AH0', 'B', 'AO1', 'R', 'IY0', 'AH0', 'S']), 'labrador': ('NN', ['L', 'AE1', 'B', 'R', 'AH0', 'D', 'AO2', 'R']), 'labyrinth': ('NN', ['L', 'AE1', 'B', 'ER0', 'IH2', 'N', 'TH']), 'labyrinthine': ('NN', ['L', 'AE2', 'B', 'ER0', 'IH1', 'N', 'TH', 'IY2', 'N']), 'lac': ('NN', ['L', 'AE1', 'K']), 'lace': ('NN', ['L', 'EY1', 'S']), 'laced': ('VBN', ['L', 'EY1', 'S', 'T']), 'lacerate': ('NN', ['L', 'AE1', 'S', 'ER0', 'EY2', 'T']), 
'laceration': ('NN', ['L', 'AE2', 'S', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'lache': ('NN', ['L', 'AE1', 'CH']), 'ries': ('NNS', ['R', 'AY1', 'Z']), 'lack': ('NN', ['L', 'AE1', 'K']), 'lacked': ('VBN', ['L', 'AE1', 'K', 'T']), 'lacking': ('VBG', ['L', 'AE1', 'K', 'IH0', 'NG']), 'lackadaisical': ('JJ', ['L', 'AE2', 'K', 'AH0', 'D', 'EY1', 'Z', 'IH0', 'K', 'AH0', 'L']), 'lackeys': ('NNS', ['L', 'AE1', 'K', 'IY0', 'Z']), 'lackey': ('NN', ['L', 'AE1', 'K', 'IY0']), 'lackluster': ('NN', ['L', 'AE1', 'K', 'L', 'AH2', 'S', 'T', 'ER0']), 'laconic': ('NN', ['L', 'AA0', 'K', 'AA1', 'N', 'IH0', 'K']), 'lacquer': ('NN', ['L', 'AE1', 'K', 'ER0']), 'lacquered': ('VBN', ['L', 'AE1', 'K', 'ER0', 'D']), 'lacrosse': ('NN', ['L', 'AH0', 'K', 'R', 'AO1', 'S']), 'lactate': ('NN', ['L', 'AE1', 'K', 'T', 'EY0', 'T']), 'lactation': ('NN', ['L', 'AE0', 'K', 'T', 'EY1', 'SH', 'AH0', 'N']), 'lactic': ('JJ', ['L', 'AE1', 'K', 'T', 'IH0', 'K']), 'lactone': ('NN', ['L', 'AE1', 'K', 'T', 'OW0', 'N']), 'lactose': ('NN', ['L', 'AE1', 'K', 'T', 'OW0', 'S']), 'lad': ('NN', ['L', 'AE1', 'D']), 'ladder': ('NN', ['L', 'AE1', 'D', 'ER0']), 'lading': ('VBG', ['L', 'EY1', 'D', 'IH0', 'NG']), 'lade': ('NN', ['L', 'EY1', 'D']), 'laden': ('NN', ['L', 'EY1', 'D', 'AH0', 'N']), 'ladino': ('NN', ['L', 'AH0', 'D', 'IY1', 'N', 'OW0']), 'ladle': ('NN', ['L', 'EY1', 'D', 'AH0', 'L']), 'ladled': ('VBN', ['L', 'EY1', 'D', 'AH0', 'L', 'D']), 'ladling': ('VBG', ['L', 'EY1', 'D', 'L', 'IH0', 'NG']), 'ladies': ('NNS', ['L', 'EY1', 'D', 'IY0', 'Z']), 'lady': ('NN', ['L', 'EY1', 'D', 'IY0']), 'ladybird': ('NN', ['L', 'EY1', 'D', 'IY0', 'B', 'ER2', 'D']), 'ladybug': ('NN', ['L', 'EY1', 'D', 'IY0', 'B', 'AH2', 'G']), 'ladylike': ('NN', ['L', 'EY1', 'D', 'IY0', 'L', 'AY2', 'K']), 'lafayette': ('NN', ['L', 'AA2', 'F', 'IY0', 'EH1', 'T']), 'lag': ('NN', ['L', 'AE1', 'G']), 'lagged': ('VBN', ['L', 'AE1', 'G', 'D']), 'lagging': ('VBG', ['L', 'AE1', 'G', 'IH0', 'NG']), 'lagan': ('NN', ['L', 'AE1', 'G', 'AH0', 'N']), 'lager': ('NN', 
['L', 'AA1', 'G', 'ER0']), 'laggard': ('NN', ['L', 'AE1', 'G', 'ER0', 'D']), 'lagoon': ('NN', ['L', 'AH0', 'G', 'UW1', 'N']), 'laid': ('NN', ['L', 'EY1', 'D']), 'lain': ('NN', ['L', 'EY1', 'N']), 'lair': ('NN', ['L', 'EH1', 'R']), 'laird': ('NN', ['L', 'EH1', 'R', 'D']), 'laity': ('NN', ['L', 'EY1', 'AH0', 'T', 'IY0']), 'lake': ('NN', ['L', 'EY1', 'K']), 'lakin': ('NN', ['L', 'AE1', 'K', 'IH0', 'N']), 'lam': ('NN', ['L', 'AE1', 'M']), 'lama': ('NN', ['L', 'AA1', 'M', 'AH0']), 'lamaist': ('NN', ['L', 'AA1', 'M', 'AY0', 'IH0', 'S', 'T']), 'lamb': ('NN', ['L', 'AE1', 'M']), 'lambing': ('VBG', ['L', 'AE1', 'M', 'IH0', 'NG']), 'lambaste': ('NN', ['L', 'AE2', 'M', 'B', 'AE1', 'S', 'T']), 'lambda': ('NN', ['L', 'AE1', 'M', 'D', 'AH0']), 'lame': ('NN', ['L', 'EY1', 'M']), 'lamellar': ('NN', ['L', 'AH0', 'M', 'EH1', 'L', 'ER0']), 'lamely': ('RB', ['L', 'EY1', 'M', 'L', 'IY0']), 'lament': ('NN', ['L', 'AH0', 'M', 'EH1', 'N', 'T']), 'lamented': ('VBN', ['L', 'AH0', 'M', 'EH1', 'N', 'T', 'IH0', 'D']), 'lamenting': ('VBG', ['L', 'AH0', 'M', 'EH1', 'N', 'T', 'IH0', 'NG']), 'lamentable': ('JJ', ['L', 'AH0', 'M', 'EH1', 'N', 'T', 'AH0', 'B', 'AH0', 'L']), 'lamia': ('NN', ['L', 'EY1', 'M', 'IY0', 'AH0']), 'lamina': ('NN', ['L', 'AE1', 'M', 'AH0', 'N', 'AH0']), 'laminar': ('NN', ['L', 'AE1', 'M', 'IH0', 'N', 'ER0']), 'laminate': ('NN', ['L', 'AE1', 'M', 'AH0', 'N', 'AH0', 'T']), 'laminated': ('VBN', ['L', 'AE1', 'M', 'AH0', 'N', 'EY2', 'T', 'AH0', 'D']), 'laminating': ('VBG', ['L', 'AE1', 'M', 'AH0', 'N', 'EY2', 'T', 'IH0', 'NG']), 'lamm': ('NN', ['L', 'AE1', 'M']), 'lamp': ('NN', ['L', 'AE1', 'M', 'P']), 'lamping': ('VBG', ['L', 'AE1', 'M', 'P', 'IH0', 'NG']), 'lamplighter': ('NN', ['L', 'AE1', 'M', 'P', 'L', 'AY2', 'T', 'ER0']), 'lampoon': ('NN', ['L', 'AE0', 'M', 'P', 'UW1', 'N']), 'lampooned': ('VBN', ['L', 'AE0', 'M', 'P', 'UW1', 'N', 'D']), 'lampreys': ('NNS', ['L', 'AE1', 'M', 'P', 'R', 'IY0', 'Z']), 'lamprey': ('NN', ['L', 'AE1', 'M', 'P', 'R', 'IY0']), 'lampron': ('NN', 
['L', 'AE1', 'M', 'P', 'R', 'AH0', 'N']), 'lance': ('NN', ['L', 'AE1', 'N', 'S']), 'lancing': ('VBG', ['L', 'AE1', 'N', 'S', 'IH0', 'NG']), 'lancelet': ('NN', ['L', 'AE1', 'N', 'S', 'L', 'AH0', 'T']), 'lanceolate': ('NN', ['L', 'AE1', 'N', 'S', 'IY0', 'AH0', 'L', 'EY2', 'T']), 'lancer': ('NN', ['L', 'AE1', 'N', 'S', 'ER0']), 'lancet': ('NN', ['L', 'AE1', 'N', 'S', 'AH0', 'T']), 'land': ('NN', ['L', 'AE1', 'N', 'D']), 'landed': ('VBD', ['L', 'AE1', 'N', 'D', 'AH0', 'D']), 'landing': ('VBG', ['L', 'AE1', 'N', 'D', 'IH0', 'NG']), 'landau': ('NN', ['L', 'AE1', 'N', 'D', 'AW2']), 'lander': ('NN', ['L', 'AE1', 'N', 'D', 'ER0']), 'landfall': ('NN', ['L', 'AE1', 'N', 'D', 'F', 'AO2', 'L']), 'landgrave': ('NN', ['L', 'AE1', 'N', 'D', 'G', 'R', 'EY2', 'V']), 'landholder': ('NN', ['L', 'AE1', 'N', 'D', 'HH', 'OW2', 'L', 'D', 'ER0']), 'landlady': ('NN', ['L', 'AE1', 'N', 'D', 'L', 'EY2', 'D', 'IY0']), 'landless': ('NN', ['L', 'AE1', 'N', 'D', 'L', 'AH0', 'S']), 'landlock': ('NN', ['L', 'AE1', 'N', 'D', 'L', 'AA2', 'K']), 'landlocked': ('VBN', ['L', 'AE1', 'N', 'D', 'L', 'AA2', 'K', 'T']), 'landlord': ('NN', ['L', 'AE1', 'N', 'D', 'L', 'AO2', 'R', 'D']), 'landman': ('NN', ['L', 'AE1', 'N', 'D', 'M', 'AH0', 'N']), 'landmark': ('NN', ['L', 'AE1', 'N', 'D', 'M', 'AA2', 'R', 'K']), 'landowner': ('NN', ['L', 'AE1', 'N', 'D', 'OW2', 'N', 'ER0']), 'landscape': ('NN', ['L', 'AE1', 'N', 'D', 'S', 'K', 'EY2', 'P']), 'landslide': ('NN', ['L', 'AE1', 'N', 'D', 'S', 'L', 'AY2', 'D']), 'landsman': ('NN', ['L', 'AE1', 'N', 'D', 'Z', 'M', 'AH0', 'N']), 'landward': ('NN', ['L', 'AE1', 'N', 'D', 'W', 'ER0', 'D']), 'landwehr': ('NN', ['L', 'AE1', 'N', 'D', 'W', 'IH0', 'R']), 'lane': ('NN', ['L', 'EY1', 'N']), 'lang': ('NN', ['L', 'AE1', 'NG']), 'langridge': ('NN', ['L', 'AE1', 'NG', 'G', 'R', 'IH0', 'JH']), 'language': ('NN', ['L', 'AE1', 'NG', 'G', 'W', 'AH0', 'JH']), 'languid': ('NN', ['L', 'AE1', 'NG', 'G', 'W', 'AH0', 'D']), 'languished': ('VBN', ['L', 'AE1', 'NG', 'G', 'W', 'IH0', 'SH', 
'T']), 'languishing': ('VBG', ['L', 'AE1', 'NG', 'G', 'W', 'IH0', 'SH', 'IH0', 'NG']), 'languish': ('JJ', ['L', 'AE1', 'NG', 'G', 'W', 'IH0', 'SH']), 'lanier': ('NN', ['L', 'AH0', 'N', 'IH1', 'R']), 'lank': ('NN', ['L', 'AE1', 'NG', 'K']), 'lanky': ('NN', ['L', 'AE1', 'NG', 'K', 'IY0']), 'lanolin': ('NN', ['L', 'AE1', 'N', 'AH0', 'L', 'AH0', 'N']), 'lant': ('NN', ['L', 'AE1', 'N', 'T']), 'lantern': ('NN', ['L', 'AE1', 'N', 'T', 'ER0', 'N']), 'lanthanum': ('NN', ['L', 'AE1', 'N', 'TH', 'AH0', 'N', 'AH0', 'M']), 'lanyard': ('NN', ['L', 'AE1', 'N', 'Y', 'ER0', 'D']), 'lap': ('NN', ['L', 'AE1', 'P']), 'lapped': ('VBD', ['L', 'AE1', 'P', 'T']), 'lapping': ('VBG', ['L', 'AE1', 'P', 'IH0', 'NG']), 'lapdog': ('NN', ['L', 'AE1', 'P', 'D', 'AO2', 'G']), 'lapel': ('NN', ['L', 'AH0', 'P', 'EH1', 'L']), 'lapidary': ('JJ', ['L', 'AE1', 'P', 'AH0', 'D', 'EH2', 'R', 'IY0']), 'lapides': ('NNS', ['L', 'AH0', 'P', 'AY1', 'D', 'Z']), 'lapis': ('NN', ['L', 'AE1', 'P', 'AH0', 'S']), 'lapp': ('NN', ['L', 'AE1', 'P']), 'lapps': ('NN', ['L', 'AE1', 'P', 'S']), 'lapse': ('NN', ['L', 'AE1', 'P', 'S']), 'lapsed': ('VBN', ['L', 'AE1', 'P', 'S', 'T']), 'lapsing': ('VBG', ['L', 'AE1', 'P', 'S', 'IH0', 'NG']), 'lars': ('NNS', ['L', 'AA1', 'R', 'Z']), 'lar': ('NN', ['L', 'AA1', 'R']), 'larcenous': ('JJ', ['L', 'AA1', 'R', 'S', 'AH0', 'N', 'AH0', 'S']), 'larceny': ('NN', ['L', 'AA1', 'R', 'S', 'AH0', 'N', 'IY0']), 'larch': ('NN', ['L', 'AA1', 'R', 'CH']), 'lard': ('NN', ['L', 'AA1', 'R', 'D']), 'larded': ('VBD', ['L', 'AA1', 'R', 'D', 'IH0', 'D']), 'larder': ('NN', ['L', 'AA1', 'R', 'D', 'ER0']), 'lare': ('NN', ['L', 'EH1', 'R']), 'large': ('JJ', ['L', 'AA1', 'R', 'JH']), 'largely': ('RB', ['L', 'AA1', 'R', 'JH', 'L', 'IY0']), 'largeness': ('NN', ['L', 'AA1', 'R', 'JH', 'N', 'IH0', 'S']), 'largess': ('NN', ['L', 'AA1', 'R', 'JH', 'AH0', 'S']), 'largesse': ('NN', ['L', 'AA0', 'R', 'G', 'EH1', 'S']), 'largo': ('NN', ['L', 'AA1', 'R', 'G', 'OW0']), 'lariat': ('NN', ['L', 'EH1', 'R', 'IY0', 'AH0', 
'T']), 'larine': ('NN', ['L', 'AA0', 'R', 'IY1', 'N', 'IY0']), 'lark': ('NN', ['L', 'AA1', 'R', 'K']), 'larkspur': ('NN', ['L', 'AA1', 'R', 'K', 'S', 'P', 'ER2']), 'larry': ('NN', ['L', 'EH1', 'R', 'IY0']), 'larvae': ('NN', ['L', 'AA1', 'R', 'V', 'IY0']), 'larva': ('NN', ['L', 'AA1', 'R', 'V', 'AH0']), 'larval': ('NN', ['L', 'AA1', 'R', 'V', 'AH0', 'L']), 'lary': ('JJ', ['L', 'EH1', 'R', 'IY0']), 'laryngeal': ('NN', ['L', 'ER0', 'IH1', 'N', 'JH', 'IY0', 'AH0', 'L']), 'laryngitis': ('NN', ['L', 'EH2', 'R', 'AH0', 'N', 'JH', 'AY1', 'T', 'AH0', 'S']), 'larynx': ('NN', ['L', 'EH1', 'R', 'IH0', 'NG', 'K', 'S']), 'las': ('NNS', ['L', 'AA1', 'S']), 'lascivious': ('JJ', ['L', 'AH0', 'S', 'IH1', 'V', 'IY0', 'AH0', 'S']), 'lash': ('NN', ['L', 'AE1', 'SH']), 'lashed': ('VBN', ['L', 'AE1', 'SH', 'T']), 'lasher': ('NN', ['L', 'AE1', 'SH', 'ER0']), 'lashing': ('VBG', ['L', 'AE1', 'SH', 'IH0', 'NG']), 'lask': ('NN', ['L', 'AE1', 'S', 'K']), 'lass': ('NN', ['L', 'AE1', 'S']), 'lassie': ('NN', ['L', 'AE1', 'S', 'IY0']), 'lasso': ('NN', ['L', 'AE1', 'S', 'OW0']), 'last': ('JJ', ['L', 'AE1', 'S', 'T']), 'lasted': ('VBN', ['L', 'AE1', 'S', 'T', 'AH0', 'D']), 'lasting': ('VBG', ['L', 'AE1', 'S', 'T', 'IH0', 'NG']), 'laster': ('NN', ['L', 'AE1', 'S', 'T', 'ER0']), 'lastly': ('RB', ['L', 'AE1', 'S', 'T', 'L', 'IY0']), 'lat': ('NN', ['L', 'AA1', 'T']), 'latch': ('NN', ['L', 'AE1', 'CH']), 'latched': ('VBN', ['L', 'AE1', 'CH', 'T']), 'latching': ('VBG', ['L', 'AE1', 'CH', 'IH0', 'NG']), 'latchkey': ('NN', ['L', 'AE1', 'CH', 'K', 'IY2']), 'late': ('RB', ['L', 'EY1', 'T']), 'lately': ('RB', ['L', 'EY1', 'T', 'L', 'IY0']), 'latency': ('NN', ['L', 'EY1', 'T', 'AH0', 'N', 'S', 'IY0']), 'lateness': ('NN', ['L', 'EY1', 'T', 'N', 'AH0', 'S']), 'latent': ('NN', ['L', 'EY1', 'T', 'AH0', 'N', 'T']), 'later': ('RB', ['L', 'EY1', 'T', 'ER0']), 'lateral': ('JJ', ['L', 'AE1', 'T', 'ER0', 'AH0', 'L']), 'latex': ('NN', ['L', 'EY1', 'T', 'EH2', 'K', 'S']), 'lath': ('NN', ['L', 'AE1', 'TH']), 'lathe': ('NN', 
['L', 'EY1', 'DH']), 'lather': ('NN', ['L', 'AE1', 'DH', 'ER0']), 'latimer': ('NN', ['L', 'AE1', 'T', 'AH0', 'M', 'ER0']), 'latin': ('NN', ['L', 'AE1', 'T', 'AH0', 'N']), 'latitude': ('NN', ['L', 'AE1', 'T', 'AH0', 'T', 'UW2', 'D']), 'latitudinal': ('JJ', ['L', 'AE2', 'T', 'AH0', 'T', 'UW1', 'D', 'AH0', 'N', 'AH0', 'L']), 'laton': ('NN', ['L', 'AE1', 'T', 'AH0', 'N']), 'latrine': ('NN', ['L', 'AH0', 'T', 'R', 'IY1', 'N']), 'latter': ('NN', ['L', 'AE1', 'T', 'ER0']), 'lattice': ('NN', ['L', 'AE1', 'T', 'AH0', 'S']), 'latticework': ('NN', ['L', 'AE1', 'T', 'AH0', 'S', 'W', 'ER2', 'K']), 'laud': ('NN', ['L', 'AO1', 'D']), 'lauded': ('VBD', ['L', 'AO1', 'D', 'IH0', 'D']), 'lauding': ('VBG', ['L', 'AO1', 'D', 'IH0', 'NG']), 'laudable': ('JJ', ['L', 'AO1', 'D', 'AH0', 'B', 'AH0', 'L']), 'laudanum': ('NN', ['L', 'AO1', 'D', 'AH0', 'N', 'AH0', 'M']), 'laudatory': ('NN', ['L', 'AO1', 'D', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'lauder': ('NN', ['L', 'AO1', 'D', 'ER0']), 'laughed': ('VBN', ['L', 'AE1', 'F', 'T']), 'laughing': ('VBG', ['L', 'AE1', 'F', 'IH0', 'NG']), 'laugh': ('NN', ['L', 'AE1', 'F']), 'laughable': ('JJ', ['L', 'AE1', 'F', 'AH0', 'B', 'AH0', 'L']), 'laugher': ('NN', ['L', 'AA1', 'K', 'ER0']), 'laughingly': ('RB', ['L', 'AE1', 'F', 'IH0', 'NG', 'L', 'IY0']), 'laughingstock': ('NN', ['L', 'AE1', 'F', 'IH0', 'NG', 'S', 'T', 'AA2', 'K']), 'laughter': ('NN', ['L', 'AE1', 'F', 'T', 'ER0']), 'launched': ('VBN', ['L', 'AO1', 'N', 'CH', 'T']), 'launching': ('VBG', ['L', 'AO1', 'N', 'CH', 'IH0', 'NG']), 'launch': ('NN', ['L', 'AO1', 'N', 'CH']), 'launder': ('NN', ['L', 'AO1', 'N', 'D', 'ER0']), 'laundered': ('VBN', ['L', 'AO1', 'N', 'D', 'ER0', 'D']), 'laundering': ('NN', ['L', 'AO1', 'N', 'D', 'ER0', 'IH0', 'NG']), 'launderer': ('NN', ['L', 'AO1', 'N', 'D', 'ER0', 'ER0']), 'laundries': ('NNS', ['L', 'AO1', 'N', 'D', 'R', 'IY0', 'Z']), 'laundry': ('NN', ['L', 'AO1', 'N', 'D', 'R', 'IY0']), 'laura': ('NN', ['L', 'AO1', 'R', 'AH0']), 'laureate': ('NN', ['L', 'AO1', 'R', 'IY0', 
'AH0', 'T']), 'laurel': ('NN', ['L', 'AO1', 'R', 'AH0', 'L']), 'laurentian': ('JJ', ['L', 'AO0', 'R', 'EH1', 'N', 'SH', 'AH0', 'N']), 'laurin': ('NN', ['L', 'AO1', 'R', 'IH0', 'N']), 'lava': ('NN', ['L', 'AA1', 'V', 'AH0']), 'lavatory': ('NN', ['L', 'AE1', 'V', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'lavatories': ('NNS', ['L', 'AE1', 'V', 'AH0', 'T', 'AO2', 'R', 'IY0', 'Z']), 'lave': ('NN', ['L', 'EY1', 'V']), 'lavender': ('NN', ['L', 'AE1', 'V', 'AH0', 'N', 'D', 'ER0']), 'laver': ('NN', ['L', 'EY1', 'V', 'ER0']), 'lavish': ('JJ', ['L', 'AE1', 'V', 'IH0', 'SH']), 'lavished': ('VBN', ['L', 'AE1', 'V', 'IH0', 'SH', 'T']), 'lavishing': ('VBG', ['L', 'AE1', 'V', 'IH0', 'SH', 'IH0', 'NG']), 'lavishly': ('RB', ['L', 'AE1', 'V', 'IH0', 'SH', 'L', 'IY0']), 'law': ('NN', ['L', 'AO1']), 'lawbreaker': ('NN', ['L', 'AO1', 'B', 'R', 'EY2', 'K', 'ER0']), 'lawful': ('JJ', ['L', 'AO1', 'F', 'AH0', 'L']), 'lawing': ('VBG', ['L', 'AO1', 'IH0', 'NG']), 'lawless': ('NN', ['L', 'AO1', 'L', 'AH0', 'S']), 'lawmaker': ('NN', ['L', 'AO1', 'M', 'EY2', 'K', 'ER0']), 'lawn': ('NN', ['L', 'AO1', 'N']), 'lawsuit': ('NN', ['L', 'AO1', 'S', 'UW2', 'T']), 'lawyer': ('NN', ['L', 'AO1', 'Y', 'ER0']), 'lawyerly': ('RB', ['L', 'AO1', 'Y', 'ER0', 'L', 'IY0']), 'lax': ('NN', ['L', 'AE1', 'K', 'S']), 'laxative': ('NN', ['L', 'AE1', 'K', 'S', 'AH0', 'T', 'IH0', 'V']), 'laxity': ('NN', ['L', 'AE1', 'K', 'S', 'AH0', 'T', 'IY0']), 'lay': ('NN', ['L', 'EY1']), 'laying': ('VBG', ['L', 'EY1', 'IH0', 'NG']), 'layer': ('NN', ['L', 'EY1', 'ER0']), 'layering': ('VBG', ['L', 'EY1', 'ER0', 'IH0', 'NG']), 'layland': ('NN', ['L', 'EY1', 'L', 'AH0', 'N', 'D']), 'laymen': ('NNS', ['L', 'EY1', 'M', 'AH0', 'N']), 'layman': ('NN', ['L', 'EY1', 'M', 'AH0', 'N']), 'lazar': ('NN', ['L', 'EY1', 'Z', 'ER0']), 'lazily': ('RB', ['L', 'AE1', 'Z', 'AH0', 'L', 'IY0']), 'laziness': ('NN', ['L', 'EY1', 'Z', 'IY0', 'N', 'AH0', 'S']), 'lazuli': ('NN', ['L', 'AH0', 'Z', 'UW1', 'L', 'IY0']), 'lazulite': ('NN', ['L', 'AE1', 'Z', 'AH0', 'L', 
'AY2', 'T']), 'lazy': ('NN', ['L', 'EY1', 'Z', 'IY0']), 'lea': ('NN', ['L', 'IY1']), 'leach': ('NN', ['L', 'IY1', 'CH']), 'leached': ('VBN', ['L', 'IY1', 'CH', 'T']), 'leaching': ('VBG', ['L', 'IY1', 'CH', 'IH0', 'NG']), 'lead': ('NN', ['L', 'EH1', 'D']), 'leaded': ('VBD', ['L', 'EH1', 'D', 'IH0', 'D']), 'leading': ('VBG', ['L', 'IY1', 'D', 'IH0', 'NG']), 'led': ('VBN', ['L', 'EH1', 'D']), 'leaden': ('NN', ['L', 'EH1', 'D', 'AH0', 'N']), 'leader': ('NN', ['L', 'IY1', 'D', 'ER0']), 'leadership': ('NN', ['L', 'IY1', 'D', 'ER0', 'SH', 'IH2', 'P']), 'leaves': ('NNS', ['L', 'IY1', 'V', 'Z']), 'leaf': ('NN', ['L', 'IY1', 'F']), 'leafed': ('NN', ['L', 'IY1', 'F', 'T']), 'leafing': ('VBG', ['L', 'IY1', 'F', 'IH0', 'NG']), 'leafless': ('NN', ['L', 'IY1', 'F', 'L', 'AH0', 'S']), 'leaflet': ('NN', ['L', 'IY1', 'F', 'L', 'AH0', 'T']), 'leafy': ('NN', ['L', 'IY1', 'F', 'IY0']), 'league': ('NN', ['L', 'IY1', 'G']), 'leaguer': ('NN', ['L', 'IY1', 'G', 'ER0']), 'leak': ('NN', ['L', 'IY1', 'K']), 'leaked': ('VBN', ['L', 'IY1', 'K', 'T']), 'leaking': ('VBG', ['L', 'IY1', 'K', 'IH0', 'NG']), 'leakage': ('NN', ['L', 'IY1', 'K', 'AH0', 'JH']), 'leaky': ('NN', ['L', 'IY1', 'K', 'IY0']), 'leal': ('NN', ['L', 'IY1', 'L']), 'leamer': ('NN', ['L', 'IY1', 'M', 'ER0']), 'lean': ('NN', ['L', 'IY1', 'N']), 'leaned': ('VBN', ['L', 'IY1', 'N', 'D']), 'leant': ('NN', ['L', 'IY1', 'AH0', 'N', 'T']), 'leaning': ('VBG', ['L', 'IY1', 'N', 'IH0', 'NG']), 'leap': ('NN', ['L', 'IY1', 'P']), 'leaped': ('VBD', ['L', 'EH1', 'P', 'T']), 'leapt': ('NN', ['L', 'EH1', 'P', 'T']), 'leaping': ('VBG', ['L', 'IY1', 'P', 'IH0', 'NG']), 'leapfrog': ('NN', ['L', 'IY1', 'P', 'F', 'R', 'AO2', 'G']), 'lear': ('NN', ['L', 'IH1', 'R']), 'learned': ('VBN', ['L', 'ER1', 'N', 'D']), 'learnt': ('NN', ['L', 'ER1', 'N', 'T']), 'learning': ('VBG', ['L', 'ER1', 'N', 'IH0', 'NG']), 'learn': ('NN', ['L', 'ER1', 'N']), 'learner': ('NN', ['L', 'ER1', 'N', 'ER0']), 'leasable': ('JJ', ['L', 'IY1', 'S', 'AH0', 'B', 'AH0', 'L']), 'lease': 
('NN', ['L', 'IY1', 'S']), 'leased': ('VBN', ['L', 'IY1', 'S', 'T']), 'leasing': ('VBG', ['L', 'IY1', 'S', 'IH0', 'NG']), 'leasehold': ('NN', ['L', 'IY1', 'S', 'HH', 'OW2', 'L', 'D']), 'leaser': ('NN', ['L', 'IY1', 'S', 'ER0']), 'leash': ('NN', ['L', 'IY1', 'SH']), 'leashed': ('VBN', ['L', 'IY1', 'SH', 'T']), 'least': ('JJS', ['L', 'IY1', 'S', 'T']), 'leather': ('NN', ['L', 'EH1', 'DH', 'ER0']), 'leatherback': ('NN', ['L', 'EH1', 'DH', 'ER0', 'B', 'AE2', 'K']), 'leatherwood': ('NN', ['L', 'EH1', 'DH', 'ER0', 'W', 'UH2', 'D']), 'leathery': ('NN', ['L', 'EH1', 'DH', 'ER0', 'IY0']), 'leaving': ('VBG', ['L', 'IY1', 'V', 'IH0', 'NG']), 'leave': ('VB', ['L', 'IY1', 'V']), 'left': ('NN', ['L', 'EH1', 'F', 'T']), 'leaven': ('RB', ['L', 'EH1', 'V', 'AH0', 'N']), 'leavened': ('VBN', ['L', 'EH1', 'V', 'AH0', 'N', 'D']), 'leavening': ('VBG', ['L', 'EH1', 'V', 'AH0', 'N', 'IH0', 'NG']), 'leaver': ('NN', ['L', 'IY1', 'V', 'ER0']), 'leavings': ('NNS', ['L', 'IY1', 'V', 'IH0', 'NG', 'Z']), 'leavy': ('NN', ['L', 'IY1', 'V', 'IY0']), 'lech': ('NN', ['L', 'EH1', 'K']), 'lecher': ('NN', ['L', 'EH1', 'CH', 'ER0']), 'lecherous': ('JJ', ['L', 'EH1', 'CH', 'ER0', 'AH0', 'S']), 'lecithin': ('NN', ['L', 'EH1', 'S', 'AH0', 'TH', 'AH0', 'N']), 'lectern': ('NN', ['L', 'EH1', 'K', 'T', 'ER0', 'N']), 'lector': ('NN', ['L', 'EH1', 'K', 'T', 'ER0']), 'lecture': ('NN', ['L', 'EH1', 'K', 'CH', 'ER0']), 'lectured': ('VBN', ['L', 'EH1', 'K', 'CH', 'ER0', 'D']), 'lecturing': ('VBG', ['L', 'EH1', 'K', 'CH', 'ER0', 'IH0', 'NG']), 'lecturer': ('NN', ['L', 'EH1', 'K', 'CH', 'ER0', 'ER0']), 'ledden': ('NN', ['L', 'EH1', 'D', 'AH0', 'N']), 'ledge': ('NN', ['L', 'EH1', 'JH']), 'ledger': ('NN', ['L', 'EH1', 'JH', 'ER0']), 'lee': ('NN', ['L', 'IY1']), 'lees': ('NNS', ['L', 'IY1', 'Z']), 'leech': ('NN', ['L', 'IY1', 'CH']), 'leed': ('NN', ['L', 'IY1', 'D']), 'leek': ('NN', ['L', 'IY1', 'K']), 'leep': ('NN', ['L', 'IY1', 'P']), 'leese': ('JJ', ['L', 'IY1', 'S']), 'leet': ('NN', ['L', 'IY1', 'T']), 'leeward': 
('NN', ['L', 'IY1', 'W', 'ER0', 'D']), 'leeway': ('NN', ['L', 'IY1', 'W', 'EY2']), 'leftward': ('NN', ['L', 'EH1', 'F', 'T', 'W', 'ER0', 'D']), 'leg': ('NN', ['L', 'EH1', 'G']), 'legacies': ('NNS', ['L', 'EH1', 'G', 'AH0', 'S', 'IY0', 'Z']), 'legacy': ('NN', ['L', 'EH1', 'G', 'AH0', 'S', 'IY0']), 'legal': ('JJ', ['L', 'IY1', 'G', 'AH0', 'L']), 'legalism': ('NN', ['L', 'IY1', 'G', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'legality': ('NN', ['L', 'IY0', 'G', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'legalization': ('NN', ['L', 'IY2', 'G', 'AH0', 'L', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'legalized': ('VBN', ['L', 'IY1', 'G', 'AH0', 'L', 'AY2', 'Z', 'D']), 'legalizing': ('VBG', ['L', 'IY1', 'G', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'legalize': ('NN', ['L', 'IY1', 'G', 'AH0', 'L', 'AY2', 'Z']), 'legally': ('RB', ['L', 'IY1', 'G', 'AH0', 'L', 'IY0']), 'legate': ('NN', ['L', 'EH1', 'G', 'AH0', 'T']), 'legato': ('NN', ['L', 'AH0', 'G', 'AA1', 'T', 'OW2']), 'lege': ('NN', ['L', 'IY1', 'JH']), 'legend': ('NN', ['L', 'EH1', 'JH', 'AH0', 'N', 'D']), 'legendary': ('JJ', ['L', 'EH1', 'JH', 'AH0', 'N', 'D', 'EH2', 'R', 'IY0']), 'leger': ('NN', ['L', 'EH1', 'JH', 'ER0']), 'legerdemain': ('NN', ['L', 'EH2', 'JH', 'ER0', 'D', 'AH0', 'M', 'EY1', 'N']), 'legge': ('NN', ['L', 'EH1', 'G']), 'legged': ('VBN', ['L', 'EH1', 'G', 'AH0', 'D']), 'legging': ('VBG', ['L', 'EH1', 'G', 'IH0', 'NG']), 'leggy': ('NN', ['L', 'EH1', 'G', 'IY0']), 'leghorn': ('NN', ['L', 'EH1', 'G', 'HH', 'AO0', 'R', 'N']), 'legibility': ('NN', ['L', 'EH2', 'JH', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'legible': ('JJ', ['L', 'EH1', 'JH', 'AH0', 'B', 'AH0', 'L']), 'legion': ('NN', ['L', 'IY1', 'JH', 'AH0', 'N']), 'legionaries': ('NNS', ['L', 'IY1', 'JH', 'AH0', 'N', 'EH2', 'R', 'IY0', 'Z']), 'legislated': ('VBN', ['L', 'EH1', 'JH', 'AH0', 'S', 'L', 'EY2', 'T', 'AH0', 'D']), 'legislating': ('VBG', ['L', 'EH1', 'JH', 'IH0', 'S', 'L', 'EY2', 'T', 'IH0', 'NG']), 'legislate': ('NN', ['L', 'EH1', 'JH', 'IH0', 'S', 'L', 
'EY2', 'T']), 'legislation': ('NN', ['L', 'EH2', 'JH', 'AH0', 'S', 'L', 'EY1', 'SH', 'AH0', 'N']), 'legislative': ('JJ', ['L', 'EH1', 'JH', 'AH0', 'S', 'L', 'EY2', 'T', 'IH0', 'V']), 'legislatively': ('RB', ['L', 'EH1', 'JH', 'IH0', 'S', 'L', 'EY2', 'T', 'IH0', 'V', 'L', 'IY0']), 'legislator': ('NN', ['L', 'EH1', 'JH', 'AH0', 'S', 'L', 'EY2', 'T', 'ER0']), 'legislature': ('NN', ['L', 'EH1', 'JH', 'AH0', 'S', 'L', 'EY2', 'CH', 'ER0']), 'legitimacy': ('NN', ['L', 'AH0', 'JH', 'IH1', 'T', 'AH0', 'M', 'AH0', 'S', 'IY0']), 'legitimate': ('NN', ['L', 'AH0', 'JH', 'IH1', 'T', 'AH0', 'M', 'AH0', 'T']), 'legitimately': ('RB', ['L', 'AH0', 'JH', 'IH1', 'T', 'AH0', 'M', 'AH0', 'T', 'L', 'IY0']), 'legitimized': ('VBN', ['L', 'IH0', 'JH', 'IH1', 'T', 'AH0', 'M', 'AY2', 'Z', 'D']), 'legitimizing': ('VBG', ['L', 'IH0', 'JH', 'IH1', 'T', 'AH0', 'M', 'AY2', 'Z', 'IH0', 'NG']), 'legitimize': ('NN', ['L', 'AH0', 'JH', 'IH1', 'T', 'AH0', 'M', 'AY2', 'Z']), 'legless': ('NN', ['L', 'EH1', 'G', 'L', 'AH0', 'S']), 'legume': ('NN', ['L', 'EH1', 'G', 'Y', 'UW2', 'M']), 'leister': ('NN', ['L', 'IY1', 'S', 'T', 'ER0']), 'lister': ('NN', ['L', 'IH1', 'S', 'T', 'ER0']), 'leisure': ('NN', ['L', 'EH1', 'ZH', 'ER0']), 'leisurely': ('RB', ['L', 'IY1', 'Z', 'ER0', 'L', 'IY0']), 'leitmotif': ('NN', ['L', 'AY1', 'T', 'M', 'OW0', 'T', 'IY2', 'F']), 'leman': ('NN', ['L', 'IY1', 'M', 'AH0', 'N']), 'lemma': ('NN', ['L', 'EH1', 'M', 'AH0']), 'lemming': ('VBG', ['L', 'EH1', 'M', 'IH0', 'NG']), 'lemon': ('NN', ['L', 'EH1', 'M', 'AH0', 'N']), 'lemonade': ('NN', ['L', 'EH1', 'M', 'AH0', 'N', 'EY1', 'D']), 'lemur': ('NN', ['L', 'IY1', 'M', 'ER0']), 'lena': ('NN', ['L', 'IY1', 'N', 'AH0']), 'lent': ('NN', ['L', 'EH1', 'N', 'T']), 'lending': ('NN', ['L', 'EH1', 'N', 'D', 'IH0', 'NG']), 'lend': ('NN', ['L', 'EH1', 'N', 'D']), 'lender': ('NN', ['L', 'EH1', 'N', 'D', 'ER0']), 'lends': ('NNS', ['L', 'EH1', 'N', 'D', 'Z']), 'lene': ('NN', ['L', 'IY1', 'N']), 'lenger': ('NN', ['L', 'EH1', 'NG', 'ER0']), 'length': 
('NN', ['L', 'EH1', 'NG', 'K', 'TH']), 'lengthened': ('VBN', ['L', 'EH1', 'NG', 'TH', 'AH0', 'N', 'D']), 'lengthening': ('VBG', ['L', 'EH1', 'NG', 'TH', 'AH0', 'N', 'IH0', 'NG']), 'lengthen': ('NN', ['L', 'EH1', 'NG', 'TH', 'AH0', 'N']), 'lengthways': ('NNS', ['L', 'EH1', 'NG', 'TH', 'W', 'EY2', 'Z']), 'lengthwise': ('NN', ['L', 'EH1', 'NG', 'TH', 'W', 'AY2', 'Z']), 'lengthy': ('NN', ['L', 'EH1', 'NG', 'TH', 'IY0']), 'leniency': ('NN', ['L', 'IY1', 'N', 'Y', 'AH0', 'N', 'S', 'IY0']), 'lenient': ('NN', ['L', 'IY1', 'N', 'IY0', 'AH0', 'N', 'T']), 'leniently': ('RB', ['L', 'IY1', 'N', 'Y', 'AH0', 'N', 'T', 'L', 'IY0']), 'leno': ('NN', ['L', 'EH1', 'N', 'OW0']), 'lenses': ('NNS', ['L', 'EH1', 'N', 'Z', 'AH0', 'Z']), 'lens': ('NNS', ['L', 'EH1', 'N', 'Z']), 'lentil': ('NN', ['L', 'EH1', 'N', 'T', 'AH0', 'L']), 'lento': ('NN', ['L', 'EH1', 'N', 'T', 'OW0']), 'leo': ('NN', ['L', 'IY1', 'OW0']), 'leon': ('NN', ['L', 'IY1', 'AA0', 'N']), 'leonid': ('NN', ['L', 'IY1', 'AH0', 'N', 'IH0', 'D']), 'leopard': ('NN', ['L', 'EH1', 'P', 'ER0', 'D']), 'lep': ('NN', ['L', 'EH1', 'P']), 'leper': ('NN', ['L', 'EH1', 'P', 'ER0']), 'lepre': ('NN', ['L', 'EH1', 'P', 'ER0']), 'leprosy': ('NN', ['L', 'EH1', 'P', 'R', 'AH0', 'S', 'IY0']), 'les': ('NNS', ['L', 'EH1', 'S']), 'lesbian': ('JJ', ['L', 'EH1', 'Z', 'B', 'IY0', 'AH0', 'N']), 'lesion': ('NN', ['L', 'IY1', 'ZH', 'AH0', 'N']), 'less': ('RBR', ['L', 'EH1', 'S']), 'lessee': ('NN', ['L', 'EH0', 'S', 'IY1']), 'lessened': ('VBN', ['L', 'EH1', 'S', 'AH0', 'N', 'D']), 'lessening': ('VBG', ['L', 'EH1', 'S', 'AH0', 'N', 'IH0', 'NG']), 'lessen': ('NN', ['L', 'EH1', 'S', 'AH0', 'N']), 'lesser': ('NN', ['L', 'EH1', 'S', 'ER0']), 'lesson': ('NN', ['L', 'EH1', 'S', 'AH0', 'N']), 'lessor': ('NN', ['L', 'EH1', 'S', 'ER0']), 'lest': ('JJS', ['L', 'EH1', 'S', 'T']), 'let': ('VB', ['L', 'EH1', 'T']), 'letting': ('VBG', ['L', 'EH1', 'T', 'IH0', 'NG']), 'lethal': ('NN', ['L', 'IY1', 'TH', 'AH0', 'L']), 'lethality': ('NN', ['L', 'IY0', 'TH', 'AE1', 'L', 
'AH0', 'T', 'IY0']), 'lethargic': ('NN', ['L', 'AH0', 'TH', 'AA1', 'R', 'JH', 'IH0', 'K']), 'lethargy': ('NN', ['L', 'EH1', 'TH', 'ER0', 'JH', 'IY0']), 'letter': ('NN', ['L', 'EH1', 'T', 'ER0']), 'lettered': ('VBN', ['L', 'EH1', 'T', 'ER0', 'D']), 'lettering': ('VBG', ['L', 'EH1', 'T', 'ER0', 'IH0', 'NG']), 'letterpress': ('NN', ['L', 'EH1', 'T', 'ER0', 'P', 'R', 'EH2', 'S']), 'lettish': ('JJ', ['L', 'EH1', 'T', 'IH0', 'SH']), 'letts': ('NN', ['L', 'EH1', 'T', 'S']), 'lettuce': ('NN', ['L', 'EH1', 'T', 'AH0', 'S']), 'levana': ('NN', ['L', 'IH0', 'V', 'AE1', 'N', 'AH0']), 'levant': ('NN', ['L', 'AH0', 'V', 'AE1', 'N', 'T']), 'leve': ('NN', ['L', 'IY1', 'V']), 'levee': ('NN', ['L', 'EH1', 'V', 'IY0']), 'level': ('NN', ['L', 'EH1', 'V', 'AH0', 'L']), 'leveled': ('VBN', ['L', 'EH1', 'V', 'AH0', 'L', 'D']), 'levelled': ('VBN', ['L', 'EH1', 'V', 'AH0', 'L', 'D']), 'leveling': ('NN', ['L', 'EH1', 'V', 'AH0', 'L', 'IH0', 'NG']), 'leven': ('RB', ['L', 'IY1', 'V', 'AH0', 'N']), 'lever': ('NN', ['L', 'EH1', 'V', 'ER0']), 'leverage': ('NN', ['L', 'EH1', 'V', 'ER0', 'IH0', 'JH']), 'leviathan': ('NN', ['L', 'AH0', 'V', 'AY1', 'AH0', 'TH', 'AH0', 'N']), 'levin': ('NN', ['L', 'EH1', 'V', 'IH0', 'N']), 'leviner': ('NN', ['L', 'EH1', 'V', 'IH0', 'N', 'ER0']), 'levitate': ('NN', ['L', 'EH1', 'V', 'IH0', 'T', 'EY2', 'T']), 'levitation': ('NN', ['L', 'EH2', 'V', 'IH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'leviticus': ('NN', ['L', 'AH0', 'V', 'IH1', 'T', 'AH0', 'K', 'AH0', 'S']), 'levity': ('NN', ['L', 'EH1', 'V', 'IH0', 'T', 'IY0']), 'levies': ('NNS', ['L', 'EH1', 'V', 'IY0', 'Z']), 'levy': ('NN', ['L', 'EH1', 'V', 'IY0']), 'levied': ('VBN', ['L', 'EH1', 'V', 'IY0', 'D']), 'levying': ('VBG', ['L', 'EH1', 'V', 'IY0', 'IH0', 'NG']), 'lew': ('NN', ['L', 'UW1']), 'lewd': ('NN', ['L', 'UW1', 'D']), 'lewis': ('NN', ['L', 'UW1', 'IH0', 'S']), 'lex': ('NN', ['L', 'EH1', 'K', 'S']), 'lexical': ('JJ', ['L', 'EH1', 'K', 'S', 'IH0', 'K', 'AH0', 'L']), 'lexicographer': ('NN', ['L', 'EH2', 'K', 'S', 
'IH0', 'K', 'AA1', 'G', 'R', 'AH0', 'F', 'ER0']), 'lexicon': ('NN', ['L', 'EH1', 'K', 'S', 'IH0', 'K', 'AA2', 'N']), 'ley': ('NN', ['L', 'EY1']), 'li': ('NN', ['L', 'IY1']), 'liabilities': ('NNS', ['L', 'AY2', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'liability': ('NN', ['L', 'AY2', 'AH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'liable': ('JJ', ['L', 'AY1', 'AH0', 'B', 'AH0', 'L']), 'liaison': ('NN', ['L', 'IY0', 'EY1', 'Z', 'AA2', 'N']), 'liane': ('NN', ['L', 'IY0', 'AE1', 'N']), 'liana': ('NN', ['L', 'IY0', 'AA1', 'N', 'AH0']), 'liar': ('NN', ['L', 'AY1', 'ER0']), 'lib': ('NN', ['L', 'IH1', 'B']), 'libel': ('NN', ['L', 'AY1', 'B', 'AH0', 'L']), 'libeled': ('VBN', ['L', 'AY1', 'B', 'AH0', 'L', 'D']), 'libelous': ('JJ', ['L', 'AY1', 'B', 'AH0', 'L', 'AH0', 'S']), 'liberal': ('JJ', ['L', 'IH1', 'B', 'ER0', 'AH0', 'L']), 'liberalism': ('NN', ['L', 'IH1', 'B', 'ER0', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'liberality': ('NN', ['L', 'IH2', 'B', 'ER0', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'liberalization': ('NN', ['L', 'IH2', 'B', 'AH0', 'R', 'AH0', 'L', 'IH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'liberalized': ('VBN', ['L', 'IH1', 'B', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'D']), 'liberalizing': ('VBG', ['L', 'IH1', 'B', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'liberalize': ('NN', ['L', 'IH1', 'B', 'ER0', 'AH0', 'L', 'AY2', 'Z']), 'liberally': ('RB', ['L', 'IH1', 'B', 'ER0', 'AH0', 'L', 'IY0']), 'liberated': ('VBN', ['L', 'IH1', 'B', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'liberating': ('VBG', ['L', 'IH1', 'B', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'liberate': ('NN', ['L', 'IH1', 'B', 'ER0', 'EY2', 'T']), 'liberation': ('NN', ['L', 'IH2', 'B', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'liberator': ('NN', ['L', 'IH1', 'B', 'ER0', 'EY0', 'T', 'AH0', 'R']), 'libertarian': ('JJ', ['L', 'IH2', 'B', 'ER0', 'T', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'libertine': ('NN', ['L', 'IH1', 'B', 'ER0', 'T', 'IY2', 'N']), 'liberties': ('NNS', ['L', 'IH1', 'B', 'ER0', 'T', 'IY0', 'Z']), 'liberty': ('NN', ['L', 
'IH1', 'B', 'ER0', 'T', 'IY0']), 'libra': ('NN', ['L', 'IY1', 'B', 'R', 'AH0']), 'librarian': ('JJ', ['L', 'AY0', 'B', 'R', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'libraries': ('NNS', ['L', 'AY1', 'B', 'R', 'EH2', 'R', 'IY0', 'Z']), 'library': ('NN', ['L', 'AY1', 'B', 'R', 'EH2', 'R', 'IY0']), 'libration': ('NN', ['L', 'AY0', 'B', 'R', 'EY1', 'SH', 'AH0', 'N']), 'librettist': ('NN', ['L', 'AH0', 'B', 'R', 'EH1', 'T', 'AH0', 'S', 'T']), 'libretto': ('NN', ['L', 'AH0', 'B', 'R', 'EH1', 'T', 'OW0']), 'libyan': ('NN', ['L', 'IH1', 'B', 'IY0', 'AH0', 'N']), 'lice': ('NN', ['L', 'AY1', 'S']), 'license': ('NN', ['L', 'AY1', 'S', 'AH0', 'N', 'S']), 'licensed': ('VBN', ['L', 'AY1', 'S', 'AH0', 'N', 'S', 'T']), 'licensing': ('NN', ['L', 'AY1', 'S', 'AH0', 'N', 'S', 'IH0', 'NG']), 'licensee': ('NN', ['L', 'AY2', 'S', 'AH0', 'N', 'S', 'IY1']), 'licenser': ('NN', ['L', 'AY1', 'S', 'AH0', 'N', 'S', 'ER0']), 'licensure': ('NN', ['L', 'AY1', 'S', 'AH0', 'N', 'CH', 'ER0']), 'licentious': ('JJ', ['L', 'AY0', 'S', 'EH1', 'N', 'CH', 'AH0', 'S']), 'lich': ('NN', ['L', 'IH1', 'CH']), 'lichen': ('NN', ['L', 'AY1', 'K', 'AH0', 'N']), 'licked': ('VBN', ['L', 'IH1', 'K', 'T']), 'licking': ('VBG', ['L', 'IH1', 'K', 'IH0', 'NG']), 'lick': ('NN', ['L', 'IH1', 'K']), 'licker': ('NN', ['L', 'IH1', 'K', 'ER0']), 'licorice': ('NN', ['L', 'IH1', 'K', 'ER0', 'IH0', 'SH']), 'lid': ('NN', ['L', 'IH1', 'D']), 'lie': ('NN', ['L', 'AY1']), 'lied': ('VBN', ['L', 'AY1', 'D']), 'lying': ('VBG', ['L', 'AY1', 'IH0', 'NG']), 'lien': ('NN', ['L', 'IY1', 'N']), 'lieder': ('NN', ['L', 'IY1', 'D', 'ER0']), 'lief': ('NN', ['L', 'IY1', 'F']), 'liege': ('NN', ['L', 'IY1', 'JH']), 'lieu': ('NN', ['L', 'UW1']), 'lieutenant': ('NN', ['L', 'UW0', 'T', 'EH1', 'N', 'AH0', 'N', 'T']), 'lives': ('NNS', ['L', 'IH1', 'V', 'Z']), 'life': ('NN', ['L', 'AY1', 'F']), 'lifeblood': ('NN', ['L', 'AY1', 'F', 'B', 'L', 'AH2', 'D']), 'lifeboat': ('NN', ['L', 'AY1', 'F', 'B', 'OW2', 'T']), 'lifeless': ('NN', ['L', 'AY1', 'F', 'L', 'AH0', 
'S']), 'lifelike': ('NN', ['L', 'AY1', 'F', 'L', 'AY2', 'K']), 'lifelong': ('NN', ['L', 'AY1', 'F', 'L', 'AO1', 'NG']), 'lifetime': ('NN', ['L', 'AY1', 'F', 'T', 'AY2', 'M']), 'lift': ('NN', ['L', 'IH1', 'F', 'T']), 'lifted': ('VBN', ['L', 'IH1', 'F', 'T', 'AH0', 'D']), 'lifting': ('VBG', ['L', 'IH1', 'F', 'T', 'IH0', 'NG']), 'lifter': ('NN', ['L', 'IH1', 'F', 'T', 'ER0']), 'ligament': ('NN', ['L', 'IH1', 'G', 'AH0', 'M', 'AH0', 'N', 'T']), 'ligation': ('NN', ['L', 'AY0', 'G', 'EY1', 'SH', 'AH0', 'N']), 'light': ('NN', ['L', 'AY1', 'T']), 'lighted': ('VBN', ['L', 'AY1', 'T', 'AH0', 'D']), 'lit': ('NN', ['L', 'IH1', 'T']), 'lighting': ('VBG', ['L', 'AY1', 'T', 'IH0', 'NG']), 'lighten': ('NN', ['L', 'AY1', 'T', 'AH0', 'N']), 'lightened': ('VBN', ['L', 'AY1', 'T', 'AH0', 'N', 'D']), 'lightening': ('VBG', ['L', 'AY1', 'T', 'AH0', 'N', 'IH0', 'NG']), 'lighter': ('NN', ['L', 'AY1', 'T', 'ER0']), 'lighthouses': ('NNS', ['L', 'AY1', 'T', 'HH', 'AW2', 'S', 'IH0', 'Z']), 'lighthouse': ('NN', ['L', 'AY1', 'T', 'HH', 'AW2', 'S']), 'lightly': ('RB', ['L', 'AY1', 'T', 'L', 'IY0']), 'lightness': ('NN', ['L', 'AY1', 'T', 'N', 'AH0', 'S']), 'lightning': ('VBG', ['L', 'AY1', 'T', 'N', 'IH0', 'NG']), 'lights': ('NNS', ['L', 'AY1', 'T', 'S']), 'lighty': ('NN', ['L', 'AY1', 'T', 'IY0']), 'lignin': ('NN', ['L', 'IH1', 'G', 'N', 'IH0', 'N']), 'lignite': ('NN', ['L', 'IH1', 'G', 'N', 'AY2', 'T']), 'm': ('NN', ['EH1', 'M']), 'ma': ('NN', ['M', 'AA1']), "ma'am": ('NN', ['M', 'AE1', 'M']), 'mad': ('NN', ['M', 'AE1', 'D']), 'mac': ('NN', ['M', 'AE1', 'K']), 'macao': ('NN', ['M', 'AH0', 'K', 'AW1']), 'macaroni': ('NN', ['M', 'AE2', 'K', 'ER0', 'OW1', 'N', 'IY0']), 'macartney': ('NN', ['M', 'AH0', 'K', 'AA1', 'R', 'T', 'N', 'IY0']), 'macaw': ('NN', ['M', 'AH0', 'K', 'AO1']), 'maccabean': ('NN', ['M', 'AE2', 'K', 'AH0', 'B', 'IY1', 'AH0', 'N']), 'maccabees': ('NNS', ['M', 'AE1', 'K', 'AH0', 'B', 'IY2', 'Z']), 'mace': ('NN', ['M', 'EY1', 'S']), 'macedonian': ('NN', ['M', 'AE2', 'S', 'AH0', 'D', 
'OW1', 'N', 'Y', 'AH0', 'N']), 'machete': ('NN', ['M', 'AH0', 'SH', 'EH1', 'T', 'IY2']), 'machination': ('NN', ['M', 'AE2', 'K', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'machine': ('NN', ['M', 'AH0', 'SH', 'IY1', 'N']), 'machined': ('VBN', ['M', 'AH0', 'SH', 'IY1', 'N', 'D']), 'machining': ('NN', ['M', 'AH0', 'SH', 'IY1', 'N', 'IH0', 'NG']), 'machinery': ('NN', ['M', 'AH0', 'SH', 'IY1', 'N', 'ER0', 'IY0']), 'machinist': ('NN', ['M', 'AH0', 'SH', 'IY1', 'N', 'AH0', 'S', 'T']), 'macho': ('NN', ['M', 'AA1', 'CH', 'OW0']), 'macintosh': ('NN', ['M', 'AE1', 'K', 'AH0', 'N', 'T', 'AO2', 'SH']), 'mackerel': ('NN', ['M', 'AE1', 'K', 'ER0', 'AH0', 'L']), 'mackinaw': ('NN', ['M', 'AE1', 'K', 'AH0', 'N', 'AO2']), 'mackintosh': ('NN', ['M', 'AE1', 'K', 'AH0', 'N', 'T', 'AA2', 'SH']), 'madding': ('VBG', ['M', 'AE1', 'D', 'IH0', 'NG']), 'mesdames': ('NNS', ['M', 'EY0', 'D', 'AE1', 'M', 'Z']), 'madam': ('NN', ['M', 'AE1', 'D', 'AH0', 'M']), 'madame': ('NN', ['M', 'AE1', 'D', 'AH0', 'M']), 'madcap': ('NN', ['M', 'AE1', 'D', 'K', 'AE2', 'P']), 'maddening': ('VBG', ['M', 'AE1', 'D', 'AH0', 'N', 'IH0', 'NG']), 'madden': ('NN', ['M', 'AE1', 'D', 'AH0', 'N']), 'madder': ('NN', ['M', 'AE1', 'D', 'ER0']), 'made': ('VBN', ['M', 'EY1', 'D']), 'madeira': ('NN', ['M', 'AH0', 'D', 'IH1', 'R', 'AH0']), 'mademoiselle': ('NN', ['M', 'AE2', 'D', 'AH0', 'M', 'AH0', 'Z', 'EH1', 'L']), 'madge': ('NN', ['M', 'AE1', 'JH']), 'madhouse': ('NN', ['M', 'AE1', 'D', 'HH', 'AW2', 'S']), 'madia': ('NN', ['M', 'AA1', 'D', 'IY0', 'AH0']), 'madly': ('RB', ['M', 'AE1', 'D', 'L', 'IY0']), 'madmen': ('NNS', ['M', 'AE1', 'D', 'M', 'AH0', 'N']), 'madman': ('NN', ['M', 'AE1', 'D', 'M', 'AE2', 'N']), 'madness': ('NN', ['M', 'AE1', 'D', 'N', 'AH0', 'S']), 'madonna': ('NN', ['M', 'AH0', 'D', 'AA1', 'N', 'AH0']), 'madrigal': ('NN', ['M', 'AE1', 'D', 'R', 'AH0', 'G', 'AH0', 'L']), 'n': ('NN', ['EH1', 'N']), 'na': ('NNS', ['N', 'AA1']), 'nab': ('NN', ['N', 'AE1', 'B']), 'nabbed': ('NNS', ['N', 'AE1', 'B', 'D']), 'nabbing': 
('VBG', ['N', 'AE1', 'B', 'IH0', 'NG']), 'nabob': ('NN', ['N', 'AE1', 'B', 'AA0', 'B']), 'nad': ('NN', ['N', 'AE1', 'D']), 'nadir': ('NN', ['N', 'EY1', 'D', 'ER0']), 'naeve': ('NNS', ['N', 'IY1', 'V']), 'nag': ('NN', ['N', 'AE1', 'G']), 'nagged': ('VBN', ['N', 'AE1', 'G', 'D']), 'nagging': ('VBG', ['N', 'AE1', 'G', 'IH0', 'NG']), 'naik': ('NN', ['N', 'EY1', 'K']), 'nail': ('NN', ['N', 'EY1', 'L']), 'nailed': ('VBN', ['N', 'EY1', 'L', 'D']), 'nailing': ('VBG', ['N', 'EY1', 'L', 'IH0', 'NG']), 'naive': ('JJ', ['N', 'AY2', 'IY1', 'V']), 'naively': ('RB', ['N', 'AA0', 'IY1', 'V', 'L', 'IY0']), 'naivete': ('JJ', ['N', 'AA0', 'IY2', 'V', 'AH0', 'T', 'EY1']), 'naked': ('JJ', ['N', 'EY1', 'K', 'AH0', 'D']), 'nale': ('NN', ['N', 'EY1', 'L']), 'nall': ('NN', ['N', 'AO1', 'L']), 'nam': ('NNS', ['N', 'AE1', 'M']), 'namby-pamby': ('JJ', ['N', 'AE1', 'M', 'B', 'IY0', 'P', 'AE1', 'M', 'B', 'IY0']), 'name': ('NN', ['N', 'EY1', 'M']), 'named': ('VBN', ['N', 'EY1', 'M', 'D']), 'naming': ('VBG', ['N', 'EY1', 'M', 'IH0', 'NG']), 'nameless': ('NN', ['N', 'EY1', 'M', 'L', 'AH0', 'S']), 'namely': ('RB', ['N', 'EY1', 'M', 'L', 'IY0']), 'namer': ('NN', ['N', 'EY1', 'M', 'ER0']), 'namesake': ('NN', ['N', 'EY1', 'M', 'S', 'EY2', 'K']), 'nan': ('NN', ['N', 'AE1', 'N']), 'nanny': ('NN', ['N', 'AE1', 'N', 'IY0']), 'napped': ('NNS', ['N', 'AE1', 'P', 'T']), 'napping': ('VBG', ['N', 'AE1', 'P', 'IH0', 'NG']), 'nap': ('NN', ['N', 'AE1', 'P']), 'naphtha': ('NN', ['N', 'AE1', 'F', 'TH', 'AH0']), 'naphthalene': ('NN', ['N', 'AE1', 'F', 'TH', 'AH0', 'L', 'IY2', 'N']), 'napkin': ('NN', ['N', 'AE1', 'P', 'K', 'IH0', 'N']), 'napoleon': ('NN', ['N', 'AH0', 'P', 'OW1', 'L', 'IY0', 'AH0', 'N']), 'napoleonic': ('JJ', ['N', 'AH0', 'P', 'OW2', 'L', 'IY0', 'AA1', 'N', 'IH0', 'K']), 'narcissus': ('NN', ['N', 'AA0', 'R', 'S', 'IH1', 'S', 'AH0', 'S']), 'narcotic': ('JJ', ['N', 'AA0', 'R', 'K', 'AA1', 'T', 'IH0', 'K']), 'nard': ('RB', ['N', 'AA1', 'R', 'D']), 'nares': ('NNS', ['N', 'AE1', 'R', 'Z']), 'narrated': 
('VBN', ['N', 'EH1', 'R', 'EY2', 'T', 'IH0', 'D']), 'narrate': ('NN', ['N', 'EH1', 'R', 'EY2', 'T']), 'narration': ('NN', ['N', 'EH0', 'R', 'EY1', 'SH', 'AH0', 'N']), 'narrative': ('JJ', ['N', 'AE1', 'R', 'AH0', 'T', 'IH0', 'V']), 'narrator': ('NN', ['N', 'EH1', 'R', 'EY0', 'T', 'ER0']), 'narrow': ('NN', ['N', 'EH1', 'R', 'OW0']), 'narrows': ('NNS', ['N', 'EH1', 'R', 'OW0', 'Z']), 'narrowed': ('VBN', ['N', 'EH1', 'R', 'OW0', 'D']), 'narrowing': ('VBG', ['N', 'EH1', 'R', 'OW0', 'IH0', 'NG']), 'narrower': ('NN', ['N', 'EH1', 'R', 'OW0', 'ER0']), 'narrowly': ('RB', ['N', 'EH1', 'R', 'OW0', 'L', 'IY0']), 'narrowness': ('NN', ['N', 'EH1', 'R', 'OW0', 'N', 'AH0', 'S']), 'nasal': ('NN', ['N', 'EY1', 'Z', 'AH0', 'L']), 'nasally': ('RB', ['N', 'EY1', 'Z', 'AH0', 'L', 'IY0']), 'nascent': ('NN', ['N', 'EY1', 'S', 'AH0', 'N', 'T']), 'nash': ('NN', ['N', 'AE1', 'SH']), 'nastiness': ('NN', ['N', 'AE1', 'S', 'T', 'IY0', 'N', 'AH0', 'S']), 'nasty': ('JJ', ['N', 'AE1', 'S', 'T', 'IY0']), 'nat': ('NN', ['N', 'AE1', 'T']), 'natal': ('JJ', ['N', 'EY1', 'T', 'AH0', 'L']), 'natchez': ('NN', ['N', 'AE1', 'CH', 'EH2', 'Z']), 'nath': ('NN', ['N', 'AE1', 'TH']), 'nation': ('NN', ['N', 'EY1', 'SH', 'AH0', 'N']), 'national': ('JJ', ['N', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'nationalism': ('NN', ['N', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'nationalist': ('NN', ['N', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'AH0', 'S', 'T']), 'nationalities': ('NNS', ['N', 'AE2', 'SH', 'AH0', 'N', 'AE1', 'L', 'IH0', 'T', 'IY0', 'Z']), 'nationality': ('NN', ['N', 'AE2', 'SH', 'AH0', 'N', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'nationalization': ('NN', ['N', 'AE2', 'SH', 'AH0', 'N', 'AH0', 'L', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'nationalized': ('VBN', ['N', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'AY2', 'Z', 'D']), 'nationalizing': ('VBG', ['N', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'nationalize': ('NN', ['N', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'AY2', 'Z']), 
'nationally': ('RB', ['N', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'IY0']), 'native': ('JJ', ['N', 'EY1', 'T', 'IH0', 'V']), 'nativism': ('NN', ['N', 'EY1', 'T', 'IH0', 'V', 'IH2', 'Z', 'AH0', 'M']), 'nativist': ('NN', ['N', 'EY1', 'T', 'IH0', 'V', 'IH2', 'S', 'T']), 'nativity': ('NN', ['N', 'AH0', 'T', 'IH1', 'V', 'AH0', 'T', 'IY0']), 'natter': ('NN', ['N', 'AE1', 'T', 'ER0']), 'natty': ('RB', ['N', 'AE1', 'T', 'IY0']), 'natural': ('JJ', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L']), 'naturalism': ('NN', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'naturalist': ('NN', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L', 'AH0', 'S', 'T']), 'naturalistic': ('JJ', ['N', 'AE2', 'CH', 'ER0', 'AH0', 'L', 'IH1', 'S', 'T', 'IH0', 'K']), 'naturalization': ('NN', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'naturalized': ('VBN', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'D']), 'naturalizing': ('VBG', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'naturalize': ('NN', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L', 'AY2', 'Z']), 'naturally': ('RB', ['N', 'AE1', 'CH', 'ER0', 'AH0', 'L', 'IY0']), 'nature': ('NN', ['N', 'EY1', 'CH', 'ER0']), 'natured': ('VBN', ['N', 'EY1', 'CH', 'ER0', 'D']), 'naught': ('NN', ['N', 'AO1', 'T']), 'naughty': ('NN', ['N', 'AO1', 'T', 'IY0']), 'nausea': ('NN', ['N', 'AO1', 'Z', 'IY0', 'AH0']), 'nauseated': ('VBN', ['N', 'AO1', 'Z', 'IY0', 'EY2', 'T', 'AH0', 'D']), 'nauseating': ('VBG', ['N', 'AO1', 'ZH', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'nauseate': ('NN', ['N', 'AO1', 'Z', 'IY0', 'EY2', 'T']), 'nauseous': ('JJ', ['N', 'AO1', 'SH', 'AH0', 'S']), 'nautical': ('JJ', ['N', 'AO1', 'T', 'AH0', 'K', 'AH0', 'L']), 'nautilus': ('NN', ['N', 'AO1', 'T', 'AH0', 'L', 'AH0', 'S']), 'naval': ('NN', ['N', 'EY1', 'V', 'AH0', 'L']), 'nave': ('NN', ['N', 'EY1', 'V']), 'navel': ('NN', ['N', 'EY1', 'V', 'AH0', 'L']), 'navigable': ('JJ', ['N', 'AE1', 'V', 'AH0', 'G', 'AH0', 'B', 'AH0', 'L']), 'navigated': ('VBN', ['N', 'AE1', 
'V', 'AH0', 'G', 'EY2', 'T', 'IH0', 'D']), 'navigating': ('VBG', ['N', 'AE1', 'V', 'AH0', 'G', 'EY2', 'T', 'IH0', 'NG']), 'navigate': ('NN', ['N', 'AE1', 'V', 'AH0', 'G', 'EY2', 'T']), 'navigation': ('NN', ['N', 'AE1', 'V', 'AH0', 'G', 'EY1', 'SH', 'AH0', 'N']), 'navigator': ('NN', ['N', 'AE1', 'V', 'AH0', 'G', 'EY2', 'T', 'ER0']), 'navies': ('NNS', ['N', 'EY1', 'V', 'IY0', 'Z']), 'navy': ('NNS', ['N', 'EY1', 'V', 'IY0']), 'nay': ('NN', ['N', 'EY1']), 'nays': ('NNS', ['N', 'EY1', 'Z']), 'ne': ('NN', ['N', 'IY1']), 'neal': ('NN', ['N', 'IY1', 'L']), 'neapolitan': ('NN', ['N', 'IY2', 'AH0', 'P', 'AA1', 'L', 'AH0', 'T', 'AH0', 'N']), 'near': ('IN', ['N', 'IH1', 'R']), 'neared': ('VBN', ['N', 'IH1', 'R', 'D']), 'nearing': ('VBG', ['N', 'IH1', 'R', 'IH0', 'NG']), 'nearly': ('RB', ['N', 'IH1', 'R', 'L', 'IY0']), 'nearsighted': ('VBN', ['N', 'IY1', 'R', 'S', 'AY2', 'T', 'IH0', 'D']), 'nearsightedness': ('NN', ['N', 'IY1', 'R', 'S', 'AY2', 'T', 'IH0', 'D', 'N', 'AH0', 'S']), 'neat': ('NN', ['N', 'IY1', 'T']), 'neatly': ('RB', ['N', 'IY1', 'T', 'L', 'IY0']), 'neatness': ('NN', ['N', 'IY1', 'T', 'N', 'AH0', 'S']), 'nebula': ('NN', ['N', 'EH1', 'B', 'Y', 'AH0', 'L', 'AH0']), 'nebulizer': ('NN', ['N', 'EH1', 'B', 'Y', 'AH0', 'L', 'AY2', 'Z', 'ER0']), 'nebulous': ('JJ', ['N', 'EH1', 'B', 'Y', 'AH0', 'L', 'AH0', 'S']), 'necessarily': ('RB', ['N', 'EH2', 'S', 'AH0', 'S', 'EH1', 'R', 'AH0', 'L', 'IY0']), 'necessary': ('JJ', ['N', 'EH1', 'S', 'AH0', 'S', 'EH2', 'R', 'IY0']), 'necessitated': ('VBN', ['N', 'AH0', 'S', 'EH1', 'S', 'AH0', 'T', 'EY2', 'T', 'AH0', 'D']), 'necessitating': ('VBG', ['N', 'AH0', 'S', 'EH1', 'S', 'IH0', 'T', 'EY2', 'T', 'IH0', 'NG']), 'necessitate': ('NN', ['N', 'AH0', 'S', 'EH1', 'S', 'AH0', 'T', 'EY2', 'T']), 'necessities': ('NNS', ['N', 'AH0', 'S', 'EH1', 'S', 'IH0', 'T', 'IY0', 'Z']), 'necessity': ('NN', ['N', 'AH0', 'S', 'EH1', 'S', 'AH0', 'T', 'IY0']), 'neck': ('NN', ['N', 'EH1', 'K']), 'necked': ('NNS', ['N', 'EH1', 'K', 'T']), 'necklace': ('NN', ['N', 
'EH1', 'K', 'L', 'AH0', 'S']), 'necktie': ('NN', ['N', 'EH1', 'K', 'T', 'AY2']), 'neckwear': ('JJ', ['N', 'EH1', 'K', 'W', 'EH2', 'R']), 'necrologist': ('NN', ['N', 'AH0', 'K', 'R', 'AO1', 'L', 'AH0', 'JH', 'IH0', 'S', 'T']), 'necrology': ('NN', ['N', 'AH0', 'K', 'R', 'AO1', 'L', 'AH0', 'JH', 'IY0']), 'necromancer': ('NN', ['N', 'EH1', 'K', 'R', 'AH0', 'M', 'AE2', 'N', 'S', 'ER0']), 'necromancy': ('NN', ['N', 'EH1', 'K', 'R', 'AH0', 'M', 'AE2', 'N', 'S', 'IY0']), 'necropolis': ('NNS', ['N', 'AH0', 'K', 'R', 'AA1', 'P', 'AH0', 'L', 'AH0', 'S']), 'necrosis': ('NN', ['N', 'AH0', 'K', 'R', 'OW1', 'S', 'AH0', 'S']), 'nectar': ('NN', ['N', 'EH1', 'K', 'T', 'ER0']), 'nee': ('NN', ['N', 'IY1']), 'need': ('NN', ['N', 'IY1', 'D']), 'needed': ('VBN', ['N', 'IY1', 'D', 'AH0', 'D']), 'needing': ('VBG', ['N', 'IY1', 'D', 'IH0', 'NG']), 'needful': ('JJ', ['N', 'IY1', 'D', 'F', 'AH0', 'L']), 'needle': ('NN', ['N', 'IY1', 'D', 'AH0', 'L']), 'needlefish': ('JJ', ['N', 'IY1', 'D', 'AH0', 'L', 'F', 'IH2', 'SH']), 'needler': ('NN', ['N', 'IY1', 'D', 'AH0', 'L', 'ER0']), 'needless': ('NN', ['N', 'IY1', 'D', 'L', 'AH0', 'S']), 'needlework': ('NN', ['N', 'IY1', 'D', 'AH0', 'L', 'W', 'ER2', 'K']), 'needs': ('NNS', ['N', 'IY1', 'D', 'Z']), 'needy': ('NN', ['N', 'IY1', 'D', 'IY0']), 'neeld': ('NN', ['N', 'IY1', 'L', 'D']), 'neer': ('NN', ['N', 'IH1', 'R']), "ne'er": ('NN', ['N', 'EH1', 'R']), 'neese': ('JJ', ['N', 'IY1', 'Z']), 'nefarious': ('JJ', ['N', 'AH0', 'F', 'EH1', 'R', 'IY0', 'AH0', 'S']), 'negation': ('NN', ['N', 'AH0', 'G', 'EY1', 'SH', 'AH0', 'N']), 'negative': ('JJ', ['N', 'EH1', 'G', 'AH0', 'T', 'IH0', 'V']), 'negatively': ('RB', ['N', 'EH1', 'G', 'AH0', 'T', 'IH0', 'V', 'L', 'IY0']), 'negativity': ('NN', ['N', 'EH2', 'G', 'AH0', 'T', 'IH1', 'V', 'AH0', 'T', 'IY0']), 'neglected': ('VBN', ['N', 'AH0', 'G', 'L', 'EH1', 'K', 'T', 'AH0', 'D']), 'neglecting': ('VBG', ['N', 'IH0', 'G', 'L', 'EH1', 'K', 'T', 'IH0', 'NG']), 'neglect': ('NN', ['N', 'AH0', 'G', 'L', 'EH1', 'K', 'T']), 
'neglectful': ('JJ', ['N', 'IH0', 'G', 'L', 'EH1', 'K', 'T', 'F', 'AH0', 'L']), 'negligence': ('NN', ['N', 'EH1', 'G', 'L', 'AH0', 'JH', 'AH0', 'N', 'S']), 'negligent': ('NN', ['N', 'EH1', 'G', 'L', 'AH0', 'JH', 'AH0', 'N', 'T']), 'negligently': ('RB', ['N', 'EH1', 'G', 'L', 'IH0', 'JH', 'AH0', 'N', 'T', 'L', 'IY0']), 'negligible': ('JJ', ['N', 'EH1', 'G', 'L', 'AH0', 'JH', 'AH0', 'B', 'AH0', 'L']), 'negotiable': ('JJ', ['N', 'AH0', 'G', 'OW1', 'SH', 'AH0', 'B', 'AH0', 'L']), 'negotiate': ('NN', ['N', 'AH0', 'G', 'OW1', 'SH', 'IY0', 'EY2', 'T']), 'negotiated': ('VBN', ['N', 'AH0', 'G', 'OW1', 'SH', 'IY0', 'EY2', 'T', 'AH0', 'D']), 'negotiating': ('VBG', ['N', 'IH0', 'G', 'OW1', 'SH', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'negotiation': ('NN', ['N', 'IH0', 'G', 'OW2', 'SH', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'negotiator': ('NN', ['N', 'AH0', 'G', 'OW1', 'SH', 'IY0', 'EY2', 'T', 'ER0']), 'negroes': ('NNS', ['N', 'IY1', 'G', 'R', 'OW0', 'Z']), 'negro': ('NN', ['N', 'IY1', 'G', 'R', 'OW0']), 'negroid': ('NN', ['N', 'IY1', 'G', 'R', 'OY0', 'D']), 'negus': ('NN', ['N', 'IY1', 'G', 'AH0', 'S']), 'neighbor': ('NN', ['N', 'EY1', 'B', 'ER0']), 'neighboring': ('VBG', ['N', 'EY1', 'B', 'ER0', 'IH0', 'NG']), 'neighborhood': ('NN', ['N', 'EY1', 'B', 'ER0', 'HH', 'UH2', 'D']), 'neighborly': ('RB', ['N', 'EY1', 'B', 'ER0', 'L', 'IY0']), 'neither': ('DT', ['N', 'IY1', 'DH', 'ER0']), 'nematode': ('NN', ['N', 'EH1', 'M', 'AH0', 'T', 'OW2', 'D']), 'nemean': ('JJ', ['N', 'IY1', 'M', 'IY0', 'AH0', 'N']), 'nemesis': ('NN', ['N', 'EH1', 'M', 'AH0', 'S', 'IH0', 'S']), 'neophyte': ('NN', ['N', 'IY1', 'AH0', 'F', 'AY2', 'T']), 'neoplasm': ('NN', ['N', 'IY1', 'AH0', 'P', 'L', 'AE2', 'Z', 'AH0', 'M']), 'neoplatonic': ('JJ', ['N', 'IY2', 'OW0', 'P', 'L', 'AH0', 'T', 'AA1', 'N', 'IH0', 'K']), 'neoplatonist': ('NN', ['N', 'IY2', 'OW0', 'P', 'L', 'EY1', 'T', 'AH0', 'N', 'AH0', 'S', 'T']), 'nepa': ('NN', ['N', 'IY1', 'P', 'AH0']), 'nephew': ('NN', ['N', 'EH1', 'F', 'Y', 'UW0']), 'nephridium': ('NN', 
['N', 'AH0', 'F', 'R', 'IH1', 'D', 'IY0', 'AH0', 'M']), 'nephrite': ('RB', ['N', 'EH1', 'F', 'R', 'AY0', 'T']), 'nepotism': ('NN', ['N', 'EH1', 'P', 'AH0', 'T', 'IH2', 'Z', 'AH0', 'M']), 'neptune': ('NN', ['N', 'EH1', 'P', 'T', 'UW0', 'N']), 'neptunium': ('NN', ['N', 'EH0', 'P', 'T', 'UW1', 'N', 'IY0', 'AH0', 'M']), 'nero': ('NN', ['N', 'IH1', 'R', 'OW0']), 'nerve': ('NN', ['N', 'ER1', 'V']), 'nervous': ('JJ', ['N', 'ER1', 'V', 'AH0', 'S']), 'nervously': ('RB', ['N', 'ER1', 'V', 'AH0', 'S', 'L', 'IY0']), 'nervousness': ('NN', ['N', 'ER1', 'V', 'AH0', 'S', 'N', 'AH0', 'S']), 'nervy': ('NN', ['N', 'ER1', 'V', 'IY0']), 'ness': ('NN', ['N', 'EH1', 'S']), 'nest': ('JJS', ['N', 'EH1', 'S', 'T']), 'nestled': ('VBN', ['N', 'EH1', 'S', 'AH0', 'L', 'D']), 'nestling': ('VBG', ['N', 'EH1', 'S', 'T', 'L', 'IH0', 'NG']), 'nestle': ('RB', ['N', 'EH1', 'S', 'AH0', 'L']), 'nestor': ('NN', ['N', 'EH1', 'S', 'T', 'ER0']), 'nestorian': ('JJ', ['N', 'EH0', 'S', 'T', 'AO1', 'R', 'IY0', 'AH0', 'N']), 'nestorianism': ('NN', ['N', 'EH0', 'S', 'T', 'AO1', 'R', 'IY0', 'AH0', 'N', 'IH0', 'Z', 'AH0', 'M']), 'ney': ('NN', ['N', 'EY1']), 'netted': ('VBN', ['N', 'EH1', 'T', 'IH0', 'D']), 'netting': ('VBG', ['N', 'EH1', 'T', 'IH0', 'NG']), 'net': ('NN', ['N', 'EH1', 'T']), 'nether': ('RB', ['N', 'EH1', 'DH', 'ER0']), 'nettle': ('NN', ['N', 'EH1', 'T', 'AH0', 'L']), 'nettled': ('VBN', ['N', 'EH1', 'T', 'AH0', 'L', 'D']), 'nettles': ('NNS', ['N', 'EH1', 'T', 'AH0', 'L', 'Z']), 'netty': ('RB', ['N', 'EH1', 'T', 'IY0']), 'network': ('NN', ['N', 'EH1', 'T', 'W', 'ER2', 'K']), 'neural': ('JJ', ['N', 'UH1', 'R', 'AH0', 'L']), 'neurasthenia': ('NN', ['N', 'UH2', 'R', 'AE0', 'S', 'TH', 'IY1', 'N', 'IY0', 'AH0']), 'neurological': ('JJ', ['N', 'UH2', 'R', 'AH0', 'L', 'AA1', 'JH', 'IH0', 'K', 'AH0', 'L']), 'neurologist': ('NN', ['N', 'UH0', 'R', 'AA1', 'L', 'AH0', 'JH', 'AH0', 'S', 'T']), 'neurology': ('NN', ['N', 'UH0', 'R', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'neuropathy': ('JJ', ['N', 'UH1', 'R', 'OW0', 'P', 
'AE2', 'TH', 'IY0']), 'neurosis': ('NN', ['N', 'UH0', 'R', 'OW1', 'S', 'AH0', 'S']), 'neurotic': ('JJ', ['N', 'UH0', 'R', 'AA1', 'T', 'IH0', 'K']), 'neuter': ('NN', ['N', 'UW1', 'T', 'ER0']), 'neutral': ('JJ', ['N', 'UW1', 'T', 'R', 'AH0', 'L']), 'neutralist': ('NN', ['N', 'UW1', 'T', 'R', 'AH0', 'L', 'AH0', 'S', 'T']), 'neutrality': ('NN', ['N', 'UW0', 'T', 'R', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'neutralization': ('NN', ['N', 'UW2', 'T', 'R', 'AH0', 'L', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'neutralized': ('VBN', ['N', 'UW1', 'T', 'R', 'AH0', 'L', 'AY2', 'Z', 'D']), 'neutralizing': ('VBG', ['N', 'UW1', 'T', 'R', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'neutralize': ('NN', ['N', 'UW1', 'T', 'R', 'AH0', 'L', 'AY2', 'Z']), 'neutrally': ('RB', ['N', 'UW1', 'T', 'R', 'AH0', 'L', 'IY0']), 'neve': ('NNS', ['N', 'IY1', 'V']), 'never': ('RB', ['N', 'EH1', 'V', 'ER0']), 'nevermore': ('RB', ['N', 'EH1', 'V', 'ER0', 'M', 'AO2', 'R']), 'nevertheless': ('RB', ['N', 'EH2', 'V', 'ER0', 'DH', 'AH0', 'L', 'EH1', 'S']), 'new': ('JJ', ['N', 'UW1']), 'newborn': ('NNS', ['N', 'UW1', 'B', 'AO0', 'R', 'N']), 'newcome': ('NN', ['N', 'UW1', 'K', 'AH0', 'M']), 'newcomer': ('NN', ['N', 'UW1', 'K', 'AH2', 'M', 'ER0']), 'newfangle': ('NN', ['N', 'UW2', 'F', 'AE1', 'NG', 'G', 'AH0', 'L']), 'newfangled': ('VBN', ['N', 'UW2', 'F', 'AE1', 'NG', 'G', 'AH0', 'L', 'D']), 'newfoundland': ('NN', ['N', 'UW1', 'F', 'AH0', 'N', 'D', 'L', 'AH0', 'N', 'D']), 'newish': ('JJ', ['N', 'UW1', 'IH0', 'SH']), 'newly': ('RB', ['N', 'UW1', 'L', 'IY0']), 'newmarket': ('NN', ['N', 'UW1', 'M', 'AA2', 'R', 'K', 'AH0', 'T']), 'newness': ('NN', ['N', 'UW1', 'N', 'AH0', 'S']), 'news': ('NN', ['N', 'UW1', 'Z']), 'newsmen': ('NNS', ['N', 'UW1', 'Z', 'M', 'IH0', 'N']), 'newsman': ('NN', ['N', 'UW1', 'Z', 'M', 'AE2', 'N']), 'newspaper': ('NN', ['N', 'UW1', 'Z', 'P', 'EY2', 'P', 'ER0']), 'newsroom': ('NN', ['N', 'UW1', 'Z', 'R', 'UW2', 'M']), 'newsy': ('NN', ['N', 'UW1', 'Z', 'IY0']), 'newt': ('NN', ['N', 'UW1', 'T']), 
'newtonian': ('JJ', ['N', 'UW0', 'T', 'OW1', 'N', 'IY0', 'AH0', 'N']), 'next': ('JJ', ['N', 'EH1', 'K', 'S', 'T']), 'nexus': ('NN', ['N', 'EH1', 'K', 'S', 'AH0', 'S']), 'nib': ('NN', ['N', 'IH1', 'B']), 'nibbled': ('VBN', ['N', 'IH1', 'B', 'AH0', 'L', 'D']), 'nibbling': ('VBG', ['N', 'IH1', 'B', 'AH0', 'L', 'IH0', 'NG']), 'nibble': ('JJ', ['N', 'IH1', 'B', 'AH0', 'L']), 'niccolite': ('RB', ['N', 'IH1', 'K', 'AH0', 'L', 'AY2', 'T']), 'nice': ('JJ', ['N', 'AY1', 'S']), 'nicely': ('RB', ['N', 'AY1', 'S', 'L', 'IY0']), 'niceness': ('NN', ['N', 'AY1', 'S', 'N', 'AH0', 'S']), 'niceties': ('NNS', ['N', 'AY1', 'S', 'IH0', 'T', 'IY0', 'Z']), 'nicety': ('NN', ['N', 'AY1', 'S', 'IH0', 'T', 'IY0']), 'niche': ('NN', ['N', 'IH1', 'CH']), 'nick': ('NN', ['N', 'IH1', 'K']), 'nicked': ('NNS', ['N', 'IH1', 'K', 'T']), 'nickel': ('NN', ['N', 'IH1', 'K', 'AH0', 'L']), 'nickle': ('NN', ['N', 'IH1', 'K', 'AH0', 'L']), 'nickname': ('NN', ['N', 'IH1', 'K', 'N', 'EY2', 'M']), 'nicknamed': ('VBN', ['N', 'IH1', 'K', 'N', 'EY2', 'M', 'D']), 'nicotine': ('NN', ['N', 'IH1', 'K', 'AH0', 'T', 'IY2', 'N']), 'niece': ('NN', ['N', 'IY1', 'S']), 'niggard': ('RB', ['N', 'IH1', 'G', 'ER0', 'D']), 'niggardliness': ('NN', ['N', 'IH1', 'G', 'ER0', 'D', 'L', 'IY0', 'N', 'AH0', 'S']), 'niggardly': ('RB', ['N', 'IH1', 'G', 'ER0', 'D', 'L', 'IY0']), 'nigger': ('NN', ['N', 'IH1', 'G', 'ER0']), 'nigh': ('JJ', ['N', 'AY1']), 'night': ('NN', ['N', 'AY1', 'T']), 'nightfall': ('NN', ['N', 'AY1', 'T', 'F', 'AO2', 'L']), 'nightingale': ('NN', ['N', 'AY1', 'T', 'IH0', 'NG', 'G', 'EY0', 'L']), 'nightly': ('RB', ['N', 'AY1', 'T', 'L', 'IY0']), 'nightmare': ('NN', ['N', 'AY1', 'T', 'M', 'EH2', 'R']), 'nightshade': ('NN', ['N', 'AY1', 'CH', 'EY2', 'D']), 'nightshirt': ('NN', ['N', 'AY1', 'CH', 'ER2', 'T']), 'nighttime': ('NN', ['N', 'AY1', 'T', 'T', 'AY2', 'M']), 'nihilism': ('NN', ['N', 'AY1', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'nil': ('NN', ['N', 'IH1', 'L']), 'nile': ('NN', ['N', 'AY1', 'L']), 'nill': ('NN', ['N', 
'IH1', 'L']), 'nome': ('NN', ['N', 'OW1', 'M']), 'nimble': ('JJ', ['N', 'IH1', 'M', 'B', 'AH0', 'L']), 'nimbly': ('RB', ['N', 'IH1', 'M', 'B', 'L', 'IY0']), 'nimbus': ('NN', ['N', 'IH1', 'M', 'B', 'AH0', 'S']), 'nimmer': ('NN', ['N', 'IH1', 'M', 'ER0']), 'nincompoop': ('NN', ['N', 'IH1', 'NG', 'K', 'AH0', 'M', 'P', 'UW2', 'P']), 'nine': ('CD', ['N', 'AY1', 'N']), 'ninefold': ('NN', ['N', 'IH1', 'N', 'F', 'OW2', 'L', 'D']), 'nineteen': ('NN', ['N', 'AY1', 'N', 'T', 'IY1', 'N']), 'nineteenth': ('NN', ['N', 'AY1', 'N', 'T', 'IY1', 'N', 'TH']), 'ninetieth': ('NNS', ['N', 'AY1', 'N', 'T', 'IY0', 'IH0', 'TH']), 'ninety': ('NN', ['N', 'AY1', 'N', 'T', 'IY0']), 'nineties': ('NNS', ['N', 'AY1', 'N', 'T', 'IY0', 'Z']), 'ninny': ('NN', ['N', 'IH1', 'N', 'IY0']), 'ninth': ('JJ', ['N', 'AY1', 'N', 'TH']), 'niobite': ('RB', ['N', 'AY1', 'OW0', 'B', 'AY2', 'T']), 'niobium': ('NN', ['N', 'AY2', 'OW1', 'B', 'IY0', 'AH0', 'M']), 'nip': ('NN', ['N', 'IH1', 'P']), 'nipped': ('NNS', ['N', 'IH1', 'P', 'T']), 'nipping': ('VBG', ['N', 'IH1', 'P', 'IH0', 'NG']), 'nipper': ('NN', ['N', 'IH1', 'P', 'ER0']), 'nipple': ('NN', ['N', 'IH1', 'P', 'AH0', 'L']), 'nirvana': ('NNS', ['N', 'IH0', 'R', 'V', 'AA1', 'N', 'AH0']), 'nit': ('NN', ['N', 'IH1', 'T']), 'nitrate': ('NN', ['N', 'AY1', 'T', 'R', 'EY2', 'T']), 'nitric': ('JJ', ['N', 'AY1', 'T', 'R', 'IH0', 'K']), 'nitride': ('RB', ['N', 'AY1', 'T', 'R', 'AY0', 'D']), 'nitrocellulose': ('RB', ['N', 'AY2', 'T', 'R', 'OW0', 'S', 'EH1', 'L', 'Y', 'AH0', 'L', 'OW2', 'S']), 'nitrogen': ('NN', ['N', 'AY1', 'T', 'R', 'AH0', 'JH', 'AH0', 'N']), 'nitrogenous': ('JJ', ['N', 'AY0', 'T', 'R', 'AA1', 'JH', 'AH0', 'N', 'AH0', 'S']), 'nitroglycerin': ('NN', ['N', 'AY2', 'T', 'R', 'OW0', 'G', 'L', 'IH1', 'S', 'ER0', 'AH0', 'N']), 'nitrous': ('JJ', ['N', 'IH1', 'T', 'R', 'AH0', 'S']), 'nitty': ('RB', ['N', 'IH1', 'T', 'IY0']), 'nix': ('NN', ['N', 'IH1', 'K', 'S']), 'nixie': ('NN', ['N', 'IH1', 'K', 'S', 'IY0']), 'no': ('DT', ['N', 'OW1']), 'noes': ('NNS', ['N', 
'OW1', 'Z']), 'noah': ('NN', ['N', 'OW1', 'AH0']), 'nobility': ('NN', ['N', 'OW0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'noble': ('JJ', ['N', 'OW1', 'B', 'AH0', 'L']), 'nobleman': ('NN', ['N', 'OW1', 'B', 'AH0', 'L', 'M', 'AH0', 'N']), 'noblesse': ('NN', ['N', 'OW0', 'B', 'L', 'EH1', 'S']), 'noblewoman': ('NN', ['N', 'OW1', 'B', 'AH0', 'L', 'W', 'UH2', 'M', 'AH0', 'N']), 'nobly': ('RB', ['N', 'AA1', 'B', 'L', 'IY0']), 'nobodies': ('NNS', ['N', 'OW1', 'B', 'AA2', 'D', 'IY2', 'Z']), 'nobody': ('NN', ['N', 'OW1', 'B', 'AA2', 'D', 'IY2']), 'nock': ('NN', ['N', 'AA1', 'K']), 'nocturnal': ('JJ', ['N', 'AA0', 'K', 'T', 'ER1', 'N', 'AH0', 'L']), 'nod': ('NN', ['N', 'AA1', 'D']), 'nodded': ('VBN', ['N', 'AA1', 'D', 'AH0', 'D']), 'nodding': ('VBG', ['N', 'AA1', 'D', 'IH0', 'NG']), 'node': ('NN', ['N', 'OW1', 'D']), 'nodular': ('JJ', ['N', 'AA1', 'JH', 'AH0', 'L', 'ER0']), 'nodule': ('NN', ['N', 'AA1', 'JH', 'UW0', 'L']), 'noel': ('NN', ['N', 'OW0', 'EH1', 'L']), 'noise': ('NN', ['N', 'OY1', 'Z']), 'noisily': ('RB', ['N', 'OY1', 'Z', 'AH0', 'L', 'IY0']), 'noisy': ('NN', ['N', 'OY1', 'Z', 'IY0']), 'nolde': ('NN', ['N', 'OW1', 'L', 'D']), 'nole': ('NN', ['N', 'OW1', 'L']), 'noll': ('NN', ['N', 'OW1', 'L']), 'nolt': ('NN', ['N', 'OW1', 'L', 'T']), 'nom': ('NN', ['N', 'AA1', 'M']), 'nomad': ('NN', ['N', 'OW1', 'M', 'AE2', 'D']), 'nomadic': ('JJ', ['N', 'OW0', 'M', 'AE1', 'D', 'IH0', 'K']), 'nomenclatural': ('JJ', ['N', 'OW0', 'M', 'AH0', 'N', 'K', 'L', 'EY1', 'CH', 'ER0', 'AH0', 'L']), 'nomenclature': ('NN', ['N', 'OW1', 'M', 'AH0', 'N', 'K', 'L', 'EY2', 'CH', 'ER0']), 'nominal': ('JJ', ['N', 'AA1', 'M', 'AH0', 'N', 'AH0', 'L']), 'nominally': ('RB', ['N', 'AA1', 'M', 'AH0', 'N', 'AH0', 'L', 'IY0']), 'nominated': ('VBN', ['N', 'AA1', 'M', 'AH0', 'N', 'EY2', 'T', 'AH0', 'D']), 'nominating': ('VBG', ['N', 'AA1', 'M', 'AH0', 'N', 'EY2', 'T', 'IH0', 'NG']), 'nominate': ('NN', ['N', 'AA1', 'M', 'AH0', 'N', 'AH0', 'T']), 'nomination': ('NN', ['N', 'AA2', 'M', 'AH0', 'N', 'EY1', 'SH', 
'AH0', 'N']), 'nominee': ('NN', ['N', 'AA2', 'M', 'AH0', 'N', 'IY1']), 'non': ('NN', ['N', 'AA1', 'N']), 'nonce': ('NN', ['N', 'AA1', 'N', 'S']), 'nonchalance': ('NN', ['N', 'AA1', 'N', 'SH', 'AH0', 'L', 'AA1', 'N', 'S']), 'nonchalant': ('NN', ['N', 'AA2', 'N', 'SH', 'AH0', 'L', 'AA1', 'N', 'T']), 'nonchalantly': ('RB', ['N', 'AA1', 'N', 'SH', 'AH0', 'L', 'AA1', 'N', 'T', 'L', 'IY0']), 'noncombatant': ('NN', ['N', 'AA2', 'N', 'K', 'AH0', 'M', 'B', 'AE1', 'T', 'AH0', 'N', 'T']), 'noncommittal': ('JJ', ['N', 'AA1', 'N', 'K', 'AH0', 'M', 'IH1', 'T', 'AH0', 'L']), 'noncompliance': ('NN', ['N', 'AA2', 'N', 'K', 'AH0', 'M', 'P', 'L', 'AY1', 'AH0', 'N', 'S']), 'nonconformist': ('NN', ['N', 'AA2', 'N', 'K', 'AH0', 'N', 'F', 'AO1', 'R', 'M', 'IH0', 'S', 'T']), 'nonconformity': ('NN', ['N', 'AA2', 'N', 'K', 'AH0', 'N', 'F', 'AO1', 'R', 'M', 'AH0', 'T', 'IY0']), 'nondescript': ('NN', ['N', 'AA1', 'N', 'D', 'IH0', 'S', 'K', 'R', 'IH1', 'P', 'T']), 'none': ('NN', ['N', 'AH1', 'N']), 'nonelectrical': ('JJ', ['N', 'AA0', 'N', 'IH0', 'L', 'EH1', 'K', 'T', 'R', 'IH0', 'K', 'AH0', 'L']), 'nonentity': ('NN', ['N', 'AA0', 'N', 'EH1', 'N', 'T', 'AH0', 'T', 'IY0']), 'nonessential': ('JJ', ['N', 'AA2', 'N', 'IH0', 'S', 'EH1', 'N', 'CH', 'AH0', 'L']), 'nonesuch': ('JJ', ['N', 'AH1', 'N', 'S', 'AH1', 'CH']), 'nonexistent': ('NN', ['N', 'AA2', 'N', 'AH0', 'G', 'Z', 'IH1', 'S', 'T', 'AH0', 'N', 'T']), 'nonintervention': ('NN', ['N', 'AA2', 'N', 'IH2', 'N', 'T', 'ER0', 'V', 'EH1', 'N', 'CH', 'AH0', 'N']), 'nonmanufacturing': ('VBG', ['N', 'AA2', 'N', 'M', 'AE2', 'N', 'Y', 'AH0', 'F', 'AE1', 'K', 'CH', 'ER0', 'IH0', 'NG']), 'nonmember': ('NN', ['N', 'AA0', 'N', 'M', 'EH1', 'M', 'B', 'ER0']), 'nonpayment': ('NN', ['N', 'AA0', 'N', 'P', 'EY1', 'M', 'AH0', 'N', 'T']), 'nonplussed': ('VBN', ['N', 'AA0', 'N', 'P', 'L', 'AH1', 'S', 'T']), 'nonprofessional': ('JJ', ['N', 'AA2', 'N', 'P', 'R', 'AH0', 'F', 'EH1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'nonrecurring': ('VBG', ['N', 'AA0', 'N', 'R', 'IH0', 'K', 
'ER1', 'IH0', 'NG']), 'nonresident': ('NN', ['N', 'AA0', 'N', 'R', 'EH1', 'Z', 'AH0', 'D', 'AH0', 'N', 'T']), 'nonsense': ('NN', ['N', 'AA1', 'N', 'S', 'EH0', 'N', 'S']), 'nonsensical': ('JJ', ['N', 'AA0', 'N', 'S', 'EH1', 'N', 'S', 'IH0', 'K', 'AH0', 'L']), 'nontoxic': ('NN', ['N', 'AA0', 'N', 'T', 'AA1', 'K', 'S', 'IH0', 'K']), 'noodle': ('NN', ['N', 'UW1', 'D', 'AH0', 'L']), 'nook': ('NN', ['N', 'UH1', 'K']), 'noon': ('NN', ['N', 'UW1', 'N']), 'noose': ('RB', ['N', 'UW1', 'S']), 'nope': ('NN', ['N', 'OW1', 'P']), 'nor': ('CC', ['N', 'AO1', 'R']), 'norm': ('NN', ['N', 'AO1', 'R', 'M']), 'norma': ('NN', ['N', 'AO1', 'R', 'M', 'AH0']), 'normal': ('JJ', ['N', 'AO1', 'R', 'M', 'AH0', 'L']), 'normalcy': ('NN', ['N', 'AO1', 'R', 'M', 'AH0', 'L', 'S', 'IY0']), 'normalization': ('NN', ['N', 'AO2', 'R', 'M', 'AH0', 'L', 'IH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'normally': ('RB', ['N', 'AO1', 'R', 'M', 'AH0', 'L', 'IY0']), 'norman': ('NN', ['N', 'AO1', 'R', 'M', 'AH0', 'N']), 'norna': ('NN', ['N', 'AO1', 'R', 'N', 'AH0']), 'norse': ('NN', ['N', 'AO1', 'R', 'S']), 'norsemen': ('NNS', ['N', 'AO1', 'R', 'S', 'M', 'IH0', 'N']), 'north': ('NN', ['N', 'AO1', 'R', 'TH']), 'northeast': ('NN', ['N', 'AO2', 'R', 'TH', 'IY1', 'S', 'T']), 'northeasterly': ('RB', ['N', 'AO2', 'R', 'TH', 'IY1', 'S', 'T', 'ER0', 'L', 'IY0']), 'northeastern': ('JJ', ['N', 'AO2', 'R', 'TH', 'IY1', 'S', 'T', 'ER0', 'N']), 'northeastward': ('RB', ['N', 'AO2', 'R', 'TH', 'IY1', 'S', 'T', 'W', 'ER0', 'D']), 'norther': ('RB', ['N', 'AO1', 'R', 'DH', 'ER0']), 'northerly': ('RB', ['N', 'AO1', 'R', 'DH', 'ER0', 'L', 'IY0']), 'northern': ('JJ', ['N', 'AO1', 'R', 'DH', 'ER0', 'N']), 'northerner': ('NN', ['N', 'AO1', 'R', 'DH', 'ER0', 'N', 'ER0']), 'northernmost': ('NN', ['N', 'AO1', 'R', 'DH', 'ER0', 'N', 'M', 'OW2', 'S', 'T']), 'northward': ('RB', ['N', 'AO1', 'R', 'TH', 'W', 'ER0', 'D']), 'northwardly': ('RB', ['N', 'AO1', 'R', 'TH', 'W', 'ER0', 'D', 'L', 'IY0']), 'northwest': ('RB', ['N', 'AO2', 'R', 'TH', 'W', 
'EH1', 'S', 'T']), 'northwesterly': ('RB', ['N', 'AO2', 'R', 'TH', 'W', 'EH1', 'S', 'T', 'ER0', 'L', 'IY0']), 'northwestern': ('JJ', ['N', 'AO2', 'R', 'TH', 'W', 'EH1', 'S', 'T', 'ER0', 'N']), 'norwegian': ('JJ', ['N', 'AO2', 'R', 'W', 'IY1', 'JH', 'AH0', 'N']), 'nose': ('RB', ['N', 'OW1', 'Z']), 'nosed': ('VBN', ['N', 'OW1', 'Z', 'D']), 'nosing': ('VBG', ['N', 'OW1', 'Z', 'IH0', 'NG']), 'nosebleed': ('NN', ['N', 'OW1', 'Z', 'B', 'L', 'IY2', 'D']), 'nostalgia': ('NN', ['N', 'AO0', 'S', 'T', 'AE1', 'L', 'JH', 'AH0']), 'nostalgic': ('NN', ['N', 'AO0', 'S', 'T', 'AE1', 'L', 'JH', 'IH0', 'K']), 'nostril': ('NN', ['N', 'AA1', 'S', 'T', 'R', 'IH0', 'L']), 'nostrums': ('NNS', ['N', 'AA1', 'S', 'T', 'R', 'AH0', 'M', 'Z']), 'not': ('RB', ['N', 'AA1', 'T']), 'notable': ('JJ', ['N', 'OW1', 'T', 'AH0', 'B', 'AH0', 'L']), 'notably': ('RB', ['N', 'OW1', 'T', 'AH0', 'B', 'L', 'IY0']), 'notary': ('JJ', ['N', 'OW1', 'T', 'ER0', 'IY0']), 'notation': ('NN', ['N', 'OW0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'notch': ('NN', ['N', 'AA1', 'CH']), 'notched': ('VBN', ['N', 'AA1', 'CH', 'T']), 'note': ('NN', ['N', 'OW1', 'T']), 'noted': ('VBN', ['N', 'OW1', 'T', 'AH0', 'D']), 'noting': ('VBG', ['N', 'OW1', 'T', 'IH0', 'NG']), 'notebook': ('NN', ['N', 'OW1', 'T', 'B', 'UH2', 'K']), 'noteworthy': ('JJ', ['N', 'OW1', 'T', 'W', 'ER2', 'DH', 'IY0']), 'nother': ('RB', ['N', 'AH1', 'DH', 'ER0']), 'nothing': ('NN', ['N', 'AH1', 'TH', 'IH0', 'NG']), 'nothingness': ('NN', ['N', 'AH1', 'TH', 'IH0', 'NG', 'N', 'AH0', 'S']), 'notice': ('NN', ['N', 'OW1', 'T', 'AH0', 'S']), 'noticed': ('VBN', ['N', 'OW1', 'T', 'AH0', 'S', 'T']), 'noticing': ('VBG', ['N', 'OW1', 'T', 'IH0', 'S', 'IH0', 'NG']), 'noticeable': ('JJ', ['N', 'OW1', 'T', 'AH0', 'S', 'AH0', 'B', 'AH0', 'L']), 'noticeably': ('RB', ['N', 'OW1', 'T', 'IH0', 'S', 'AH0', 'B', 'L', 'IY0']), 'notification': ('NN', ['N', 'OW2', 'T', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'notified': ('VBN', ['N', 'OW1', 'T', 'AH0', 'F', 'AY2', 'D']), 'notifying': 
('VBG', ['N', 'OW1', 'T', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'notify': ('NN', ['N', 'OW1', 'T', 'AH0', 'F', 'AY2']), 'notion': ('NN', ['N', 'OW1', 'SH', 'AH0', 'N']), 'notochord': ('NN', ['N', 'OW1', 'T', 'AH0', 'K', 'AO2', 'R', 'D']), 'notoriety': ('NN', ['N', 'OW2', 'T', 'ER0', 'AY1', 'AH0', 'T', 'IY0']), 'notorious': ('JJ', ['N', 'OW0', 'T', 'AO1', 'R', 'IY0', 'AH0', 'S']), 'nott': ('NN', ['N', 'AA1', 'T']), 'notwithstanding': ('VBG', ['N', 'AA2', 'T', 'W', 'IH0', 'TH', 'S', 'T', 'AE1', 'N', 'D', 'IH0', 'NG']), 'noun': ('NN', ['N', 'AW1', 'N']), 'nourished': ('VBN', ['N', 'ER1', 'IH0', 'SH', 'T']), 'nourishing': ('VBG', ['N', 'ER1', 'IH0', 'SH', 'IH0', 'NG']), 'nourish': ('JJ', ['N', 'ER1', 'IH0', 'SH']), 'nourishment': ('NN', ['N', 'ER1', 'IH0', 'SH', 'M', 'AH0', 'N', 'T']), 'nous': ('JJ', ['N', 'UW1', 'S']), 'novel': ('NN', ['N', 'AA1', 'V', 'AH0', 'L']), 'novelist': ('NN', ['N', 'AA1', 'V', 'AH0', 'L', 'AH0', 'S', 'T']), 'novelties': ('NNS', ['N', 'AA1', 'V', 'AH0', 'L', 'T', 'IY0', 'Z']), 'novelty': ('NN', ['N', 'AA1', 'V', 'AH0', 'L', 'T', 'IY0']), 'november': ('NN', ['N', 'OW0', 'V', 'EH1', 'M', 'B', 'ER0']), 'novice': ('NN', ['N', 'AA1', 'V', 'AH0', 'S']), 'novum': ('NN', ['N', 'OW1', 'V', 'AH0', 'M']), 'now': ('RB', ['N', 'AW1']), 'nowadays': ('NNS', ['N', 'AW1', 'AH0', 'D', 'EY2', 'Z']), 'nowhere': ('RB', ['N', 'OW1', 'W', 'EH2', 'R']), 'noxious': ('JJ', ['N', 'AA1', 'K', 'SH', 'AH0', 'S']), 'nozzle': ('NN', ['N', 'AA1', 'Z', 'AH0', 'L']), 'nuance': ('NN', ['N', 'UW1', 'AA0', 'N', 'S']), 'nub': ('NN', ['N', 'AH1', 'B']), 'nubian': ('JJ', ['N', 'Y', 'UW1', 'B', 'IY0', 'AH0', 'N']), 'nuclear': ('JJ', ['N', 'UW1', 'K', 'L', 'IY0', 'ER0']), 'nuclei': ('NN', ['N', 'UW1', 'K', 'L', 'IY0', 'AY2']), 'nucleus': ('NN', ['N', 'UW1', 'K', 'L', 'IY0', 'AH0', 'S']), 'nude': ('NN', ['N', 'UW1', 'D']), 'nudging': ('VBG', ['N', 'AH1', 'JH', 'IH0', 'NG']), 'nudge': ('NN', ['N', 'AH1', 'JH']), 'nudity': ('NN', ['N', 'UW1', 'D', 'IH0', 'T', 'IY0']), 'nugget': ('NN', ['N', 
'AH1', 'G', 'IH0', 'T']), 'nuisance': ('NN', ['N', 'UW1', 'S', 'AH0', 'N', 'S']), 'null': ('NN', ['N', 'AH1', 'L']), 'nullification': ('NN', ['N', 'AH2', 'L', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'nullified': ('JJ', ['N', 'AH1', 'L', 'AH0', 'F', 'AY2', 'D']), 'nullifying': ('VBG', ['N', 'AH1', 'L', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'nullify': ('NN', ['N', 'AH1', 'L', 'AH0', 'F', 'AY2']), 'numb': ('NN', ['N', 'AH1', 'M']), 'numbed': ('NNS', ['N', 'AH1', 'M', 'D']), 'numbing': ('VBG', ['N', 'AH1', 'M', 'IH0', 'NG']), 'number': ('NN', ['N', 'AH1', 'M', 'B', 'ER0']), 'numbered': ('JJ', ['N', 'AH1', 'M', 'B', 'ER0', 'D']), 'numbering': ('VBG', ['N', 'AH1', 'M', 'B', 'ER0', 'IH0', 'NG']), 'numbers': ('NNS', ['N', 'AH1', 'M', 'B', 'ER0', 'Z']), 'numbness': ('NN', ['N', 'AH1', 'M', 'N', 'AH0', 'S']), 'numeral': ('JJ', ['N', 'UW1', 'M', 'ER0', 'AH0', 'L']), 'numeric': ('JJ', ['N', 'UW0', 'M', 'EH1', 'R', 'IH0', 'K']), 'numerical': ('JJ', ['N', 'UW0', 'M', 'EH1', 'R', 'AH0', 'K', 'AH0', 'L']), 'o': ('NN', ['OW1']), "o's": ('NN', ['OW1', 'Z']), "o'": ('NN', ['OW1']), 'oad': ('NN', ['OW1', 'EY1', 'D', 'IY1']), 'oak': ('NN', ['OW1', 'K']), 'oaky': ('NN', ['OW1', 'K', 'IY0']), 'oar': ('NN', ['AO1', 'R']), 'oared': ('VBN', ['AO1', 'R', 'D']), 'oarlock': ('NN', ['AO1', 'R', 'L', 'AA2', 'K']), 'oarsman': ('NN', ['AO1', 'R', 'Z', 'M', 'AH0', 'N']), 'oases': ('NNS', ['OW0', 'EY1', 'S', 'IY0', 'Z']), 'oasis': ('NN', ['OW0', 'EY1', 'S', 'IH0', 'S']), 'oats': ('NNS', ['OW1', 'T', 'S']), 'oat': ('NN', ['OW1', 'T']), 'oaths': ('NNS', ['OW1', 'DH', 'Z']), 'oath': ('NN', ['OW1', 'TH']), 'oatmeal': ('NN', ['OW1', 'T', 'M', 'IY2', 'L']), 'obdurate': ('NN', ['AA1', 'B', 'D', 'ER0', 'AH0', 'T']), 'obedience': ('NN', ['OW0', 'B', 'IY1', 'D', 'IY0', 'AH0', 'N', 'S']), 'obedient': ('NN', ['OW0', 'B', 'IY1', 'D', 'IY0', 'AH0', 'N', 'T']), 'obediently': ('RB', ['OW0', 'B', 'IY1', 'D', 'IY0', 'AH0', 'N', 'T', 'L', 'IY0']), 'p': ('NN', ['P', 'IY1']), 'pa': ('NN', ['P', 'AA1']), 'pac': 
('NN', ['P', 'AE1', 'K']), 'paca': ('NN', ['P', 'AA1', 'K', 'AH0']), 'pace': ('NN', ['P', 'EY1', 'S']), 'paced': ('VBN', ['P', 'EY1', 'S', 'T']), 'pacing': ('VBG', ['P', 'EY1', 'S', 'IH0', 'NG']), 'pacer': ('NN', ['P', 'EY1', 'S', 'ER0']), 'pacific': ('NN', ['P', 'AH0', 'S', 'IH1', 'F', 'IH0', 'K']), 'pacification': ('NN', ['P', 'AE2', 'S', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'pacified': ('VBN', ['P', 'AE1', 'S', 'AH0', 'F', 'AY2', 'D']), 'pacify': ('NN', ['P', 'AE1', 'S', 'AH0', 'F', 'AY2']), 'pack': ('NN', ['P', 'AE1', 'K']), 'packed': ('NNS', ['P', 'AE1', 'K', 'T']), 'packing': ('VBG', ['P', 'AE1', 'K', 'IH0', 'NG']), 'package': ('NN', ['P', 'AE1', 'K', 'AH0', 'JH']), 'packer': ('NN', ['P', 'AE1', 'K', 'ER0']), 'packet': ('NN', ['P', 'AE1', 'K', 'AH0', 'T']), 'packman': ('NN', ['P', 'AE1', 'K', 'M', 'AH0', 'N']), 'paco': ('NN', ['P', 'EY1', 'K', 'OW0']), 'pact': ('NN', ['P', 'AE1', 'K', 'T']), 'pad': ('NN', ['P', 'AE1', 'D']), 'padded': ('VBN', ['P', 'AE1', 'D', 'AH0', 'D']), 'padding': ('VBG', ['P', 'AE1', 'D', 'IH0', 'NG']), 'paddle': ('NN', ['P', 'AE1', 'D', 'AH0', 'L']), 'paddled': ('VBN', ['P', 'AE1', 'D', 'AH0', 'L', 'D']), 'paddling': ('VBG', ['P', 'AE1', 'D', 'AH0', 'L', 'IH0', 'NG']), 'paddock': ('NN', ['P', 'AE1', 'D', 'AH0', 'K']), 'paddy': ('NN', ['P', 'AE1', 'D', 'IY0']), 'paddies': ('NNS', ['P', 'AE1', 'D', 'IY0', 'Z']), 'padlock': ('NN', ['P', 'AE1', 'D', 'L', 'AA2', 'K']), 'padlocked': ('NNS', ['P', 'AE1', 'D', 'L', 'AA2', 'K', 'T']), 'paean': ('NN', ['P', 'IY1', 'AH0', 'N']), 'pagan': ('NN', ['P', 'EY1', 'G', 'AH0', 'N']), 'paganism': ('NN', ['P', 'EY1', 'G', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'page': ('NN', ['P', 'EY1', 'JH']), 'paged': ('VBN', ['P', 'EY1', 'JH', 'D']), 'paging': ('VBG', ['P', 'EY1', 'JH', 'IH0', 'NG']), 'pageant': ('NN', ['P', 'AE1', 'JH', 'AH0', 'N', 'T']), 'pageantry': ('NN', ['P', 'AE1', 'JH', 'AH0', 'N', 'T', 'R', 'IY0']), 'pagination': ('NN', ['P', 'AE2', 'JH', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 
'pagoda': ('NN', ['P', 'AH0', 'G', 'OW1', 'D', 'AH0']), 'pagurian': ('NN', ['P', 'AH0', 'G', 'Y', 'UH1', 'R', 'IY0', 'AH0', 'N']), 'pah': ('NN', ['P', 'AE1']), 'paid': ('NN', ['P', 'EY1', 'D']), 'pail': ('NN', ['P', 'EY1', 'L']), 'pain': ('NN', ['P', 'EY1', 'N']), 'pained': ('VBN', ['P', 'EY1', 'N', 'D']), 'painful': ('NN', ['P', 'EY1', 'N', 'F', 'AH0', 'L']), 'painless': ('NN', ['P', 'EY1', 'N', 'L', 'AH0', 'S']), 'pains': ('NNS', ['P', 'EY1', 'N', 'Z']), 'painstaking': ('VBG', ['P', 'EY1', 'N', 'S', 'T', 'EY2', 'K', 'IH0', 'NG']), 'painted': ('VBN', ['P', 'EY1', 'N', 'T', 'AH0', 'D']), 'painting': ('NN', ['P', 'EY1', 'N', 'T', 'IH0', 'NG']), 'paint': ('NN', ['P', 'EY1', 'N', 'T']), 'painter': ('NN', ['P', 'EY1', 'N', 'T', 'ER0']), 'painterly': ('RB', ['P', 'EY1', 'N', 'T', 'ER0', 'L', 'IY0']), 'pair': ('NN', ['P', 'EH1', 'R']), 'paired': ('VBN', ['P', 'EH1', 'R', 'D']), 'pairing': ('VBG', ['P', 'EH1', 'R', 'IH0', 'NG']), 'pais': ('NN', ['P', 'EY1', 'Z']), 'pal': ('NN', ['P', 'AE1', 'L']), 'palace': ('NN', ['P', 'AE1', 'L', 'AH0', 'S']), 'paladin': ('NN', ['P', 'AE1', 'L', 'AH0', 'D', 'IH0', 'N']), 'palatability': ('NN', ['P', 'AE2', 'L', 'AH0', 'T', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'palatable': ('JJ', ['P', 'AE1', 'L', 'AH0', 'T', 'AH0', 'B', 'AH0', 'L']), 'palate': ('NN', ['P', 'AE1', 'L', 'AH0', 'T']), 'palatial': ('JJ', ['P', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'L']), 'palatine': ('NN', ['P', 'AE1', 'L', 'AH0', 'T', 'AY2', 'N']), 'pale': ('NN', ['P', 'EY1', 'L']), 'paled': ('VBN', ['P', 'EY1', 'L', 'D']), 'paleobotany': ('NN', ['P', 'EY2', 'L', 'IY0', 'OW0', 'B', 'AA1', 'T', 'AH0', 'N', 'IY0']), 'paleontologist': ('NN', ['P', 'EY2', 'L', 'IY0', 'AH0', 'N', 'T', 'AA1', 'L', 'AH0', 'JH', 'IH0', 'S', 'T']), 'paleontology': ('NN', ['P', 'EY2', 'L', 'IY0', 'AH0', 'N', 'T', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'paleozoic': ('NN', ['P', 'EY2', 'L', 'IY0', 'AH0', 'Z', 'OW1', 'IH0', 'K']), 'palestinian': ('JJ', ['P', 'AE2', 'L', 'IH0', 'S', 'T', 'IH1', 'N', 'IY0', 
'AH0', 'N']), 'palette': ('NN', ['P', 'AE1', 'L', 'AH0', 'T']), 'palfrey': ('NN', ['P', 'AE1', 'L', 'F', 'R', 'IY0']), 'palisade': ('NN', ['P', 'AE2', 'L', 'IH0', 'S', 'EY1', 'D']), 'pall': ('NN', ['P', 'AA1', 'L']), 'palla': ('NN', ['P', 'AE1', 'L', 'AH0']), 'palladium': ('NN', ['P', 'AH0', 'L', 'EY1', 'D', 'IY0', 'AH0', 'M']), 'pallas': ('NNS', ['P', 'AE1', 'L', 'AH0', 'S']), 'pallet': ('NN', ['P', 'AE1', 'L', 'AH0', 'T']), 'palliative': ('NN', ['P', 'AE1', 'L', 'IY0', 'AH0', 'T', 'IH0', 'V']), 'pallid': ('NN', ['P', 'AE1', 'L', 'AH0', 'D']), 'pallone': ('NN', ['P', 'AA0', 'L', 'OW1', 'N', 'IY0']), 'palm': ('NN', ['P', 'AA1', 'M']), 'palmer': ('NN', ['P', 'AA1', 'M', 'ER0']), 'palmetto': ('NN', ['P', 'AE0', 'L', 'M', 'EH1', 'T', 'OW0']), 'palmistry': ('NN', ['P', 'AA1', 'M', 'IH0', 'S', 'T', 'R', 'IY0']), 'q': ('NN', ['K', 'Y', 'UW1']), 'qua': ('NN', ['K', 'W', 'AA1']), 'quack': ('NN', ['K', 'W', 'AE1', 'K']), 'quackery': ('NN', ['K', 'W', 'AE1', 'K', 'ER0', 'IY0']), 'quad': ('NN', ['K', 'W', 'AA1', 'D']), 'quade': ('NN', ['K', 'W', 'EY1', 'D']), 'quadra': ('NN', ['K', 'W', 'AE1', 'D', 'R', 'AH0']), 'r': ('NN', ['AA1', 'R']), 'ra': ('NN', ['R', 'AA1']), 'rab': ('NN', ['R', 'AE1', 'B']), 'rabbis': ('NN', ['R', 'AE1', 'B', 'AY2', 'Z']), 'rabbi': ('NN', ['R', 'AE1', 'B', 'AY2']), 'rabbinical': ('JJ', ['R', 'AH0', 'B', 'IH1', 'N', 'IH0', 'K', 'AH0', 'L']), 'rabbit': ('NN', ['R', 'AE1', 'B', 'AH0', 'T']), 'rabble': ('JJ', ['R', 'AE1', 'B', 'AH0', 'L']), 'rabid': ('NN', ['R', 'AE1', 'B', 'IH0', 'D']), 'rabies': ('NNS', ['R', 'EY1', 'B', 'IY0', 'Z']), 'raccoon': ('NN', ['R', 'AE0', 'K', 'UW1', 'N']), 'race': ('NN', ['R', 'EY1', 'S']), 'raced': ('VBN', ['R', 'EY1', 'S', 'T']), 'racing': ('VBG', ['R', 'EY1', 'S', 'IH0', 'NG']), 'racer': ('NN', ['R', 'EY1', 'S', 'ER0']), 'rach': ('NN', ['R', 'AE1', 'CH']), 'racial': ('JJ', ['R', 'EY1', 'SH', 'AH0', 'L']), 'raciness': ('NN', ['R', 'EY1', 'S', 'IY0', 'N', 'AH0', 'S']), 'rack': ('NN', ['R', 'AE1', 'K']), 'racked': ('VBN', 
['R', 'AE1', 'K', 'T']), 'racking': ('VBG', ['R', 'AE1', 'K', 'IH0', 'NG']), 'racker': ('NN', ['R', 'AE1', 'K', 'ER0']), 'racket': ('NN', ['R', 'AE1', 'K', 'IH0', 'T']), 'raconteur': ('NN', ['R', 'AE2', 'K', 'AA0', 'N', 'T', 'UW1', 'R']), 'racquet': ('NN', ['R', 'AE1', 'K', 'IH0', 'T']), 'racy': ('NN', ['R', 'EY1', 'S', 'IY0']), 'rad': ('NN', ['R', 'AE1', 'D']), 'radde': ('NN', ['R', 'AE1', 'D']), 'radial': ('NN', ['R', 'EY1', 'D', 'IY0', 'AH0', 'L']), 'radially': ('RB', ['R', 'EY1', 'D', 'IY0', 'AH0', 'L', 'IY0']), 'radiance': ('NN', ['R', 'EY1', 'D', 'IY0', 'AH0', 'N', 'S']), 'radiant': ('NN', ['R', 'EY1', 'D', 'IY0', 'AH0', 'N', 'T']), 'radiated': ('VBN', ['R', 'EY1', 'D', 'IY0', 'EY2', 'T', 'AH0', 'D']), 'radiating': ('VBG', ['R', 'EY1', 'D', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'radiate': ('NN', ['R', 'EY1', 'D', 'IY0', 'AH0', 'T']), 'radiation': ('NN', ['R', 'EY2', 'D', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'radiator': ('NN', ['R', 'EY1', 'D', 'IY0', 'EY2', 'T', 'ER0']), 'radical': ('JJ', ['R', 'AE1', 'D', 'AH0', 'K', 'AH0', 'L']), 'radicalism': ('NN', ['R', 'AE1', 'D', 'IH0', 'K', 'AH0', 'L', 'IH2', 'Z', 'AH0', 'M']), 'radically': ('RB', ['R', 'AE1', 'D', 'IH0', 'K', 'L', 'IY0']), 'radish': ('NN', ['R', 'AE1', 'D', 'IH0', 'SH']), 'radius': ('NN', ['R', 'EY1', 'D', 'IY0', 'AH0', 'S']), 'raff': ('NN', ['R', 'AE1', 'F']), 'raffish': ('NN', ['R', 'AE1', 'F', 'IH0', 'SH']), 'raffle': ('NN', ['R', 'AE1', 'F', 'AH0', 'L']), 'raft': ('NN', ['R', 'AE1', 'F', 'T']), 'rafted': ('VBN', ['R', 'AE1', 'F', 'T', 'AH0', 'D']), 'rafting': ('VBG', ['R', 'AE1', 'F', 'T', 'IH0', 'NG']), 'rafter': ('NN', ['R', 'AE1', 'F', 'T', 'ER0']), 'rag': ('NN', ['R', 'AE1', 'G']), 'ragged': ('VBN', ['R', 'AE1', 'G', 'AH0', 'D']), 'rage': ('NN', ['R', 'EY1', 'JH']), 'raged': ('VBN', ['R', 'EY1', 'JH', 'D']), 'raging': ('VBG', ['R', 'EY1', 'JH', 'IH0', 'NG']), 'ragmen': ('NNS', ['R', 'AE1', 'G', 'M', 'AH0', 'N']), 'ragweed': ('NN', ['R', 'AE1', 'G', 'W', 'IY2', 'D']), 'raia': ('NN', ['R', 'AA1', 'Y', 
'AH0']), 'raid': ('NN', ['R', 'EY1', 'D']), 'raided': ('VBN', ['R', 'EY1', 'D', 'IH0', 'D']), 'raiding': ('VBG', ['R', 'EY1', 'D', 'IH0', 'NG']), 'raider': ('NN', ['R', 'EY1', 'D', 'ER0']), 'rail': ('NN', ['R', 'EY1', 'L']), 'railed': ('VBN', ['R', 'EY1', 'L', 'D']), 'railing': ('VBG', ['R', 'EY1', 'L', 'IH0', 'NG']), 'railroad': ('NN', ['R', 'EY1', 'L', 'R', 'OW2', 'D']), 'railway': ('NN', ['R', 'EY1', 'L', 'W', 'EY2']), 'railroading': ('VBG', ['R', 'EY1', 'L', 'R', 'OW2', 'D', 'IH0', 'NG']), 'rain': ('NN', ['R', 'EY1', 'N']), 'rained': ('VBN', ['R', 'EY1', 'N', 'D']), 'raining': ('VBG', ['R', 'EY1', 'N', 'IH0', 'NG']), 'rainbow': ('NN', ['R', 'EY1', 'N', 'B', 'OW2']), 'raindrop': ('NN', ['R', 'EY1', 'N', 'D', 'R', 'AA2', 'P']), 'rainfall': ('NN', ['R', 'EY1', 'N', 'F', 'AO2', 'L']), 'rainy': ('NN', ['R', 'EY1', 'N', 'IY0']), 'rais': ('NN', ['R', 'EY1', 'S']), 'raised': ('VBN', ['R', 'EY1', 'Z', 'D']), 'raising': ('VBG', ['R', 'EY1', 'Z', 'IH0', 'NG']), 'raise': ('NN', ['R', 'EY1', 'Z']), 'raiser': ('NN', ['R', 'EY1', 'Z', 'ER0']), 'raisin': ('NN', ['R', 'EY1', 'Z', 'IH0', 'N']), 'raj': ('NN', ['R', 'AA1', 'ZH']), 'rake': ('NN', ['R', 'EY1', 'K']), 'raked': ('VBN', ['R', 'EY1', 'K', 'T']), 'raking': ('VBG', ['R', 'EY1', 'K', 'IH0', 'NG']), 'raker': ('NN', ['R', 'EY1', 'K', 'ER0']), 'rakish': ('NN', ['R', 'EY1', 'K', 'IH0', 'SH']), 'rallied': ('VBD', ['R', 'AE1', 'L', 'IY0', 'D']), 'rallying': ('VBG', ['R', 'AE1', 'L', 'IY0', 'IH0', 'NG']), 'rally': ('NN', ['R', 'AE1', 'L', 'IY0']), 'rallies': ('NNS', ['R', 'AE1', 'L', 'IY0', 'Z']), 'ralph': ('NN', ['R', 'AE1', 'L', 'F']), 'ram': ('NN', ['R', 'AE1', 'M']), 'rammed': ('VBN', ['R', 'AE1', 'M', 'D']), 'ramming': ('VBG', ['R', 'AE1', 'M', 'IH0', 'NG']), 'ramadan': ('NN', ['R', 'AE1', 'M', 'AH0', 'D', 'AH0', 'N']), 'ramage': ('NN', ['R', 'AE1', 'M', 'IH0', 'JH']), 'rambled': ('VBN', ['R', 'AE1', 'M', 'B', 'AH0', 'L', 'D']), 'rambling': ('VBG', ['R', 'AE1', 'M', 'B', 'L', 'IH0', 'NG']), 'ramble': ('JJ', ['R', 'AE1', 'M', 
'B', 'AH0', 'L']), 'rambler': ('NN', ['R', 'AE1', 'M', 'B', 'L', 'ER0']), 'ramification': ('NN', ['R', 'AE2', 'M', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'ramify': ('VB', ['R', 'AE1', 'M', 'AH0', 'F', 'AY2']), 'rammel': ('NN', ['R', 'AE1', 'M', 'AH0', 'L']), 'rammer': ('NN', ['R', 'AE1', 'M', 'ER0']), 'ramping': ('VBG', ['R', 'AE1', 'M', 'P', 'IH0', 'NG']), 'ramp': ('NN', ['R', 'AE1', 'M', 'P']), 'rampage': ('NN', ['R', 'AE1', 'M', 'P', 'EY2', 'JH']), 'rampant': ('NN', ['R', 'AE1', 'M', 'P', 'AH0', 'N', 'T']), 'ramrod': ('NN', ['R', 'AE1', 'M', 'R', 'AA2', 'D']), 'ramshackle': ('NN', ['R', 'AE1', 'M', 'SH', 'AE2', 'K', 'AH0', 'L']), 'rami': ('NN', ['R', 'AA1', 'M', 'IY0']), 'ramus': ('NN', ['R', 'EY1', 'M', 'AH0', 'S']), 'ran': ('NN', ['R', 'AE1', 'N']), 'rana': ('NN', ['R', 'AE1', 'N', 'AH0']), 'rance': ('NN', ['R', 'AE1', 'N', 'S']), 'ranch': ('NN', ['R', 'AE1', 'N', 'CH']), 'ranchero': ('NN', ['R', 'AA0', 'N', 'CH', 'EH1', 'R', 'OW0']), 'rancho': ('NN', ['R', 'AE1', 'N', 'CH', 'OW0']), 'rancid': ('NN', ['R', 'AE1', 'N', 'S', 'IH0', 'D']), 'rancidity': ('NN', ['R', 'AE0', 'N', 'S', 'IH1', 'D', 'IH0', 'T', 'IY0']), 'rancor': ('NN', ['R', 'AE1', 'NG', 'K', 'ER0']), 'rancorous': ('JJ', ['R', 'AE1', 'NG', 'K', 'ER0', 'AH0', 'S']), 'rand': ('NN', ['R', 'AE1', 'N', 'D']), 'random': ('NN', ['R', 'AE1', 'N', 'D', 'AH0', 'M']), 'randomly': ('RB', ['R', 'AE1', 'N', 'D', 'AH0', 'M', 'L', 'IY0']), 'ranee': ('NN', ['R', 'AE1', 'N', 'IY1']), 'rang': ('NN', ['R', 'AE1', 'NG']), 'ranged': ('VBD', ['R', 'EY1', 'N', 'JH', 'D']), 'ranging': ('VBG', ['R', 'EY1', 'N', 'JH', 'IH0', 'NG']), 'range': ('NN', ['R', 'EY1', 'N', 'JH']), 'ranger': ('NN', ['R', 'EY1', 'N', 'JH', 'ER0']), 'rani': ('NN', ['R', 'AA1', 'N', 'IY0']), 'rank': ('NN', ['R', 'AE1', 'NG', 'K']), 'ranked': ('NNS', ['R', 'AE1', 'NG', 'K', 'T']), 'ranking': ('VBG', ['R', 'AE1', 'NG', 'K', 'IH0', 'NG']), 'ranker': ('NN', ['R', 'AE1', 'NG', 'K', 'ER0']), 'rankled': ('VBN', ['R', 'AE1', 'NG', 'K', 'AH0', 'L', 
'D']), 'rankling': ('VBG', ['R', 'AE1', 'NG', 'K', 'L', 'IH0', 'NG']), 'rankle': ('NN', ['R', 'AE1', 'NG', 'K', 'AH0', 'L']), 'ransacked': ('VBN', ['R', 'AE1', 'N', 'S', 'AE2', 'K', 'T']), 'ransacking': ('VBG', ['R', 'AE1', 'N', 'S', 'AE2', 'K', 'IH0', 'NG']), 'ransack': ('NN', ['R', 'AE1', 'N', 'S', 'AE2', 'K']), 'ransom': ('NN', ['R', 'AE1', 'N', 'S', 'AH0', 'M']), 'ranted': ('VBN', ['R', 'AE1', 'N', 'T', 'AH0', 'D']), 'ranting': ('VBG', ['R', 'AE1', 'N', 'T', 'IH0', 'NG']), 'rant': ('NN', ['R', 'AE1', 'N', 'T']), 'rap': ('NN', ['R', 'AE1', 'P']), 'rapped': ('NN', ['R', 'AE1', 'P', 'T']), 'rapping': ('VBG', ['R', 'AE1', 'P', 'IH0', 'NG']), 'rapt': ('NN', ['R', 'AE1', 'P', 'T']), 'rapacious': ('JJ', ['R', 'AH0', 'P', 'AE1', 'SH', 'IH0', 'S']), 'rape': ('NN', ['R', 'EY1', 'P']), 'raphaelite': ('NN', ['R', 'AE0', 'F', 'Y', 'EH1', 'L', 'AY0', 'T']), 'rapid': ('JJ', ['R', 'AE1', 'P', 'AH0', 'D']), 'rapidity': ('NN', ['R', 'AH0', 'P', 'IH1', 'D', 'AH0', 'T', 'IY0']), 'rapidly': ('RB', ['R', 'AE1', 'P', 'AH0', 'D', 'L', 'IY0']), 'rapier': ('NN', ['R', 'EY1', 'P', 'IY0', 'ER0']), 'rapper': ('NN', ['R', 'AE1', 'P', 'ER0']), 'rapport': ('NN', ['R', 'AE0', 'P', 'AO1', 'R']), 'raptor': ('NN', ['R', 'AE1', 'P', 'T', 'ER0']), 'raptorial': ('NN', ['R', 'AE2', 'P', 'T', 'AO1', 'R', 'IY0', 'AH0', 'L']), 'rapture': ('NN', ['R', 'AE1', 'P', 'CH', 'ER0']), 'rapturous': ('JJ', ['R', 'AE1', 'P', 'CH', 'ER0', 'AH0', 'S']), 'rare': ('NN', ['R', 'EH1', 'R']), 'rarefied': ('VBN', ['R', 'EH1', 'R', 'AH0', 'F', 'AY0', 'D']), 'rarefy': ('NN', ['R', 'EH1', 'R', 'AH0', 'F', 'AY0']), 'rarely': ('RB', ['R', 'EH1', 'R', 'L', 'IY0']), 'rareness': ('NN', ['R', 'EH1', 'R', 'N', 'IH0', 'S']), 'rarities': ('NNS', ['R', 'EH1', 'R', 'IH0', 'T', 'IY0', 'Z']), 'rarity': ('NN', ['R', 'EH1', 'R', 'AH0', 'T', 'IY0']), 'ras': ('NNS', ['R', 'AE1', 'S']), 'rascal': ('JJ', ['R', 'AE1', 'S', 'K', 'AH0', 'L']), 'rase': ('NN', ['R', 'EY1', 'Z']), 'rash': ('NN', ['R', 'AE1', 'SH']), 'rasped': ('NN', ['R', 'AE1', 
'S', 'P', 'T']), 'rasp': ('NN', ['R', 'AE1', 'S', 'P']), 'raspberry': ('NN', ['R', 'AE1', 'Z', 'B', 'EH2', 'R', 'IY0']), 'raspy': ('NN', ['R', 'AE1', 'S', 'P', 'IY0']), 'rat': ('NN', ['R', 'AE1', 'T']), 'rata': ('NN', ['R', 'AE1', 'T', 'AH0']), 'ratchet': ('NN', ['R', 'AE1', 'CH', 'AH0', 'T']), 'rate': ('NN', ['R', 'EY1', 'T']), 'rated': ('VBN', ['R', 'EY1', 'T', 'AH0', 'D']), 'rating': ('NN', ['R', 'EY1', 'T', 'IH0', 'NG']), 'ratepayer': ('NN', ['R', 'EY1', 'T', 'P', 'EY2', 'ER0']), 'rater': ('NN', ['R', 'EY1', 'T', 'ER0']), 'rath': ('NN', ['R', 'AE1', 'TH']), 'rathe': ('NN', ['R', 'EY1', 'DH']), 'rather': ('RB', ['R', 'AE1', 'DH', 'ER0']), 'ratification': ('NN', ['R', 'AE2', 'T', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'ratified': ('VBN', ['R', 'AE1', 'T', 'AH0', 'F', 'AY2', 'D']), 'ratifying': ('VBG', ['R', 'AE1', 'T', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'ratify': ('VB', ['R', 'AE1', 'T', 'AH0', 'F', 'AY2']), 'ratio': ('NN', ['R', 'EY1', 'SH', 'IY0', 'OW2']), 'ration': ('NN', ['R', 'AE1', 'SH', 'AH0', 'N']), 'rational': ('JJ', ['R', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'rationale': ('NN', ['R', 'AE2', 'SH', 'AH0', 'N', 'AE1', 'L']), 'rationality': ('NN', ['R', 'AE2', 'SH', 'AH0', 'N', 'AE1', 'L', 'IH0', 'T', 'IY0']), 'rationalization': ('NN', ['R', 'AE2', 'SH', 'AH0', 'N', 'AH0', 'L', 'IH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'rationalize': ('VB', ['R', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'AY2', 'Z']), 'rationally': ('RB', ['R', 'AE1', 'SH', 'AH0', 'N', 'AH0', 'L', 'IY0']), 'raton': ('NN', ['R', 'AH0', 'T', 'OW1', 'N']), 'rattan': ('NN', ['R', 'AE0', 'T', 'AE1', 'N']), 'rattled': ('VBN', ['R', 'AE1', 'T', 'AH0', 'L', 'D']), 'rattling': ('VBG', ['R', 'AE1', 'T', 'L', 'IH0', 'NG']), 'rattle': ('NN', ['R', 'AE1', 'T', 'AH0', 'L']), 'rattler': ('NN', ['R', 'AE1', 'T', 'AH0', 'L', 'ER0']), 'rattlesnake': ('NN', ['R', 'AE1', 'T', 'AH0', 'L', 'S', 'N', 'EY2', 'K']), 'raucous': ('JJ', ['R', 'AO1', 'K', 'AH0', 'S']), 'ravage': ('NN', ['R', 'AE1', 'V', 'IH0', 
'JH']), 'ravaged': ('VBN', ['R', 'AE1', 'V', 'IH0', 'JH', 'D']), 'ravaging': ('VBG', ['R', 'AE1', 'V', 'IH0', 'JH', 'IH0', 'NG']), 'rave': ('VB', ['R', 'EY1', 'V']), 'raved': ('VBN', ['R', 'EY1', 'V', 'D']), 'raving': ('VBG', ['R', 'EY1', 'V', 'IH0', 'NG']), 'raveled': ('VBN', ['R', 'AE1', 'V', 'AH0', 'L', 'D']), 'raveling': ('VBG', ['R', 'AE1', 'V', 'AH0', 'L', 'IH0', 'NG']), 'ravel': ('NN', ['R', 'AE1', 'V', 'AH0', 'L']), 'raven': ('NN', ['R', 'EY1', 'V', 'AH0', 'N']), 'ravenous': ('JJ', ['R', 'AE1', 'V', 'AH0', 'N', 'AH0', 'S']), 'raver': ('NN', ['R', 'EY1', 'V', 'ER0']), 'ravin': ('NN', ['R', 'AE1', 'V', 'IH0', 'N']), 'ravine': ('NN', ['R', 'AH0', 'V', 'IY1', 'N']), 'ravishing': ('VBG', ['R', 'AE1', 'V', 'IH0', 'SH', 'IH0', 'NG']), 'raw': ('NN', ['R', 'AA1']), 'rawhide': ('NN', ['R', 'AO1', 'HH', 'AY2', 'D']), 'rawness': ('NN', ['R', 'AO1', 'N', 'IH0', 'S']), 'ray': ('NN', ['R', 'EY1']), 'rayed': ('NN', ['R', 'EY1', 'D']), 'rayon': ('NN', ['R', 'EY1', 'AH0', 'N']), 'raze': ('NN', ['R', 'EY1', 'Z']), 'razed': ('VBN', ['R', 'EY1', 'Z', 'D']), 'razing': ('VBG', ['R', 'EY1', 'Z', 'IH0', 'NG']), 'razor': ('NN', ['R', 'EY1', 'Z', 'ER0']), 'razorback': ('NN', ['R', 'EY1', 'Z', 'ER0', 'B', 'AE2', 'K']), 're': ('NN', ['R', 'EY1']), 'reabsorb': ('NN', ['R', 'IY2', 'AH0', 'B', 'Z', 'AO1', 'R', 'B']), 'reach': ('NN', ['R', 'IY1', 'CH']), 'reached': ('VBN', ['R', 'IY1', 'CH', 'T']), 'reaching': ('VBG', ['R', 'IY1', 'CH', 'IH0', 'NG']), 'reachable': ('JJ', ['R', 'IY1', 'CH', 'AH0', 'B', 'AH0', 'L']), 'react': ('NN', ['R', 'IY0', 'AE1', 'K', 'T']), 'reaction': ('NN', ['R', 'IY0', 'AE1', 'K', 'SH', 'AH0', 'N']), 'reactionary': ('NN', ['R', 'IY0', 'AE1', 'K', 'SH', 'AH0', 'N', 'EH2', 'R', 'IY0']), 'reactionaries': ('NNS', ['R', 'IY0', 'AE1', 'K', 'SH', 'AH0', 'N', 'EH2', 'R', 'IY0', 'Z']), 'reactive': ('NN', ['R', 'IY0', 'AE1', 'K', 'T', 'IH0', 'V']), 'read': ('NN', ['R', 'EH1', 'D']), 'reading': ('NN', ['R', 'EH1', 'D', 'IH0', 'NG']), 'readability': ('NN', ['R', 'IY2', 'D', 
'AH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'readable': ('JJ', ['R', 'IY1', 'D', 'AH0', 'B', 'AH0', 'L']), 'reader': ('NN', ['R', 'IY1', 'D', 'ER0']), 'readership': ('NN', ['R', 'IY1', 'D', 'ER0', 'SH', 'IH2', 'P']), 'readily': ('RB', ['R', 'EH1', 'D', 'AH0', 'L', 'IY0']), 'readiness': ('NN', ['R', 'EH1', 'D', 'IY0', 'N', 'AH0', 'S']), 'readjust': ('NN', ['R', 'IY2', 'AH0', 'JH', 'AH1', 'S', 'T']), 'readjustment': ('NN', ['R', 'IY0', 'AH0', 'JH', 'AH1', 'S', 'T', 'M', 'AH0', 'N', 'T']), 'readmit': ('NN', ['R', 'IY2', 'AH0', 'D', 'M', 'IH1', 'T']), 'ready': ('JJ', ['R', 'EH1', 'D', 'IY0']), 'reaffirm': ('NN', ['R', 'IY2', 'AH0', 'F', 'ER1', 'M']), 'reaffirmation': ('NN', ['R', 'IY2', 'AE0', 'F', 'ER0', 'M', 'EY1', 'SH', 'AH0', 'N']), 'reagent': ('NN', ['R', 'IY0', 'EY1', 'JH', 'AH0', 'N', 'T']), 'real': ('JJ', ['R', 'IY1', 'L']), 'realism': ('NN', ['R', 'IY1', 'L', 'IH0', 'Z', 'AH0', 'M']), 'realist': ('NN', ['R', 'IY1', 'L', 'IH0', 'S', 'T']), 'realistic': ('JJ', ['R', 'IY2', 'AH0', 'L', 'IH1', 'S', 'T', 'IH0', 'K']), 'realistically': ('RB', ['R', 'IY2', 'AH0', 'L', 'IH1', 'S', 'T', 'IH0', 'K', 'L', 'IY0']), 'realities': ('NNS', ['R', 'IY0', 'AE1', 'L', 'AH0', 'T', 'IY0', 'Z']), 'reality': ('NN', ['R', 'IY0', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'realizable': ('JJ', ['R', 'IY2', 'AH0', 'L', 'AY1', 'Z', 'AH0', 'B', 'AH0', 'L']), 'realization': ('NN', ['R', 'IY1', 'L', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'realized': ('VBN', ['R', 'IY1', 'L', 'AY2', 'Z', 'D']), 'realizing': ('VBG', ['R', 'IY1', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'realize': ('VB', ['R', 'IY1', 'L', 'AY2', 'Z']), 'really': ('RB', ['R', 'IH1', 'L', 'IY0']), 'realm': ('NN', ['R', 'EH1', 'L', 'M']), 'realty': ('NN', ['R', 'IY1', 'AH0', 'L', 'T', 'IY0']), 'ream': ('NN', ['R', 'IY1', 'M']), 'reamer': ('NN', ['R', 'IY1', 'M', 'ER0']), 'reaped': ('NN', ['R', 'IY1', 'P', 'T']), 'reaping': ('VBG', ['R', 'IY1', 'P', 'IH0', 'NG']), 'reap': ('NN', ['R', 'IY1', 'P']), 'reaper': ('NN', ['R', 'IY1', 'P', 'ER0']), 
'reappear': ('NN', ['R', 'IY2', 'AH0', 'P', 'IH1', 'R']), 'reappearance': ('NN', ['R', 'IY2', 'AH0', 'P', 'IH1', 'R', 'AH0', 'N', 'S']), 'reapply': ('NN', ['R', 'IY2', 'AH0', 'P', 'L', 'IY0']), 'reappoint': ('NN', ['R', 'IY2', 'AH0', 'P', 'OY1', 'N', 'T']), 'reappointment': ('NN', ['R', 'IY2', 'AH0', 'P', 'OY1', 'N', 'T', 'M', 'AH0', 'N', 'T']), 'reapportionment': ('NN', ['R', 'IY2', 'AH0', 'P', 'AO1', 'R', 'SH', 'AH0', 'N', 'M', 'AH0', 'N', 'T']), 'rear': ('NN', ['R', 'IH1', 'R']), 'reared': ('VBN', ['R', 'IH1', 'R', 'D']), 'rearing': ('VBG', ['R', 'IH1', 'R', 'IH0', 'NG']), 'rearrange': ('NN', ['R', 'IY2', 'ER0', 'EY1', 'N', 'JH']), 'rearrangement': ('NN', ['R', 'IY0', 'ER0', 'EY1', 'N', 'JH', 'M', 'AH0', 'N', 'T']), 'reason': ('NN', ['R', 'IY1', 'Z', 'AH0', 'N']), 'reasoned': ('VBN', ['R', 'IY1', 'Z', 'AH0', 'N', 'D']), 'reasoning': ('VBG', ['R', 'IY1', 'Z', 'AH0', 'N', 'IH0', 'NG']), 'reasonable': ('JJ', ['R', 'IY1', 'Z', 'AH0', 'N', 'AH0', 'B', 'AH0', 'L']), 'reasonableness': ('NN', ['R', 'IY1', 'Z', 'AH0', 'N', 'AH0', 'B', 'AH2', 'L', 'N', 'AH0', 'S']), 'reasonably': ('RB', ['R', 'IY1', 'Z', 'AH0', 'N', 'AH0', 'B', 'L', 'IY0']), 'reasoner': ('NN', ['R', 'IY1', 'Z', 'AH0', 'N', 'ER0']), 'reassemble': ('JJ', ['R', 'IY2', 'AH0', 'S', 'EH1', 'M', 'B', 'AH0', 'L']), 'reassert': ('NN', ['R', 'IY2', 'AH0', 'S', 'ER1', 'T']), 'reassertion': ('NN', ['R', 'IY2', 'AH0', 'S', 'ER1', 'SH', 'AH0', 'N']), 'reassessment': ('NN', ['R', 'IY2', 'AH0', 'S', 'EH1', 'S', 'M', 'AH0', 'N', 'T']), 'reassign': ('NN', ['R', 'IY2', 'AH0', 'S', 'AY1', 'N']), 'reassignment': ('NN', ['R', 'IY0', 'AH0', 'S', 'AY1', 'N', 'M', 'AH0', 'N', 'T']), 'reassume': ('NN', ['R', 'IY0', 'AH0', 'S', 'UW1', 'M']), 'reassurance': ('NN', ['R', 'IY2', 'AH0', 'SH', 'UH1', 'R', 'AH0', 'N', 'S']), 'reassure': ('NN', ['R', 'IY2', 'AH0', 'SH', 'UH1', 'R']), 'reattach': ('NN', ['R', 'IY1', 'AH0', 'T', 'AE1', 'CH']), 'reaume': ('NN', ['R', 'UW1', 'M']), 'reave': ('VB', ['R', 'IY1', 'V']), 'reaver': ('NN', ['R', 
'IY1', 'V', 'ER0']), 'rebate': ('NN', ['R', 'IY1', 'B', 'EY2', 'T']), 'rebel': ('NN', ['R', 'EH1', 'B', 'AH0', 'L']), 'rebelled': ('VBN', ['R', 'IH0', 'B', 'EH1', 'L', 'D']), 'rebelling': ('VBG', ['R', 'IH0', 'B', 'EH1', 'L', 'IH0', 'NG']), 'rebellion': ('NN', ['R', 'IH0', 'B', 'EH1', 'L', 'Y', 'AH0', 'N']), 'rebellious': ('JJ', ['R', 'IH0', 'B', 'EH1', 'L', 'Y', 'AH0', 'S']), 'reborn': ('NN', ['R', 'IY1', 'B', 'AO1', 'R', 'N']), 'rebound': ('NN', ['R', 'IY0', 'B', 'AW1', 'N', 'D']), 'rebuff': ('NN', ['R', 'IH0', 'B', 'AH1', 'F']), 'rebuffed': ('NN', ['R', 'IH0', 'B', 'AH1', 'F', 'T']), 'rebuffing': ('VBG', ['R', 'IY0', 'B', 'AH1', 'F', 'IH0', 'NG']), 'rebuild': ('NN', ['R', 'IY0', 'B', 'IH1', 'L', 'D']), 'rebuilder': ('NN', ['R', 'IY0', 'B', 'IH1', 'L', 'D', 'ER0']), 'rebuked': ('VBN', ['R', 'IH0', 'B', 'Y', 'UW1', 'K', 'T']), 'rebuking': ('VBG', ['R', 'IY0', 'B', 'Y', 'UW1', 'K', 'IH0', 'NG']), 'rebuke': ('NN', ['R', 'IY0', 'B', 'Y', 'UW1', 'K']), 'rebus': ('NN', ['R', 'IY1', 'B', 'AH0', 'S']), 'rebutted': ('VBN', ['R', 'IH0', 'B', 'AH1', 'T', 'IH0', 'D']), 'rebutting': ('VBG', ['R', 'IH0', 'B', 'AH1', 'T', 'IH0', 'NG']), 'rebut': ('NN', ['R', 'IH0', 'B', 'AH1', 'T']), 'rebuttal': ('NN', ['R', 'IH0', 'B', 'AH1', 'T', 'AH0', 'L']), 'recalcitrant': ('NN', ['R', 'IH0', 'K', 'AE1', 'L', 'S', 'IH0', 'T', 'R', 'AH0', 'N', 'T']), 'recall': ('NN', ['R', 'IY1', 'K', 'AO2', 'L']), 'recanted': ('VBN', ['R', 'IY0', 'K', 'AE1', 'N', 'T', 'IH0', 'D']), 'recanting': ('VBG', ['R', 'AH0', 'K', 'AE1', 'N', 'T', 'IH0', 'NG']), 'recant': ('NN', ['R', 'IY0', 'K', 'AE1', 'N', 'T']), 'recantation': ('NN', ['R', 'EH2', 'K', 'AH0', 'N', 'T', 'EY1', 'SH', 'AH0', 'N']), 'recapitulate': ('NN', ['R', 'IY2', 'K', 'AH0', 'P', 'IH1', 'CH', 'AH0', 'L', 'EY2', 'T']), 'recapture': ('NN', ['R', 'IY0', 'K', 'AE1', 'P', 'CH', 'ER0']), 'recast': ('NN', ['R', 'IY0', 'K', 'AE1', 'S', 'T']), 'receded': ('VBN', ['R', 'AH0', 'S', 'IY1', 'D', 'AH0', 'D']), 'receding': ('VBG', ['R', 'IH0', 'S', 'IY1', 'D', 
'IH0', 'NG']), 'recede': ('NN', ['R', 'IH0', 'S', 'IY1', 'D']), 'receipt': ('NN', ['R', 'IH0', 'S', 'IY1', 'T']), 'receivable': ('NN', ['R', 'IH0', 'S', 'IY1', 'V', 'AH0', 'B', 'AH0', 'L']), 'received': ('VBN', ['R', 'AH0', 'S', 'IY1', 'V', 'D']), 'receiving': ('VBG', ['R', 'AH0', 'S', 'IY1', 'V', 'IH0', 'NG']), 'receive': ('NN', ['R', 'AH0', 'S', 'IY1', 'V']), 'receiver': ('NN', ['R', 'AH0', 'S', 'IY1', 'V', 'ER0']), 'receivership': ('NN', ['R', 'IH0', 'S', 'IY1', 'V', 'ER0', 'SH', 'IH2', 'P']), 'recent': ('JJ', ['R', 'IY1', 'S', 'AH0', 'N', 'T']), 'recently': ('RB', ['R', 'IY1', 'S', 'AH0', 'N', 'T', 'L', 'IY0']), 'receptacle': ('NN', ['R', 'AH0', 'S', 'EH1', 'P', 'T', 'AH0', 'K', 'AH0', 'L']), 'reception': ('NN', ['R', 'IH0', 'S', 'EH1', 'P', 'SH', 'AH0', 'N']), 'receptive': ('NN', ['R', 'IH0', 'S', 'EH1', 'P', 'T', 'IH0', 'V']), 'receptivity': ('NN', ['R', 'IY1', 'S', 'EH2', 'P', 'T', 'IH1', 'V', 'IH0', 'T', 'IY0']), 'recess': ('NN', ['R', 'IH0', 'S', 'EH1', 'S']), 'recessed': ('VBN', ['R', 'IH0', 'S', 'EH1', 'S', 'T']), 'recessing': ('VBG', ['R', 'IY2', 'S', 'EH1', 'S', 'IH0', 'NG']), 'recession': ('NN', ['R', 'IH0', 'S', 'EH1', 'SH', 'AH0', 'N']), 'recessive': ('NN', ['R', 'AH0', 'S', 'EH1', 'S', 'IH0', 'V']), 'recharge': ('NN', ['R', 'IY0', 'CH', 'AA1', 'R', 'JH']), 'recipes': ('NNS', ['R', 'EH1', 'S', 'AH0', 'P', 'IY0', 'Z']), 'recipe': ('NN', ['R', 'EH1', 'S', 'AH0', 'P', 'IY0']), 'recipient': ('NN', ['R', 'AH0', 'S', 'IH1', 'P', 'IY0', 'AH0', 'N', 'T']), 'reciprocal': ('JJ', ['R', 'IH0', 'S', 'IH1', 'P', 'R', 'AH0', 'K', 'AH0', 'L']), 'reciprocated': ('VBN', ['R', 'IH0', 'S', 'IH1', 'P', 'R', 'AH0', 'K', 'EY2', 'T', 'IH0', 'D']), 'reciprocating': ('VBG', ['R', 'IH0', 'S', 'IH1', 'P', 'R', 'AH0', 'K', 'EY2', 'T', 'IH0', 'NG']), 'reciprocate': ('NN', ['R', 'IH0', 'S', 'IH1', 'P', 'R', 'AH0', 'K', 'EY2', 'T']), 'reciprocity': ('NN', ['R', 'EH2', 'S', 'IH0', 'P', 'R', 'AA1', 'S', 'IH0', 'T', 'IY0']), 'recision': ('NN', ['R', 'IH0', 'S', 'IH1', 'ZH', 'AH0', 
'N']), 'recital': ('NN', ['R', 'AH0', 'S', 'AY1', 'T', 'AH0', 'L']), 'recitation': ('NN', ['R', 'EH2', 'S', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'recited': ('VBN', ['R', 'AH0', 'S', 'AY1', 'T', 'AH0', 'D']), 'reciting': ('VBG', ['R', 'IY0', 'S', 'AY1', 'T', 'IH0', 'NG']), 'recite': ('NN', ['R', 'AH0', 'S', 'AY1', 'T']), 'reck': ('NN', ['R', 'EH1', 'K']), 'reckless': ('NN', ['R', 'EH1', 'K', 'L', 'AH0', 'S']), 'reckoned': ('VBN', ['R', 'EH1', 'K', 'AH0', 'N', 'D']), 'reckoning': ('VBG', ['R', 'EH1', 'K', 'AH0', 'N', 'IH0', 'NG']), 'reckon': ('NN', ['R', 'EH1', 'K', 'AH0', 'N']), 'reclaim': ('NN', ['R', 'IY0', 'K', 'L', 'EY1', 'M']), 'reclaimed': ('VBN', ['R', 'IY0', 'K', 'L', 'EY1', 'M', 'D']), 'reclaiming': ('VBG', ['R', 'IY0', 'K', 'L', 'EY1', 'M', 'IH0', 'NG']), 'reclaimer': ('NN', ['R', 'IY0', 'K', 'L', 'EY1', 'M', 'ER0']), 'reclamation': ('NN', ['R', 'EH2', 'K', 'L', 'AH0', 'M', 'EY1', 'SH', 'AH0', 'N']), 'reclined': ('VBN', ['R', 'IH0', 'K', 'L', 'AY1', 'N', 'D']), 'reclining': ('VBG', ['R', 'IH0', 'K', 'L', 'AY1', 'N', 'IH0', 'NG']), 'recline': ('NN', ['R', 'IH0', 'K', 'L', 'AY1', 'N']), 'recliner': ('NN', ['R', 'IH0', 'K', 'L', 'AY1', 'N', 'ER0']), 'recluse': ('NN', ['R', 'IH0', 'K', 'L', 'UW1', 'S']), 'reclusive': ('NN', ['R', 'IH0', 'K', 'L', 'UW1', 'S', 'IH0', 'V']), 'recognition': ('NN', ['R', 'EH2', 'K', 'AH0', 'G', 'N', 'IH1', 'SH', 'AH0', 'N']), 'recognizable': ('JJ', ['R', 'EH2', 'K', 'AH0', 'G', 'N', 'AY1', 'Z', 'AH0', 'B', 'AH0', 'L']), 'recognizance': ('NN', ['R', 'IH0', 'K', 'AA1', 'N', 'AH0', 'Z', 'AH0', 'N', 'S']), 'recognized': ('VBN', ['R', 'EH1', 'K', 'AH0', 'G', 'N', 'AY2', 'Z', 'D']), 'recognizing': ('VBG', ['R', 'EH1', 'K', 'AH0', 'G', 'N', 'AY2', 'Z', 'IH0', 'NG']), 'recognize': ('VB', ['R', 'EH1', 'K', 'AH0', 'G', 'N', 'AY2', 'Z']), 'recoiled': ('VBN', ['R', 'IY0', 'K', 'OY1', 'L', 'D']), 'recoil': ('NN', ['R', 'IY0', 'K', 'OY1', 'L']), 'recollected': ('VBN', ['R', 'EH2', 'K', 'AH0', 'L', 'EH1', 'K', 'T', 'IH0', 'D']), 'recollecting': 
('VBG', ['R', 'EH2', 'K', 'AH0', 'L', 'EH1', 'K', 'T', 'IH0', 'NG']), 'recollect': ('NN', ['R', 'EH2', 'K', 'AH0', 'L', 'EH1', 'K', 'T']), 'recollection': ('NN', ['R', 'EH2', 'K', 'AH0', 'L', 'EH1', 'K', 'SH', 'AH0', 'N']), 'recombine': ('NN', ['R', 'IY2', 'K', 'AH0', 'M', 'B', 'AY1', 'N']), 'recommended': ('VBN', ['R', 'EH2', 'K', 'AH0', 'M', 'EH1', 'N', 'D', 'AH0', 'D']), 'recommending': ('VBG', ['R', 'EH2', 'K', 'AH0', 'M', 'EH1', 'N', 'D', 'IH0', 'NG']), 'recommend': ('NN', ['R', 'EH2', 'K', 'AH0', 'M', 'EH1', 'N', 'D']), 'recommendation': ('NN', ['R', 'EH2', 'K', 'AH0', 'M', 'AH0', 'N', 'D', 'EY1', 'SH', 'AH0', 'N']), 'recommit': ('NN', ['R', 'IH0', 'K', 'AA1', 'M', 'IH0', 'T']), 'recompense': ('NN', ['R', 'EH1', 'K', 'AH0', 'M', 'P', 'EH2', 'N', 'S']), 'reconciled': ('VBN', ['R', 'EH1', 'K', 'AH0', 'N', 'S', 'AY2', 'L', 'D']), 'reconciling': ('VBG', ['R', 'EH1', 'K', 'AH0', 'N', 'S', 'AY2', 'L', 'IH0', 'NG']), 'reconcile': ('NN', ['R', 'EH1', 'K', 'AH0', 'N', 'S', 'AY2', 'L']), 'reconciliation': ('NN', ['R', 'EH2', 'K', 'AH0', 'N', 'S', 'IH2', 'L', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'recondite': ('NN', ['R', 'EH1', 'K', 'AH0', 'N', 'D', 'AY2', 'T']), 'reconfirm': ('NN', ['R', 'IY2', 'K', 'AH0', 'N', 'F', 'ER1', 'M']), 'reconnaissance': ('NN', ['R', 'IY0', 'K', 'AA1', 'N', 'AH0', 'S', 'AH0', 'N', 'S']), 'reconnoiter': ('NN', ['R', 'IY2', 'K', 'AH0', 'N', 'OY1', 'T', 'ER0']), 'reconquer': ('NN', ['R', 'IY0', 'K', 'AO1', 'NG', 'K', 'ER0']), 'reconsider': ('NN', ['R', 'IY2', 'K', 'AH0', 'N', 'S', 'IH1', 'D', 'ER0']), 'reconsideration': ('NN', ['R', 'IY0', 'K', 'AH0', 'N', 'S', 'IH2', 'D', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'reconstruct': ('NN', ['R', 'IY2', 'K', 'AH0', 'N', 'S', 'T', 'R', 'AH1', 'K', 'T']), 'reconstruction': ('NN', ['R', 'IY2', 'K', 'AH0', 'N', 'S', 'T', 'R', 'AH1', 'K', 'SH', 'AH0', 'N']), 'reconstructive': ('NN', ['R', 'IY2', 'K', 'AH0', 'N', 'S', 'T', 'R', 'AH1', 'K', 'T', 'IH0', 'V']), 'reconvene': ('NN', ['R', 'IY0', 'K', 'AH0', 'N', 'V', 
'IY1', 'N']), 'recorded': ('VBN', ['R', 'AH0', 'K', 'AO1', 'R', 'D', 'AH0', 'D']), 'recording': ('VBG', ['R', 'AH0', 'K', 'AO1', 'R', 'D', 'IH0', 'NG']), 'record': ('NN', ['R', 'AH0', 'K', 'AO1', 'R', 'D']), 'recorder': ('NN', ['R', 'IH0', 'K', 'AO1', 'R', 'D', 'ER0']), 'recount': ('NN', ['R', 'IH0', 'K', 'AW1', 'N', 'T']), 'recoup': ('NN', ['R', 'IH0', 'K', 'UW1', 'P']), 'recourse': ('NN', ['R', 'IY1', 'K', 'AO0', 'R', 'S']), 'recover': ('NN', ['R', 'IH0', 'K', 'AH1', 'V', 'ER0']), 'recovered': ('VBN', ['R', 'AH0', 'K', 'AH1', 'V', 'ER0', 'D']), 'recovering': ('VBG', ['R', 'AH0', 'K', 'AH1', 'V', 'ER0', 'IH0', 'NG']), 'recoverable': ('JJ', ['R', 'IH0', 'K', 'AH1', 'V', 'ER0', 'AH0', 'B', 'AH0', 'L']), 'recovery': ('NN', ['R', 'IH0', 'K', 'AH1', 'V', 'R', 'IY0']), 'recreated': ('VBN', ['R', 'EH1', 'K', 'R', 'IY0', 'EY2', 'T', 'IH0', 'D']), 'recreating': ('VBG', ['R', 'EH1', 'K', 'R', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'recreate': ('NN', ['R', 'EH1', 'K', 'R', 'IY0', 'EY2', 'T']), 'recreation': ('NN', ['R', 'EH2', 'K', 'R', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'recriminate': ('NN', ['R', 'IH0', 'K', 'R', 'IH2', 'M', 'IH0', 'N', 'EY1', 'T']), 'recrimination': ('NN', ['R', 'IH0', 'K', 'R', 'IH2', 'M', 'IH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'recross': ('NN', ['R', 'IY0', 'K', 'R', 'AO1', 'S']), 'recruited': ('VBN', ['R', 'IH0', 'K', 'R', 'UW1', 'T', 'IH0', 'D']), 'recruiting': ('NN', ['R', 'AH0', 'K', 'R', 'UW1', 'T', 'IH0', 'NG']), 'recruit': ('NN', ['R', 'AH0', 'K', 'R', 'UW1', 'T']), 'recruiter': ('NN', ['R', 'IH0', 'K', 'R', 'UW1', 'T', 'ER0']), 'recruitment': ('NN', ['R', 'AH0', 'K', 'R', 'UW1', 'T', 'M', 'AH0', 'N', 'T']), 'rectal': ('NN', ['R', 'EH1', 'K', 'T', 'AH0', 'L']), 'rectangle': ('NN', ['R', 'EH1', 'K', 'T', 'AE0', 'NG', 'G', 'AH0', 'L']), 'rectangular': ('NN', ['R', 'EH0', 'K', 'T', 'AE1', 'NG', 'G', 'Y', 'AH0', 'L', 'ER0']), 'rectification': ('NN', ['R', 'EH2', 'K', 'T', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'rectifier': ('NN', ['R', 'EH1', 
'K', 'T', 'AH0', 'F', 'AY2', 'ER0']), 'rectified': ('VBN', ['R', 'EH1', 'K', 'T', 'AH0', 'F', 'AY2', 'D']), 'rectifying': ('VBG', ['R', 'EH1', 'K', 'T', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'rectify': ('VB', ['R', 'EH1', 'K', 'T', 'AH0', 'F', 'AY2']), 'rectitude': ('NN', ['R', 'EH1', 'K', 'T', 'IH0', 'T', 'UW2', 'D']), 'rector': ('NN', ['R', 'EH1', 'K', 'T', 'ER0']), 'rectory': ('NN', ['R', 'EH1', 'K', 'T', 'ER0', 'IY0']), 'rectum': ('NN', ['R', 'EH1', 'K', 'T', 'AH0', 'M']), 'recuperated': ('VBN', ['R', 'IH0', 'K', 'UW1', 'P', 'ER0', 'EY2', 'T', 'AH0', 'D']), 'recuperating': ('VBG', ['R', 'IH0', 'K', 'UW1', 'P', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'recuperate': ('NN', ['R', 'IH0', 'K', 'UW1', 'P', 'ER0', 'EY2', 'T']), 'recuperation': ('NN', ['R', 'IH0', 'K', 'UW2', 'P', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'recuperative': ('NN', ['R', 'IH0', 'K', 'UW1', 'P', 'ER0', 'AH0', 'T', 'IH0', 'V']), 'recurred': ('VBN', ['R', 'IY0', 'K', 'ER1', 'D']), 'recurring': ('VBG', ['R', 'IH0', 'K', 'ER1', 'IH0', 'NG']), 'recur': ('NN', ['R', 'IH0', 'K', 'ER1']), 'recurrence': ('NN', ['R', 'IH0', 'K', 'ER1', 'AH0', 'N', 'S']), 'recurrent': ('NN', ['R', 'IH0', 'K', 'ER1', 'AH0', 'N', 'T']), 'recuse': ('NN', ['R', 'IH2', 'K', 'Y', 'UW1', 'Z']), 'red': ('JJ', ['R', 'EH1', 'D']), 'redact': ('NN', ['R', 'IH0', 'D', 'AE1', 'K', 'T']), 'redbird': ('NN', ['R', 'EH1', 'D', 'B', 'ER2', 'D']), 'redbud': ('NN', ['R', 'EH1', 'D', 'B', 'AH2', 'D']), 'redcoat': ('NN', ['R', 'EH1', 'D', 'K', 'OW2', 'T']), 'redden': ('NN', ['R', 'EH1', 'D', 'AH0', 'N']), 'reddish': ('NN', ['R', 'EH1', 'D', 'IH0', 'SH']), 'redeemed': ('VBN', ['R', 'IH0', 'D', 'IY1', 'M', 'D']), 'redeeming': ('VBG', ['R', 'IH0', 'D', 'IY1', 'M', 'IH0', 'NG']), 'redeem': ('NN', ['R', 'IH0', 'D', 'IY1', 'M']), 'redeemable': ('JJ', ['R', 'IH0', 'D', 'IY1', 'M', 'AH0', 'B', 'AH0', 'L']), 'redemptive': ('NN', ['R', 'IH0', 'D', 'EH1', 'M', 'P', 'T', 'IH0', 'V']), 'redeye': ('NN', ['R', 'EH1', 'D', 'AY0']), 'redfish': ('NN', ['R', 'EH1', 'D', 'F', 
'IH2', 'SH']), 'red-handed': ('JJ', ['R', 'EH1', 'D', 'HH', 'AE1', 'N', 'D', 'AH0', 'D']), 'redhead': ('NN', ['R', 'EH1', 'D', 'HH', 'EH2', 'D']), 'redirect': ('NN', ['R', 'IY2', 'D', 'ER0', 'EH1', 'K', 'T']), 'rediscover': ('NN', ['R', 'IY0', 'D', 'IH0', 'S', 'K', 'AH1', 'V', 'ER0']), 'redistribute': ('NN', ['R', 'IY2', 'D', 'IH0', 'S', 'T', 'R', 'IH1', 'B', 'Y', 'UW0', 'T']), 'redness': ('NN', ['R', 'EH1', 'D', 'N', 'AH0', 'S']), 'redolent': ('NN', ['R', 'EH1', 'D', 'AH0', 'L', 'AH0', 'N', 'T']), 'redouble': ('JJ', ['R', 'IY0', 'D', 'AH1', 'B', 'AH0', 'L']), 'redoubt': ('NN', ['R', 'AH0', 'D', 'AW1', 'T']), 'redoubtable': ('JJ', ['R', 'AH0', 'D', 'AW1', 'T', 'AH0', 'B', 'AH0', 'L']), 'redound': ('NN', ['R', 'IH0', 'D', 'AW1', 'N', 'D']), 'redpoll': ('NN', ['R', 'EH1', 'D', 'P', 'OW2', 'L']), 'redraft': ('NN', ['R', 'IY0', 'D', 'R', 'AE1', 'F', 'T']), 'redrawn': ('NN', ['R', 'IY0', 'D', 'R', 'AO1', 'N']), 'redrawing': ('VBG', ['R', 'IY0', 'D', 'R', 'AO1', 'IH0', 'NG']), 'redraw': ('NN', ['R', 'IY0', 'D', 'R', 'AO1']), 'redress': ('NN', ['R', 'IH0', 'D', 'R', 'EH1', 'S']), 'redskin': ('NN', ['R', 'EH1', 'D', 'S', 'K', 'IH2', 'N']), 'reduced': ('VBN', ['R', 'AH0', 'D', 'UW1', 'S', 'T']), 'reducing': ('VBG', ['R', 'AH0', 'D', 'UW1', 'S', 'IH0', 'NG']), 'reduce': ('VB', ['R', 'AH0', 'D', 'UW1', 'S']), 'reducible': ('JJ', ['R', 'AH0', 'D', 'UW1', 'S', 'AH0', 'B', 'AH0', 'L']), 'reduction': ('NN', ['R', 'AH0', 'D', 'AH1', 'K', 'SH', 'AH0', 'N']), 'redundancy': ('NN', ['R', 'IH0', 'D', 'AH1', 'N', 'D', 'AH0', 'N', 'S', 'IY0']), 'redundant': ('NN', ['R', 'IH0', 'D', 'AH1', 'N', 'D', 'AH0', 'N', 'T']), 'redwood': ('NN', ['R', 'EH1', 'D', 'W', 'UH2', 'D']), 'ree': ('NN', ['R', 'IY1']), 'reebok': ('NN', ['R', 'IY1', 'B', 'AA0', 'K']), 'reed': ('NN', ['R', 'IY1', 'D']), 'reedy': ('NN', ['R', 'IY1', 'D', 'IY0']), 'reef': ('NN', ['R', 'IY1', 'F']), 'reefer': ('NN', ['R', 'IY1', 'F', 'ER0']), 'reek': ('NN', ['R', 'IY1', 'K']), 'reeking': ('VBG', ['R', 'IY1', 'K', 'IH0', 'NG']), 
'reel': ('NN', ['R', 'IY1', 'L']), 'reeled': ('VBN', ['R', 'IY1', 'L', 'D']), 'reeling': ('VBG', ['R', 'IY1', 'L', 'IH0', 'NG']), 'reelect': ('NN', ['R', 'IY0', 'IH0', 'L', 'EH1', 'K', 'T']), 'reelection': ('NN', ['R', 'IY0', 'IH0', 'L', 'EH1', 'K', 'SH', 'AH0', 'N']), 'reemerge': ('NN', ['R', 'IY0', 'IH0', 'M', 'ER1', 'JH']), 'reemergence': ('NN', ['R', 'IY0', 'IH0', 'M', 'ER1', 'JH', 'AH0', 'N', 'S']), 'reenact': ('NN', ['R', 'IY0', 'IH0', 'N', 'AE1', 'K', 'T']), 'reenactment': ('NN', ['R', 'IY0', 'IH0', 'N', 'AE1', 'K', 'T', 'M', 'AH0', 'N', 'T']), 'reenter': ('NN', ['R', 'IY0', 'IH1', 'N', 'T', 'ER0']), 'reentering': ('VBG', ['R', 'IY0', 'IH1', 'N', 'T', 'ER0', 'IH0', 'NG']), 'reentry': ('NN', ['R', 'IY0', 'IH1', 'N', 'T', 'R', 'IY0']), 'reestablish': ('NN', ['R', 'IY0', 'IH0', 'S', 'T', 'AE1', 'B', 'L', 'IH0', 'SH']), 'reeve': ('NN', ['R', 'IY1', 'V']), 'rove': ('VB', ['R', 'OW1', 'V']), 'reexamination': ('NN', ['R', 'IY0', 'IH0', 'G', 'Z', 'AE2', 'M', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'reexamine': ('NN', ['R', 'IY0', 'EH0', 'G', 'Z', 'AE1', 'M', 'AH0', 'N']), 'reexport': ('NN', ['R', 'IY0', 'EH1', 'K', 'S', 'P', 'AO2', 'R', 'T']), 'referred': ('VBN', ['R', 'AH0', 'F', 'ER1', 'D']), 'referring': ('VBG', ['R', 'IH0', 'F', 'ER1', 'IH0', 'NG']), 'refer': ('NN', ['R', 'AH0', 'F', 'ER1']), 'referee': ('NN', ['R', 'EH2', 'F', 'ER0', 'IY1']), 'reference': ('NN', ['R', 'EH1', 'F', 'ER0', 'AH0', 'N', 'S']), 'referendum': ('NN', ['R', 'EH2', 'F', 'ER0', 'EH1', 'N', 'D', 'AH0', 'M']), 'refill': ('NN', ['R', 'IY1', 'F', 'IH0', 'L']), 'refined': ('VBN', ['R', 'AH0', 'F', 'AY1', 'N', 'D']), 'refining': ('NN', ['R', 'AH0', 'F', 'AY1', 'N', 'IH0', 'NG']), 'refine': ('NN', ['R', 'AH0', 'F', 'AY1', 'N']), 'refinement': ('NN', ['R', 'AH0', 'F', 'AY1', 'N', 'M', 'AH0', 'N', 'T']), 'refiner': ('NN', ['R', 'IH0', 'F', 'AY1', 'N', 'ER0']), 'refineries': ('NNS', ['R', 'IH0', 'F', 'AY1', 'N', 'ER0', 'IY0', 'Z']), 'refinery': ('NN', ['R', 'IH0', 'F', 'AY1', 'N', 'ER0', 'IY0']), 
'refit': ('NN', ['R', 'IY0', 'F', 'IH1', 'T']), 'reflected': ('VBN', ['R', 'AH0', 'F', 'L', 'EH1', 'K', 'T', 'AH0', 'D']), 'reflecting': ('VBG', ['R', 'AH0', 'F', 'L', 'EH1', 'K', 'T', 'IH0', 'NG']), 'reflect': ('NN', ['R', 'AH0', 'F', 'L', 'EH1', 'K', 'T']), 'reflection': ('NN', ['R', 'AH0', 'F', 'L', 'EH1', 'K', 'SH', 'AH0', 'N']), 'reflective': ('NN', ['R', 'IH0', 'F', 'L', 'EH1', 'K', 'T', 'IH0', 'V']), 'reflector': ('NN', ['R', 'IH0', 'F', 'L', 'EH1', 'K', 'T', 'ER0']), 'reflex': ('NN', ['R', 'IY1', 'F', 'L', 'EH0', 'K', 'S']), 'reflexive': ('NN', ['R', 'AH0', 'F', 'L', 'EH1', 'K', 'S', 'IH0', 'V']), 'reform': ('NN', ['R', 'AH0', 'F', 'AO1', 'R', 'M']), 'reformation': ('NN', ['R', 'EH2', 'F', 'ER0', 'M', 'EY1', 'SH', 'AH0', 'N']), 'reformatory': ('NN', ['R', 'IH0', 'F', 'AO1', 'R', 'M', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'reformed': ('VBN', ['R', 'IH0', 'F', 'AO1', 'R', 'M', 'D']), 'reformer': ('NN', ['R', 'IH0', 'F', 'AO1', 'R', 'M', 'ER0']), 'reformist': ('NN', ['R', 'IH0', 'F', 'AO1', 'R', 'M', 'IH0', 'S', 'T']), 'refractive': ('NN', ['R', 'AH0', 'F', 'R', 'AE1', 'K', 'T', 'IH0', 'V']), 'refractor': ('NN', ['R', 'AH0', 'F', 'R', 'AE1', 'K', 'T', 'ER0']), 'refractory': ('NN', ['R', 'AH0', 'F', 'R', 'AE1', 'K', 'T', 'ER0', 'IY0']), 'refrained': ('VBN', ['R', 'IH0', 'F', 'R', 'EY1', 'N', 'D']), 'refraining': ('VBG', ['R', 'IH0', 'F', 'R', 'EY1', 'N', 'IH0', 'NG']), 'refrain': ('NN', ['R', 'IH0', 'F', 'R', 'EY1', 'N']), 'refreshed': ('VBN', ['R', 'IY0', 'F', 'R', 'EH1', 'SH', 'T']), 'refreshing': ('VBG', ['R', 'IH0', 'F', 'R', 'EH1', 'SH', 'IH0', 'NG']), 'refresh': ('NN', ['R', 'IH0', 'F', 'R', 'EH1', 'SH']), 'refresher': ('NN', ['R', 'IH0', 'F', 'R', 'EH1', 'SH', 'ER0']), 'refreshment': ('NN', ['R', 'AH0', 'F', 'R', 'EH1', 'SH', 'M', 'AH0', 'N', 'T']), 'refrigerant': ('NN', ['R', 'IH0', 'F', 'R', 'IH1', 'JH', 'ER0', 'AH0', 'N', 'T']), 'refrigerated': ('VBN', ['R', 'IH0', 'F', 'R', 'IH1', 'JH', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'refrigerate': ('NN', ['R', 'IH0', 
'F', 'R', 'IH1', 'JH', 'ER0', 'EY2', 'T']), 'refrigeration': ('NN', ['R', 'IH0', 'F', 'R', 'IH2', 'JH', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'refrigerator': ('NN', ['R', 'AH0', 'F', 'R', 'IH1', 'JH', 'ER0', 'EY2', 'T', 'ER0']), 'refuge': ('NN', ['R', 'EH1', 'F', 'Y', 'UW0', 'JH']), 'refugee': ('NN', ['R', 'EH1', 'F', 'Y', 'UW0', 'JH', 'IY0']), 'refund': ('NN', ['R', 'IH0', 'F', 'AH1', 'N', 'D']), 'refurbish': ('NN', ['R', 'IY0', 'F', 'ER1', 'B', 'IH0', 'SH']), 'refusal': ('NN', ['R', 'AH0', 'F', 'Y', 'UW1', 'Z', 'AH0', 'L']), 'refused': ('VBD', ['R', 'AH0', 'F', 'Y', 'UW1', 'Z', 'D']), 'refusing': ('VBG', ['R', 'AH0', 'F', 'Y', 'UW1', 'Z', 'IH0', 'NG']), 'refuse': ('NN', ['R', 'AH0', 'F', 'Y', 'UW1', 'Z']), 'refutation': ('NN', ['R', 'EH2', 'F', 'Y', 'UW0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'refuted': ('VBN', ['R', 'IH0', 'F', 'Y', 'UW1', 'T', 'IH0', 'D']), 'refuting': ('VBG', ['R', 'IH0', 'F', 'Y', 'UW1', 'T', 'IH0', 'NG']), 'refute': ('NN', ['R', 'IH0', 'F', 'Y', 'UW1', 'T']), 'regain': ('NN', ['R', 'IH0', 'G', 'EY1', 'N']), 'regal': ('NN', ['R', 'IY1', 'G', 'AH0', 'L']), 'regaled': ('VBN', ['R', 'IH0', 'G', 'EY1', 'L', 'D']), 'regaling': ('VBG', ['R', 'IH0', 'G', 'EY1', 'L', 'IH0', 'NG']), 'regalia': ('NN', ['R', 'IH0', 'G', 'EY1', 'L', 'Y', 'AH0']), 'regally': ('RB', ['R', 'IY1', 'G', 'AH0', 'L', 'IY0']), 'regarded': ('VBN', ['R', 'AH0', 'G', 'AA1', 'R', 'D', 'AH0', 'D']), 'regarding': ('VBG', ['R', 'AH0', 'G', 'AA1', 'R', 'D', 'IH0', 'NG']), 'regard': ('NN', ['R', 'AH0', 'G', 'AA1', 'R', 'D']), 'regardless': ('RB', ['R', 'AH0', 'G', 'AA1', 'R', 'D', 'L', 'AH0', 'S']), 'regattas': ('NNS', ['R', 'AH0', 'G', 'AA1', 'T', 'AH0', 'Z']), 'regatta': ('NN', ['R', 'IH0', 'G', 'AA1', 'T', 'AH0']), 'regel': ('NN', ['R', 'EH1', 'G', 'AH0', 'L']), 'regency': ('NN', ['R', 'IY1', 'JH', 'AH0', 'N', 'S', 'IY0']), 'regenerate': ('NN', ['R', 'IY0', 'JH', 'EH1', 'N', 'ER0', 'EY2', 'T']), 'regeneration': ('NN', ['R', 'IY0', 'JH', 'EH1', 'N', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'regent': 
('NN', ['R', 'IY1', 'JH', 'AH0', 'N', 'T']), 'regime': ('NN', ['R', 'AH0', 'ZH', 'IY1', 'M']), 'regimen': ('NNS', ['R', 'EH1', 'JH', 'AH0', 'M', 'AH0', 'N']), 'regiment': ('NN', ['R', 'EH1', 'JH', 'AH0', 'M', 'AH0', 'N', 'T']), 'regimented': ('VBN', ['R', 'EH1', 'JH', 'AH0', 'M', 'EH2', 'N', 'T', 'IH0', 'D']), 'regimental': ('NN', ['R', 'EH2', 'JH', 'AH0', 'M', 'EH1', 'N', 'T', 'AH0', 'L']), 'region': ('NN', ['R', 'IY1', 'JH', 'AH0', 'N']), 'regional': ('JJ', ['R', 'IY1', 'JH', 'AH0', 'N', 'AH0', 'L']), 'register': ('NN', ['R', 'EH1', 'JH', 'IH0', 'S', 'T', 'ER0']), 'registered': ('VBN', ['R', 'EH1', 'JH', 'IH0', 'S', 'T', 'ER0', 'D']), 'registering': ('VBG', ['R', 'EH1', 'JH', 'IH0', 'S', 'T', 'ER0', 'IH0', 'NG']), 'registrant': ('NN', ['R', 'EH1', 'JH', 'AH0', 'S', 'T', 'R', 'AH0', 'N', 'T']), 'registrar': ('NN', ['R', 'EH1', 'JH', 'IH0', 'S', 'T', 'R', 'AA2', 'R']), 'registration': ('NN', ['R', 'EH2', 'JH', 'IH0', 'S', 'T', 'R', 'EY1', 'SH', 'AH0', 'N']), 'registry': ('NN', ['R', 'EH1', 'JH', 'IH0', 'S', 'T', 'R', 'IY0']), 'regress': ('NN', ['R', 'IY1', 'G', 'R', 'EH0', 'S']), 'regression': ('NN', ['R', 'AH0', 'G', 'R', 'EH1', 'SH', 'AH0', 'N']), 'regressive': ('NN', ['R', 'AH0', 'G', 'R', 'EH1', 'S', 'IH0', 'V']), 'regret': ('NN', ['R', 'AH0', 'G', 'R', 'EH1', 'T']), 'regretted': ('VBN', ['R', 'IH0', 'G', 'R', 'EH1', 'T', 'IH0', 'D']), 'regretting': ('VBG', ['R', 'IH0', 'G', 'R', 'EH1', 'T', 'IH0', 'NG']), 'regretful': ('NN', ['R', 'IH0', 'G', 'R', 'EH1', 'T', 'F', 'AH0', 'L']), 'regular': ('JJ', ['R', 'EH1', 'G', 'Y', 'AH0', 'L', 'ER0']), 'regularity': ('NN', ['R', 'EH2', 'G', 'Y', 'AH0', 'L', 'EH1', 'R', 'AH0', 'T', 'IY0']), 'regularly': ('RB', ['R', 'EH1', 'G', 'Y', 'AH0', 'L', 'ER0', 'L', 'IY0']), 'regulated': ('VBN', ['R', 'EH1', 'G', 'Y', 'AH0', 'L', 'EY2', 'T', 'AH0', 'D']), 'regulating': ('VBG', ['R', 'EH1', 'G', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'regulate': ('NN', ['R', 'EH1', 'G', 'Y', 'AH0', 'L', 'EY2', 'T']), 'regulation': ('NN', ['R', 
'EH2', 'G', 'Y', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'regulative': ('NN', ['R', 'EH1', 'G', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'V']), 'regulator': ('NN', ['R', 'EH1', 'G', 'Y', 'AH0', 'L', 'EY2', 'T', 'ER0']), 'rehabilitated': ('VBN', ['R', 'IY2', 'HH', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'EY2', 'T', 'IH0', 'D']), 'rehabilitating': ('VBG', ['R', 'IY2', 'HH', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'EY2', 'T', 'IH0', 'NG']), 'rehabilitate': ('NN', ['R', 'IY2', 'HH', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'EY2', 'T']), 'rehabilitation': ('NN', ['R', 'IY2', 'HH', 'AH0', 'B', 'IH2', 'L', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'rehash': ('NN', ['R', 'IY0', 'HH', 'AE1', 'SH']), 'rehear': ('NN', ['R', 'IY0', 'HH', 'IY1', 'R']), 'rehearsal': ('NN', ['R', 'IH0', 'HH', 'ER1', 'S', 'AH0', 'L']), 'rehearsed': ('VBN', ['R', 'IY0', 'HH', 'ER1', 'S', 'T']), 'rehearsing': ('VBG', ['R', 'IH0', 'HH', 'ER1', 'S', 'IH0', 'NG']), 'rehearse': ('NN', ['R', 'IY0', 'HH', 'ER1', 'S']), 'rehire': ('NN', ['R', 'IY0', 'HH', 'AY1', 'R']), 'reis': ('NN', ['R', 'IY1', 'Z']), 'reif': ('NN', ['R', 'IY1', 'F']), 'reigle': ('NN', ['R', 'IY1', 'G', 'AH0', 'L']), 'reign': ('NN', ['R', 'EY1', 'N']), 'reigned': ('VBN', ['R', 'EY1', 'N', 'D']), 'reigning': ('VBG', ['R', 'EY1', 'N', 'IH0', 'NG']), 'reim': ('NN', ['R', 'IY1', 'M']), 'reimbursed': ('VBN', ['R', 'IY2', 'IH0', 'M', 'B', 'ER1', 'S', 'T']), 'reimbursing': ('VBG', ['R', 'IY2', 'IH0', 'M', 'B', 'ER1', 'S', 'IH0', 'NG']), 'reimburse': ('NN', ['R', 'IY2', 'IH0', 'M', 'B', 'ER1', 'S']), 'reimbursement': ('NN', ['R', 'IY2', 'IH0', 'M', 'B', 'ER1', 'S', 'M', 'AH0', 'N', 'T']), 'reimpose': ('VB', ['R', 'IY0', 'IH0', 'M', 'P', 'OW1', 'Z']), 'rein': ('NN', ['R', 'EY1', 'N']), 'reined': ('VBN', ['R', 'EY1', 'N', 'D']), 'reining': ('VBG', ['R', 'EY1', 'N', 'IH0', 'NG']), 'reincorporate': ('NN', ['R', 'IY0', 'IH0', 'N', 'K', 'AO1', 'R', 'P', 'ER0', 'EY2', 'T']), 'reindeer': ('NN', ['R', 'EY1', 'N', 'D', 'IH2', 'R']), 'reinforce': ('NN', ['R', 'IY2', 'IH0', 'N', 
'F', 'AO1', 'R', 'S']), 'reinforcement': ('NN', ['R', 'IY2', 'IH0', 'N', 'F', 'AO1', 'R', 'S', 'M', 'AH0', 'N', 'T']), 'reins': ('NNS', ['R', 'EY1', 'N', 'Z']), 'reinspect': ('NN', ['R', 'IY0', 'IH0', 'N', 'S', 'P', 'EH1', 'K', 'T']), 'reinspection': ('NN', ['R', 'IY0', 'IH0', 'N', 'S', 'P', 'EH1', 'K', 'SH', 'AH0', 'N']), 'reinstall': ('NN', ['R', 'IY2', 'IH0', 'N', 'S', 'T', 'AA1', 'L']), 'reinstate': ('NN', ['R', 'IY2', 'IH0', 'N', 'S', 'T', 'EY1', 'T']), 'reinstatement': ('NN', ['R', 'IY2', 'IH0', 'N', 'S', 'T', 'EY1', 'T', 'M', 'AH0', 'N', 'T']), 'reinsurance': ('NN', ['R', 'IY2', 'IH0', 'N', 'SH', 'UH1', 'R', 'AH0', 'N', 'S']), 'reinsure': ('NN', ['R', 'IY2', 'IH0', 'N', 'SH', 'UH1', 'R']), 'reinsurer': ('NN', ['R', 'IY2', 'IH0', 'N', 'SH', 'UH1', 'R', 'ER0']), 'reintegrate': ('NN', ['R', 'IY0', 'IH1', 'N', 'T', 'AH0', 'G', 'R', 'EY2', 'T']), 'reintegration': ('NN', ['R', 'IY0', 'IH1', 'N', 'T', 'AH0', 'G', 'R', 'EY2', 'SH', 'AH0', 'N']), 'reintroduce': ('NN', ['R', 'IY0', 'IH0', 'N', 'T', 'R', 'AH0', 'D', 'UW1', 'S']), 'reinvest': ('NN', ['R', 'IY2', 'IH0', 'N', 'V', 'EH1', 'S', 'T']), 'reinvestment': ('NN', ['R', 'IY2', 'IH0', 'N', 'V', 'EH1', 'S', 'T', 'M', 'AH0', 'N', 'T']), 'reinvigorate': ('NN', ['R', 'IY2', 'IH0', 'N', 'V', 'IH1', 'G', 'ER0', 'EY2', 'T']), 'reissue': ('NN', ['R', 'IY0', 'IH1', 'SH', 'UW0']), 'reit': ('NN', ['R', 'AY1', 'T']), 'reiter': ('NN', ['R', 'AY1', 'T', 'ER0']), 'reiterated': ('VBN', ['R', 'IY0', 'IH1', 'T', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'reiterating': ('VBG', ['R', 'IY0', 'IH1', 'T', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'reiterate': ('NN', ['R', 'IY0', 'IH1', 'T', 'ER0', 'EY2', 'T']), 'reiteration': ('NN', ['R', 'IY0', 'IH2', 'T', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'rejected': ('VBN', ['R', 'IH0', 'JH', 'EH1', 'K', 'T', 'IH0', 'D']), 'rejecting': ('VBG', ['R', 'IH0', 'JH', 'EH1', 'K', 'T', 'IH0', 'NG']), 'reject': ('NN', ['R', 'IH0', 'JH', 'EH1', 'K', 'T']), 'rejection': ('NN', ['R', 'IH0', 'JH', 'EH1', 'K', 'SH', 'AH0', 'N']), 
'rejoiced': ('VBN', ['R', 'IH0', 'JH', 'OY1', 'S', 'T']), 'rejoicing': ('VBG', ['R', 'IH0', 'JH', 'OY1', 'S', 'IH0', 'NG']), 'rejoice': ('NN', ['R', 'IH0', 'JH', 'OY1', 'S']), 'rejoined': ('VBN', ['R', 'IY0', 'JH', 'OY1', 'N', 'D']), 'rejoining': ('VBG', ['R', 'IY0', 'JH', 'OY1', 'N', 'IH0', 'NG']), 'rejoin': ('NN', ['R', 'IY0', 'JH', 'OY1', 'N']), 'rejoinder': ('NN', ['R', 'IH0', 'JH', 'OY1', 'N', 'D', 'ER0']), 'rejuvenate': ('NN', ['R', 'IH0', 'JH', 'UW1', 'V', 'AH0', 'N', 'EY2', 'T']), 'rejuvenation': ('NN', ['R', 'IH0', 'JH', 'UW2', 'V', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'rekindle': ('NN', ['R', 'IY0', 'K', 'IH1', 'N', 'D', 'AH0', 'L']), 'relapsed': ('VBN', ['R', 'IY0', 'L', 'AE1', 'P', 'S', 'T']), 'relapsing': ('VBG', ['R', 'IH0', 'L', 'AE1', 'P', 'S', 'IH0', 'NG']), 'relapse': ('NN', ['R', 'IY0', 'L', 'AE1', 'P', 'S']), 'related': ('JJ', ['R', 'IH0', 'L', 'EY1', 'T', 'IH0', 'D']), 'relating': ('VBG', ['R', 'IH0', 'L', 'EY1', 'T', 'IH0', 'NG']), 'relate': ('NN', ['R', 'IH0', 'L', 'EY1', 'T']), 'relation': ('NN', ['R', 'IY0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'relational': ('NN', ['R', 'IY0', 'L', 'EY1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'relationship': ('NN', ['R', 'IY0', 'L', 'EY1', 'SH', 'AH0', 'N', 'SH', 'IH2', 'P']), 'relative': ('NN', ['R', 'EH1', 'L', 'AH0', 'T', 'IH0', 'V']), 'relatively': ('RB', ['R', 'EH1', 'L', 'AH0', 'T', 'IH0', 'V', 'L', 'IY0']), 'relativity': ('NN', ['R', 'EH2', 'L', 'AH0', 'T', 'IH1', 'V', 'AH0', 'T', 'IY0']), 'relaxed': ('NN', ['R', 'IH0', 'L', 'AE1', 'K', 'S', 'T']), 'relaxing': ('VBG', ['R', 'IH0', 'L', 'AE1', 'K', 'S', 'IH0', 'NG']), 'relax': ('NN', ['R', 'IH0', 'L', 'AE1', 'K', 'S']), 'relaxation': ('NN', ['R', 'IY2', 'L', 'AE0', 'K', 'S', 'EY1', 'SH', 'AH0', 'N']), 'relaying': ('VBG', ['R', 'IY1', 'L', 'EY2', 'IH0', 'NG']), 'relay': ('NN', ['R', 'IY1', 'L', 'EY2']), 'release': ('NN', ['R', 'IY0', 'L', 'IY1', 'S']), 'released': ('VBN', ['R', 'IY0', 'L', 'IY1', 'S', 'T']), 'releasing': ('VBG', ['R', 'IY0', 'L', 'IY1', 'S', 
'IH0', 'NG']), 'relegated': ('VBN', ['R', 'EH1', 'L', 'AH0', 'G', 'EY2', 'T', 'IH0', 'D']), 'relegating': ('VBG', ['R', 'EH1', 'L', 'AH0', 'G', 'EY2', 'T', 'IH0', 'NG']), 'relegate': ('NN', ['R', 'EH1', 'L', 'AH0', 'G', 'EY2', 'T']), 'relented': ('VBN', ['R', 'IH0', 'L', 'EH1', 'N', 'T', 'IH0', 'D']), 'relenting': ('VBG', ['R', 'IH0', 'L', 'EH1', 'N', 'T', 'IH0', 'NG']), 'relent': ('NN', ['R', 'IH0', 'L', 'EH1', 'N', 'T']), 'relentless': ('NN', ['R', 'IH0', 'L', 'EH1', 'N', 'T', 'L', 'IH0', 'S']), 'relevance': ('NN', ['R', 'EH1', 'L', 'AH0', 'V', 'AH0', 'N', 'S']), 'relevancy': ('NN', ['R', 'EH1', 'L', 'AH0', 'V', 'AH0', 'N', 'S', 'IY0']), 'relevant': ('NN', ['R', 'EH1', 'L', 'AH0', 'V', 'AH0', 'N', 'T']), 'reliability': ('NN', ['R', 'IY0', 'L', 'AY2', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'reliable': ('JJ', ['R', 'IH0', 'L', 'AY1', 'AH0', 'B', 'AH0', 'L']), 'reliance': ('NN', ['R', 'IH0', 'L', 'AY1', 'AH0', 'N', 'S']), 'reliant': ('NN', ['R', 'IH0', 'L', 'AY1', 'AH0', 'N', 'T']), 'relic': ('NN', ['R', 'EH1', 'L', 'IH0', 'K']), 'relief': ('NN', ['R', 'IH0', 'L', 'IY1', 'F']), 'relieved': ('VBN', ['R', 'IH0', 'L', 'IY1', 'V', 'D']), 'relieving': ('VBG', ['R', 'IH0', 'L', 'IY1', 'V', 'IH0', 'NG']), 'relieve': ('NN', ['R', 'IH0', 'L', 'IY1', 'V']), 'reliever': ('NN', ['R', 'IY0', 'L', 'IY1', 'V', 'ER0']), 'religion': ('NN', ['R', 'IH0', 'L', 'IH1', 'JH', 'AH0', 'N']), 'religionist': ('NN', ['R', 'IY0', 'L', 'IH1', 'JH', 'AH0', 'N', 'IH0', 'S', 'T']), 'religiosity': ('NN', ['R', 'IH0', 'L', 'IH2', 'JH', 'IY0', 'AA1', 'S', 'AH0', 'T', 'IY0']), 'religious': ('JJ', ['R', 'IH0', 'L', 'IH1', 'JH', 'AH0', 'S']), 'religiously': ('RB', ['R', 'IH0', 'L', 'IH1', 'JH', 'AH0', 'S', 'L', 'IY0']), 'relinquished': ('VBN', ['R', 'IH0', 'L', 'IH1', 'NG', 'K', 'W', 'IH0', 'SH', 'T']), 'relinquishing': ('VBG', ['R', 'IY0', 'L', 'IH1', 'NG', 'K', 'W', 'IH0', 'SH', 'IH0', 'NG']), 'relinquish': ('NN', ['R', 'IH0', 'L', 'IH1', 'NG', 'K', 'W', 'IH0', 'SH']), 'relished': ('VBN', ['R', 
'EH1', 'L', 'IH0', 'SH', 'T']), 'relishing': ('VBG', ['R', 'EH1', 'L', 'IH0', 'SH', 'IH0', 'NG']), 'relish': ('NN', ['R', 'EH1', 'L', 'IH0', 'SH']), 'relive': ('NN', ['R', 'IY0', 'L', 'IH1', 'V']), 'reload': ('NN', ['R', 'IY0', 'L', 'OW1', 'D']), 'relocate': ('NN', ['R', 'IY0', 'L', 'OW1', 'K', 'EY0', 'T']), 'relocation': ('NN', ['R', 'IY0', 'L', 'OW1', 'K', 'EY1', 'SH', 'AH0', 'N']), 'reluctance': ('NN', ['R', 'IH0', 'L', 'AH1', 'K', 'T', 'AH0', 'N', 'S']), 'reluctant': ('NN', ['R', 'IH0', 'L', 'AH1', 'K', 'T', 'AH0', 'N', 'T']), 'reluctantly': ('RB', ['R', 'IH0', 'L', 'AH1', 'K', 'T', 'AH0', 'N', 'T', 'L', 'IY0']), 'relied': ('VBN', ['R', 'IH0', 'L', 'AY1', 'D']), 'relying': ('VBG', ['R', 'IY0', 'L', 'AY1', 'IH0', 'NG']), 'rely': ('RB', ['R', 'IH0', 'L', 'AY1']), 'remade': ('NN', ['R', 'IY0', 'M', 'EY1', 'D']), 'remained': ('VBD', ['R', 'IH0', 'M', 'EY1', 'N', 'D']), 'remaining': ('VBG', ['R', 'IH0', 'M', 'EY1', 'N', 'IH0', 'NG']), 'remain': ('NN', ['R', 'IH0', 'M', 'EY1', 'N']), 'remainder': ('NN', ['R', 'IH0', 'M', 'EY1', 'N', 'D', 'ER0']), 'remake': ('NN', ['R', 'IY1', 'M', 'EY1', 'K']), 'remanded': ('VBN', ['R', 'IH0', 'M', 'AE1', 'N', 'D', 'IH0', 'D']), 'remand': ('NN', ['R', 'IH0', 'M', 'AE1', 'N', 'D']), 'remarked': ('VBN', ['R', 'IH0', 'M', 'AA1', 'R', 'K', 'T']), 'remarking': ('VBG', ['R', 'IH0', 'M', 'AA1', 'R', 'K', 'IH0', 'NG']), 'remark': ('NN', ['R', 'IH0', 'M', 'AA1', 'R', 'K']), 'remarkable': ('JJ', ['R', 'IH0', 'M', 'AA1', 'R', 'K', 'AH0', 'B', 'AH0', 'L']), 'remarriage': ('NN', ['R', 'IY0', 'M', 'EH1', 'R', 'IH0', 'JH']), 'remarry': ('NN', ['R', 'IY0', 'M', 'EH1', 'R', 'IY0']), 'remedial': ('NN', ['R', 'IH0', 'M', 'IY1', 'D', 'IY0', 'AH0', 'L']), 'remediate': ('NN', ['R', 'IY0', 'M', 'IY1', 'D', 'IY0', 'AH0', 'T']), 'remedies': ('NNS', ['R', 'EH1', 'M', 'AH0', 'D', 'IY0', 'Z']), 'remedy': ('NN', ['R', 'EH1', 'M', 'AH0', 'D', 'IY0']), 'remedied': ('VBN', ['R', 'EH1', 'M', 'AH0', 'D', 'IY0', 'D']), 'remedying': ('VBG', ['R', 'EH1', 'M', 'AH0', 
'D', 'IY0', 'IH0', 'NG']), 'remembered': ('VBN', ['R', 'IH0', 'M', 'EH1', 'M', 'B', 'ER0', 'D']), 'remembering': ('VBG', ['R', 'IH0', 'M', 'EH1', 'M', 'B', 'ER0', 'IH0', 'NG']), 'remember': ('VB', ['R', 'IH0', 'M', 'EH1', 'M', 'B', 'ER0']), 'remembrance': ('NN', ['R', 'IY0', 'M', 'EH1', 'M', 'B', 'R', 'AH0', 'N', 'S']), 'remind': ('NN', ['R', 'IY0', 'M', 'AY1', 'N', 'D']), 'reminder': ('NN', ['R', 'IY0', 'M', 'AY1', 'N', 'D', 'ER0']), 'reminiscence': ('NN', ['R', 'EH2', 'M', 'AH0', 'N', 'IH1', 'S', 'AH0', 'N', 'S']), 'reminiscent': ('NN', ['R', 'EH2', 'M', 'AH0', 'N', 'IH1', 'S', 'AH0', 'N', 'T']), 'remiss': ('NN', ['R', 'IY0', 'M', 'IH1', 'S']), 'remission': ('NN', ['R', 'IY0', 'M', 'IH1', 'SH', 'AH0', 'N']), 'remitted': ('VBN', ['R', 'IY0', 'M', 'IH1', 'T', 'IH0', 'D']), 'remit': ('NN', ['R', 'IY0', 'M', 'IH1', 'T']), 'remittance': ('NN', ['R', 'IY0', 'M', 'IH1', 'T', 'AH0', 'N', 'S']), 'remnant': ('NN', ['R', 'EH1', 'M', 'N', 'AH0', 'N', 'T']), 'remodel': ('NN', ['R', 'IY0', 'M', 'AA1', 'D', 'AH0', 'L']), 'remold': ('NN', ['R', 'IY0', 'M', 'OW1', 'L', 'D']), 'remorse': ('NN', ['R', 'IH0', 'M', 'AO1', 'R', 'S']), 'remorseful': ('NN', ['R', 'IH0', 'M', 'AO1', 'R', 'S', 'F', 'AH0', 'L']), 'remorseless': ('NN', ['R', 'IH0', 'M', 'AO1', 'R', 'S', 'L', 'AH0', 'S']), 'remote': ('NN', ['R', 'IH0', 'M', 'OW1', 'T']), 'removable': ('JJ', ['R', 'IH0', 'M', 'UW1', 'V', 'AH0', 'B', 'AH0', 'L']), 'removal': ('NN', ['R', 'IH0', 'M', 'UW1', 'V', 'AH0', 'L']), 'removed': ('VBN', ['R', 'IY0', 'M', 'UW1', 'V', 'D']), 'removing': ('VBG', ['R', 'IY0', 'M', 'UW1', 'V', 'IH0', 'NG']), 'remove': ('VB', ['R', 'IY0', 'M', 'UW1', 'V']), 'remover': ('NN', ['R', 'IH0', 'M', 'UW1', 'V', 'ER0']), 'remunerate': ('NN', ['R', 'IH0', 'M', 'Y', 'UW2', 'N', 'ER0', 'EY1', 'T']), 'remuneration': ('NN', ['R', 'IH0', 'M', 'Y', 'UW2', 'N', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'remunerative': ('NN', ['R', 'IY0', 'M', 'Y', 'UW1', 'N', 'ER0', 'AH0', 'T', 'IH0', 'V']), 'ren': ('NNS', ['R', 'EH1', 'N']), 
'renaissance': ('NN', ['R', 'EH2', 'N', 'AH0', 'S', 'AA1', 'N', 'S']), 'renal': ('NN', ['R', 'IY1', 'N', 'AH0', 'L']), 'rename': ('NN', ['R', 'IY0', 'N', 'EY1', 'M']), 'renard': ('NN', ['R', 'IH0', 'N', 'AA1', 'R', 'D']), 'renate': ('NN', ['R', 'AH0', 'N', 'AA1', 'T', 'AH0']), 'rent': ('NN', ['R', 'EH1', 'N', 'T']), 'rending': ('VBG', ['R', 'EH1', 'N', 'D', 'IH0', 'NG']), 'render': ('NN', ['R', 'EH1', 'N', 'D', 'ER0']), 'rendered': ('VBN', ['R', 'EH1', 'N', 'D', 'ER0', 'D']), 'rendering': ('VBG', ['R', 'EH1', 'N', 'D', 'ER0', 'IH0', 'NG']), 'rendezvous': ('JJ', ['R', 'AA1', 'N', 'D', 'IH0', 'V', 'UW2']), 'rendition': ('NN', ['R', 'EH0', 'N', 'D', 'IH1', 'SH', 'AH0', 'N']), 'renegade': ('NN', ['R', 'EH1', 'N', 'AH0', 'G', 'EY2', 'D']), 'renege': ('NN', ['R', 'IH0', 'N', 'IH1', 'G']), 'renewing': ('VBG', ['R', 'IH0', 'N', 'UW1', 'IH0', 'NG']), 'renew': ('NN', ['R', 'IH0', 'N', 'UW1']), 'renewable': ('JJ', ['R', 'IY0', 'N', 'UW1', 'AH0', 'B', 'AH0', 'L']), 'renewal': ('NN', ['R', 'IH0', 'N', 'UW1', 'AH0', 'L']), 'renne': ('NN', ['R', 'EH1', 'N']), 'renner': ('NN', ['R', 'EH1', 'N', 'ER0']), 'rennet': ('NN', ['R', 'EH1', 'N', 'AH0', 'T']), 'renounced': ('VBN', ['R', 'IH0', 'N', 'AW1', 'N', 'S', 'T']), 'renouncing': ('VBG', ['R', 'IH0', 'N', 'AW1', 'N', 'S', 'IH0', 'NG']), 'renounce': ('NN', ['R', 'IH0', 'N', 'AW1', 'N', 'S']), 'renovate': ('NN', ['R', 'EH1', 'N', 'AH0', 'V', 'EY2', 'T']), 'renovation': ('NN', ['R', 'EH2', 'N', 'AH0', 'V', 'EY1', 'SH', 'AH0', 'N']), 'renovator': ('NN', ['R', 'EH1', 'N', 'AH0', 'V', 'EY2', 'T', 'ER0']), 'renown': ('NN', ['R', 'IH0', 'N', 'AW1', 'N']), 'renowned': ('VBN', ['R', 'IH0', 'N', 'AW1', 'N', 'D']), 'rented': ('VBN', ['R', 'EH1', 'N', 'T', 'AH0', 'D']), 'renting': ('VBG', ['R', 'EH1', 'N', 'T', 'IH0', 'NG']), 'rentable': ('JJ', ['R', 'EH1', 'N', 'T', 'AH0', 'B', 'AH0', 'L']), 'rental': ('NN', ['R', 'EH1', 'N', 'T', 'AH0', 'L']), 'renter': ('NN', ['R', 'EH1', 'N', 'T', 'ER0']), 'renunciation': ('NN', ['R', 'IH0', 'N', 'AH2', 'N', 
'S', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'reoccupy': ('NN', ['R', 'IY0', 'AO1', 'K', 'Y', 'UW2', 'P', 'AY0']), 'reopen': ('VB', ['R', 'IY0', 'OW1', 'P', 'AH0', 'N']), 'reorder': ('NN', ['R', 'IY0', 'AO1', 'R', 'D', 'ER0']), 'reorganization': ('NN', ['R', 'IY2', 'AO0', 'R', 'G', 'AH0', 'N', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'reorganize': ('VB', ['R', 'IY0', 'AO1', 'R', 'G', 'AH0', 'N', 'AY2', 'Z']), 'reorient': ('NN', ['R', 'IY0', 'AO1', 'R', 'IY0', 'EH0', 'N', 'T']), 'rep': ('NN', ['R', 'EH1', 'P']), 'repack': ('NN', ['R', 'IY0', 'P', 'AE1', 'K']), 'repaid': ('NN', ['R', 'IY0', 'P', 'EY1', 'D']), 'repaint': ('NN', ['R', 'IY0', 'P', 'EY1', 'N', 'T']), 'repair': ('NN', ['R', 'IH0', 'P', 'EH1', 'R']), 'repaired': ('VBN', ['R', 'IH0', 'P', 'EH1', 'R', 'D']), 'repairing': ('VBG', ['R', 'IH0', 'P', 'EH1', 'R', 'IH0', 'NG']), 'repairable': ('JJ', ['R', 'IH0', 'P', 'EH1', 'R', 'AH0', 'B', 'AH0', 'L']), 'reparation': ('NN', ['R', 'EH2', 'P', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'repartee': ('NN', ['R', 'EH2', 'P', 'ER0', 'T', 'IY1']), 'repass': ('NN', ['R', 'IY0', 'P', 'AE1', 'S']), 'repatriate': ('NN', ['R', 'IY0', 'P', 'EY1', 'T', 'R', 'IY0', 'EY2', 'T']), 'repatriation': ('NN', ['R', 'IY0', 'P', 'EY2', 'T', 'R', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'repaying': ('VBG', ['R', 'IY0', 'P', 'EY1', 'IH0', 'NG']), 'repay': ('NN', ['R', 'IY0', 'P', 'EY1']), 'repayable': ('JJ', ['R', 'IY0', 'P', 'EY1', 'AH0', 'B', 'AH0', 'L']), 'repayment': ('NN', ['R', 'IY0', 'P', 'EY1', 'M', 'AH0', 'N', 'T']), 'repealed': ('VBN', ['R', 'IH0', 'P', 'IY1', 'L', 'D']), 'repealing': ('VBG', ['R', 'IY0', 'P', 'IY1', 'L', 'IH0', 'NG']), 'repeal': ('NN', ['R', 'IH0', 'P', 'IY1', 'L']), 'repeated': ('VBN', ['R', 'IH0', 'P', 'IY1', 'T', 'IH0', 'D']), 'repeating': ('VBG', ['R', 'IH0', 'P', 'IY1', 'T', 'IH0', 'NG']), 'repeat': ('NN', ['R', 'IH0', 'P', 'IY1', 'T']), 'repeatedly': ('RB', ['R', 'IH0', 'P', 'IY1', 'T', 'IH0', 'D', 'L', 'IY0']), 'repeater': ('NN', ['R', 'IH0', 'P', 'IY1', 'T', 'ER0']), 
'repelled': ('VBN', ['R', 'AH0', 'P', 'EH1', 'L', 'D']), 'repelling': ('VBG', ['R', 'AH0', 'P', 'EH1', 'L', 'IH0', 'NG']), 'repel': ('NN', ['R', 'IH0', 'P', 'EH1', 'L']), 'repellent': ('NN', ['R', 'IH0', 'P', 'EH1', 'L', 'AH0', 'N', 'T']), 'repent': ('NN', ['R', 'IH0', 'P', 'EH1', 'N', 'T']), 'repented': ('VBN', ['R', 'IH0', 'P', 'EH1', 'N', 'T', 'IH0', 'D']), 'repenting': ('VBG', ['R', 'IH0', 'P', 'EH1', 'N', 'T', 'IH0', 'NG']), 'repentance': ('NN', ['R', 'IH0', 'P', 'EH1', 'N', 'T', 'AH0', 'N', 'S']), 'repentant': ('NN', ['R', 'IH0', 'P', 'EH1', 'N', 'T', 'AH0', 'N', 'T']), 'repercussion': ('NN', ['R', 'IY2', 'P', 'ER0', 'K', 'AH1', 'SH', 'AH0', 'N']), 'repertoire': ('NN', ['R', 'EH1', 'P', 'ER0', 'T', 'W', 'AA2', 'R']), 'repertory': ('NN', ['R', 'EH1', 'P', 'ER0', 'T', 'AO2', 'R', 'IY0']), 'repetition': ('NN', ['R', 'EH2', 'P', 'AH0', 'T', 'IH1', 'SH', 'AH0', 'N']), 'repetitious': ('JJ', ['R', 'EH2', 'P', 'AH0', 'T', 'IH1', 'SH', 'AH0', 'S']), 'repetitive': ('NN', ['R', 'IH0', 'P', 'EH1', 'T', 'IH0', 'T', 'IH0', 'V']), 'replace': ('VB', ['R', 'IY2', 'P', 'L', 'EY1', 'S']), 'replaceable': ('JJ', ['R', 'IY2', 'P', 'L', 'EY1', 'S', 'AH0', 'B', 'AH0', 'L']), 'replacement': ('NN', ['R', 'IH0', 'P', 'L', 'EY1', 'S', 'M', 'AH0', 'N', 'T']), 'replant': ('NN', ['R', 'IY0', 'P', 'L', 'AE1', 'N', 'T']), 'replenished': ('VBN', ['R', 'IY0', 'P', 'L', 'EH1', 'N', 'IH0', 'SH', 'T']), 'replenishing': ('VBG', ['R', 'IY0', 'P', 'L', 'EH1', 'N', 'IH0', 'SH', 'IH0', 'NG']), 'replenish': ('NN', ['R', 'IY0', 'P', 'L', 'EH1', 'N', 'IH0', 'SH']), 'replenishment': ('NN', ['R', 'IH0', 'P', 'L', 'EH1', 'N', 'IH0', 'SH', 'M', 'AH0', 'N', 'T']), 'replete': ('NN', ['R', 'IY0', 'P', 'L', 'IY1', 'T']), 'replica': ('NN', ['R', 'EH1', 'P', 'L', 'IH0', 'K', 'AH0']), 'replicate': ('NN', ['R', 'EH1', 'P', 'L', 'AH0', 'K', 'EY2', 'T']), 'replicated': ('VBN', ['R', 'EH1', 'P', 'L', 'IH0', 'K', 'EY2', 'T', 'IH0', 'D']), 'replication': ('NN', ['R', 'EH2', 'P', 'L', 'AH0', 'K', 'EY1', 'SH', 'AH0', 
'N']), 'replied': ('VBD', ['R', 'IH0', 'P', 'L', 'AY1', 'D']), 'replying': ('VBG', ['R', 'IH0', 'P', 'L', 'AY1', 'IH0', 'NG']), 'reply': ('NN', ['R', 'IH0', 'P', 'L', 'AY1']), 'replies': ('NNS', ['R', 'IH0', 'P', 'L', 'AY1', 'Z']), 'reported': ('VBD', ['R', 'IY2', 'P', 'AO1', 'R', 'T', 'AH0', 'D']), 'report': ('NN', ['R', 'IY0', 'P', 'AO1', 'R', 'T']), 'reportable': ('JJ', ['R', 'IH0', 'P', 'AO1', 'R', 'T', 'AH0', 'B', 'AH0', 'L']), 'reportage': ('NN', ['R', 'IH0', 'P', 'AO1', 'R', 'T', 'IH0', 'JH']), 'reporter': ('NN', ['R', 'IH0', 'P', 'AO1', 'R', 'T', 'ER0']), 'reportorial': ('NN', ['R', 'EH2', 'P', 'ER0', 'T', 'AO1', 'R', 'IY0', 'AH0', 'L']), 'repose': ('VB', ['R', 'IY0', 'P', 'OW1', 'Z']), 'reposition': ('NN', ['R', 'IY2', 'P', 'AH0', 'Z', 'IH1', 'SH', 'AH0', 'N']), 'repository': ('NN', ['R', 'IY0', 'P', 'AA1', 'Z', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'repossess': ('NN', ['R', 'IY2', 'P', 'AH0', 'Z', 'EH1', 'S']), 'repossession': ('NN', ['R', 'IY2', 'P', 'AH0', 'Z', 'EH1', 'SH', 'AH0', 'N']), 'reprehensible': ('JJ', ['R', 'EH2', 'P', 'R', 'IH0', 'HH', 'EH1', 'N', 'S', 'AH0', 'B', 'AH0', 'L']), 'represent': ('NN', ['R', 'EH2', 'P', 'R', 'AH0', 'Z', 'EH1', 'N', 'T']), 'representation': ('NN', ['R', 'EH2', 'P', 'R', 'AH0', 'Z', 'EH0', 'N', 'T', 'EY1', 'SH', 'AH0', 'N']), 'representative': ('NN', ['R', 'EH2', 'P', 'R', 'AH0', 'Z', 'EH1', 'N', 'T', 'AH0', 'T', 'IH0', 'V']), 'repress': ('NN', ['R', 'IY0', 'P', 'R', 'EH1', 'S']), 'repression': ('NN', ['R', 'IY0', 'P', 'R', 'EH1', 'SH', 'AH0', 'N']), 'repressive': ('NN', ['R', 'IY0', 'P', 'R', 'EH1', 'S', 'IH0', 'V']), 'reprieve': ('NN', ['R', 'IY0', 'P', 'R', 'IY1', 'V']), 'reprimand': ('NN', ['R', 'EH1', 'P', 'R', 'AH0', 'M', 'AE2', 'N', 'D']), 'reprimanded': ('VBN', ['R', 'EH1', 'P', 'R', 'AH0', 'M', 'AE2', 'N', 'D', 'IH0', 'D']), 'reprint': ('NN', ['R', 'IY0', 'P', 'R', 'IH1', 'N', 'T']), 'reprisal': ('NN', ['R', 'IY0', 'P', 'R', 'AY1', 'Z', 'AH0', 'L']), 'reprise': ('NN', ['R', 'IH0', 'P', 'R', 'AY1', 'Z']), 
'reproach': ('NN', ['R', 'IY0', 'P', 'R', 'OW1', 'CH']), 'reprobate': ('NN', ['R', 'EH1', 'P', 'R', 'AO0', 'B', 'EY0', 'T']), 'reproduce': ('NN', ['R', 'IY2', 'P', 'R', 'AH0', 'D', 'UW1', 'S']), 'reproduction': ('NN', ['R', 'IY2', 'P', 'R', 'AH0', 'D', 'AH1', 'K', 'SH', 'AH0', 'N']), 'reproductive': ('NN', ['R', 'IY2', 'P', 'R', 'AH0', 'D', 'AH1', 'K', 'T', 'IH0', 'V']), 'reptile': ('NN', ['R', 'EH1', 'P', 'T', 'AY0', 'L']), 'reptilian': ('NN', ['R', 'EH0', 'P', 'T', 'IH1', 'L', 'Y', 'AH0', 'N']), 'republic': ('NN', ['R', 'IY0', 'P', 'AH1', 'B', 'L', 'AH0', 'K']), 'republican': ('JJ', ['R', 'IH0', 'P', 'AH1', 'B', 'L', 'IH0', 'K', 'AH0', 'N']), 'republicanism': ('NN', ['R', 'IH0', 'P', 'AH1', 'B', 'L', 'IH0', 'K', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'repudiated': ('VBN', ['R', 'IY0', 'P', 'Y', 'UW1', 'D', 'IY0', 'EY2', 'T', 'AH0', 'D']), 'repudiating': ('VBG', ['R', 'IY0', 'P', 'Y', 'UW1', 'D', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'repudiate': ('NN', ['R', 'IY0', 'P', 'Y', 'UW1', 'D', 'IY0', 'EY2', 'T']), 'repudiation': ('NN', ['R', 'IH0', 'P', 'Y', 'UW2', 'D', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'repugnant': ('NN', ['R', 'IH0', 'P', 'AH1', 'G', 'N', 'AH0', 'N', 'T']), 'repulsed': ('VBN', ['R', 'IY0', 'P', 'AH1', 'L', 'S', 'T']), 'repulsing': ('VBG', ['R', 'IY0', 'P', 'AH1', 'L', 'S', 'IH0', 'NG']), 'repulse': ('NN', ['R', 'IY0', 'P', 'AH1', 'L', 'S']), 'repulsive': ('NN', ['R', 'IY0', 'P', 'AH1', 'L', 'S', 'IH0', 'V']), 'repurchase': ('NN', ['R', 'IY0', 'P', 'ER1', 'CH', 'AH0', 'S']), 'reputable': ('JJ', ['R', 'EH1', 'P', 'Y', 'AH0', 'T', 'AH0', 'B', 'AH0', 'L']), 'reputation': ('NN', ['R', 'EH2', 'P', 'Y', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'reputed': ('VBN', ['R', 'IH0', 'P', 'Y', 'UW1', 'T', 'IH0', 'D']), 'repute': ('NN', ['R', 'IY0', 'P', 'Y', 'UW1', 'T']), 'reputedly': ('RB', ['R', 'IH0', 'P', 'Y', 'UW1', 'T', 'IH0', 'D', 'L', 'IY0']), 'request': ('NN', ['R', 'IH0', 'K', 'W', 'EH1', 'S', 'T']), 'requested': ('VBN', ['R', 'IH0', 'K', 'W', 'EH1', 'S', 'T', 
'IH0', 'D']), 'requesting': ('VBG', ['R', 'IH0', 'K', 'W', 'EH1', 'S', 'T', 'IH0', 'NG']), 'requester': ('NN', ['R', 'IH0', 'K', 'W', 'EH1', 'S', 'T', 'ER0']), 'requiem': ('NN', ['R', 'EH1', 'K', 'W', 'IY0', 'AH0', 'M']), 'required': ('VBN', ['R', 'IY0', 'K', 'W', 'AY1', 'ER0', 'D']), 'requiring': ('VBG', ['R', 'IY0', 'K', 'W', 'AY1', 'ER0', 'IH0', 'NG']), 'require': ('NN', ['R', 'IY2', 'K', 'W', 'AY1', 'ER0']), 'requirement': ('NN', ['R', 'IH0', 'K', 'W', 'AY1', 'R', 'M', 'AH0', 'N', 'T']), 'requisite': ('NN', ['R', 'EH1', 'K', 'W', 'AH0', 'Z', 'AH0', 'T']), 'requisition': ('NN', ['R', 'EH2', 'K', 'W', 'AH0', 'Z', 'IH1', 'SH', 'AH0', 'N']), 'res': ('NNS', ['R', 'EY1', 'Z']), 'resale': ('NN', ['R', 'IY1', 'S', 'EY2', 'L']), 'rescinded': ('VBN', ['R', 'IH0', 'S', 'IH1', 'N', 'D', 'IH0', 'D']), 'rescinding': ('VBG', ['R', 'IH0', 'S', 'IH1', 'N', 'D', 'IH0', 'NG']), 'rescind': ('NN', ['R', 'IH0', 'S', 'IH1', 'N', 'D']), 'rescission': ('NN', ['R', 'IH0', 'S', 'IH1', 'ZH', 'AH0', 'N']), 'rescued': ('VBN', ['R', 'EH1', 'S', 'K', 'Y', 'UW0', 'D']), 'rescuing': ('VBG', ['R', 'EH1', 'S', 'K', 'Y', 'UW0', 'IH0', 'NG']), 'rescue': ('NN', ['R', 'EH1', 'S', 'K', 'Y', 'UW0']), 'rescuer': ('NN', ['R', 'EH1', 'S', 'K', 'Y', 'UW2', 'ER0']), 'research': ('NN', ['R', 'IY0', 'S', 'ER1', 'CH']), 'researcher': ('NN', ['R', 'IY1', 'S', 'ER0', 'CH', 'ER0']), 'reseda': ('NN', ['R', 'EH0', 'S', 'EY1', 'D', 'AH0']), 'resell': ('NN', ['R', 'IY0', 'S', 'EH1', 'L']), 'resemblance': ('NN', ['R', 'IH0', 'Z', 'EH1', 'M', 'B', 'L', 'AH0', 'N', 'S']), 'resembled': ('VBN', ['R', 'IH0', 'Z', 'EH1', 'M', 'B', 'AH0', 'L', 'D']), 'resembling': ('VBG', ['R', 'IH0', 'Z', 'EH1', 'M', 'B', 'AH0', 'L', 'IH0', 'NG']), 'resemble': ('JJ', ['R', 'IH0', 'Z', 'EH1', 'M', 'B', 'AH0', 'L']), 'resented': ('VBN', ['R', 'IY0', 'Z', 'EH1', 'N', 'T', 'AH0', 'D']), 'resenting': ('VBG', ['R', 'IH0', 'Z', 'EH1', 'N', 'T', 'IH0', 'NG']), 'resent': ('NN', ['R', 'IH0', 'Z', 'EH1', 'N', 'T']), 'resentful': ('NN', ['R', 'IH0', 
'Z', 'EH1', 'N', 'T', 'F', 'AH0', 'L']), 'resentment': ('NN', ['R', 'IH0', 'Z', 'EH1', 'N', 'T', 'M', 'AH0', 'N', 'T']), 'reservation': ('NN', ['R', 'EH2', 'Z', 'ER0', 'V', 'EY1', 'SH', 'AH0', 'N']), 'reserved': ('VBN', ['R', 'IH0', 'Z', 'ER1', 'V', 'D']), 'reserving': ('VBG', ['R', 'IH0', 'Z', 'ER1', 'V', 'IH0', 'NG']), 'reserve': ('NN', ['R', 'IH0', 'Z', 'ER1', 'V']), 'reservist': ('NN', ['R', 'IH0', 'Z', 'ER1', 'V', 'IH0', 'S', 'T']), 'reservoir': ('NN', ['R', 'EH1', 'Z', 'AH0', 'V', 'W', 'AA2', 'R']), 'reset': ('NN', ['R', 'IY0', 'S', 'EH1', 'T']), 'resettle': ('NN', ['R', 'IY0', 'S', 'EH1', 'T', 'AH0', 'L']), 'resettlement': ('NN', ['R', 'IY0', 'S', 'EH1', 'T', 'AH0', 'L', 'M', 'AH0', 'N', 'T']), 'reshape': ('NN', ['R', 'IY0', 'SH', 'EY1', 'P']), 'resided': ('VBN', ['R', 'IH0', 'Z', 'AY1', 'D', 'IH0', 'D']), 'residing': ('VBG', ['R', 'IH0', 'Z', 'AY1', 'D', 'IH0', 'NG']), 'reside': ('NN', ['R', 'IH0', 'Z', 'AY1', 'D']), 'residence': ('NN', ['R', 'EH1', 'Z', 'IH0', 'D', 'AH0', 'N', 'S']), 'residency': ('NN', ['R', 'EH1', 'Z', 'IH0', 'D', 'AH0', 'N', 'S', 'IY0']), 'resident': ('NN', ['R', 'EH1', 'Z', 'IH0', 'D', 'AH0', 'N', 'T']), 'residential': ('JJ', ['R', 'EH2', 'Z', 'IH0', 'D', 'EH1', 'N', 'CH', 'AH0', 'L']), 'residual': ('JJ', ['R', 'IH0', 'Z', 'IH1', 'JH', 'UW0', 'AH0', 'L']), 'residue': ('NN', ['R', 'EH1', 'Z', 'AH0', 'D', 'UW2']), 'resigned': ('VBD', ['R', 'IH0', 'Z', 'AY1', 'N', 'D']), 'resigning': ('VBG', ['R', 'IH0', 'Z', 'AY1', 'N', 'IH0', 'NG']), 'resign': ('NN', ['R', 'IH0', 'Z', 'AY1', 'N']), 'resignation': ('NN', ['R', 'EH2', 'Z', 'AH0', 'G', 'N', 'EY1', 'SH', 'AH0', 'N']), 'resignee': ('NN', ['R', 'EH2', 'Z', 'IH0', 'G', 'N', 'IY1']), 'resilience': ('NN', ['R', 'IH0', 'Z', 'IH1', 'L', 'IY0', 'AH0', 'N', 'S']), 'resiliency': ('NN', ['R', 'IH0', 'Z', 'IH1', 'L', 'Y', 'AH0', 'N', 'S', 'IY0']), 'resilient': ('NN', ['R', 'IH0', 'Z', 'IH1', 'L', 'Y', 'AH0', 'N', 'T']), 'resin': ('NN', ['R', 'EH1', 'Z', 'AH0', 'N']), 'resisted': ('VBN', ['R', 'IH0', 
'Z', 'IH1', 'S', 'T', 'IH0', 'D']), 'resisting': ('VBG', ['R', 'IH0', 'Z', 'IH1', 'S', 'T', 'IH0', 'NG']), 'resist': ('NN', ['R', 'IH0', 'Z', 'IH1', 'S', 'T']), 'resistance': ('NN', ['R', 'IH0', 'Z', 'IH1', 'S', 'T', 'AH0', 'N', 'S']), 'resistant': ('NN', ['R', 'IH0', 'Z', 'IH1', 'S', 'T', 'AH0', 'N', 'T']), 'resolute': ('NN', ['R', 'EH1', 'Z', 'AH0', 'L', 'UW2', 'T']), 'resolutely': ('RB', ['R', 'EH1', 'S', 'AH0', 'L', 'UW2', 'T', 'L', 'IY0']), 'resolution': ('NN', ['R', 'EH2', 'Z', 'AH0', 'L', 'UW1', 'SH', 'AH0', 'N']), 'resolved': ('VBN', ['R', 'IY0', 'Z', 'AA1', 'L', 'V', 'D']), 'resolving': ('VBG', ['R', 'IY0', 'Z', 'AA1', 'L', 'V', 'IH0', 'NG']), 'resolve': ('NN', ['R', 'IY0', 'Z', 'AA1', 'L', 'V']), 'resonance': ('NN', ['R', 'EH1', 'Z', 'AH0', 'N', 'AH0', 'N', 'S']), 'resonant': ('NN', ['R', 'EH1', 'Z', 'AH0', 'N', 'AH0', 'N', 'T']), 'resort': ('NN', ['R', 'IH0', 'Z', 'AO1', 'R', 'T']), 'resorted': ('VBN', ['R', 'IH0', 'Z', 'AO1', 'R', 'T', 'IH0', 'D']), 'resorting': ('VBG', ['R', 'IH0', 'Z', 'AO1', 'R', 'T', 'IH0', 'NG']), 'resounding': ('VBG', ['R', 'IY0', 'S', 'AW1', 'N', 'D', 'IH0', 'NG']), 'resound': ('NN', ['R', 'IY2', 'S', 'AW1', 'N', 'D']), 'resource': ('NN', ['R', 'IY1', 'S', 'AO0', 'R', 'S']), 'resourceful': ('NN', ['R', 'IY0', 'S', 'AO1', 'R', 'S', 'F', 'AH0', 'L']), 'respeak': ('NN', ['R', 'IY0', 'S', 'P', 'IY1', 'K']), 'respected': ('VBN', ['R', 'IH0', 'S', 'P', 'EH1', 'K', 'T', 'IH0', 'D']), 'respecting': ('VBG', ['R', 'IY0', 'S', 'P', 'EH1', 'K', 'T', 'IH0', 'NG']), 'respect': ('NN', ['R', 'IH0', 'S', 'P', 'EH1', 'K', 'T']), 'respectability': ('NN', ['R', 'IY0', 'S', 'P', 'EH2', 'K', 'T', 'AH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'respectable': ('JJ', ['R', 'IH0', 'S', 'P', 'EH1', 'K', 'T', 'AH0', 'B', 'AH0', 'L']), 'respectful': ('NN', ['R', 'IH0', 'S', 'P', 'EH1', 'K', 'T', 'F', 'AH0', 'L']), 'respective': ('NN', ['R', 'IH0', 'S', 'P', 'EH1', 'K', 'T', 'IH0', 'V']), 'respectively': ('RB', ['R', 'IH0', 'S', 'P', 'EH1', 'K', 'T', 'IH0', 
'V', 'L', 'IY0']), 'respiration': ('NN', ['R', 'EH2', 'S', 'P', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'respirator': ('NN', ['R', 'EH1', 'S', 'P', 'ER0', 'EY2', 'T', 'ER0']), 'respiratory': ('NN', ['R', 'EH1', 'S', 'P', 'ER0', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'respite': ('NN', ['R', 'EH1', 'S', 'P', 'IH0', 'T']), 'resplendent': ('NN', ['R', 'IY0', 'S', 'P', 'L', 'EH1', 'N', 'D', 'AH0', 'N', 'T']), 'responded': ('VBD', ['R', 'IH0', 'S', 'P', 'AA1', 'N', 'D', 'IH0', 'D']), 'responding': ('VBG', ['R', 'IH0', 'S', 'P', 'AA1', 'N', 'D', 'IH0', 'NG']), 'respond': ('NN', ['R', 'IH0', 'S', 'P', 'AA1', 'N', 'D']), 'respondent': ('NN', ['R', 'IH0', 'S', 'P', 'AA1', 'N', 'D', 'AH0', 'N', 'T']), 'response': ('NN', ['R', 'IH0', 'S', 'P', 'AA1', 'N', 'S']), 'responsibility': ('NN', ['R', 'IY0', 'S', 'P', 'AA2', 'N', 'S', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'responsible': ('JJ', ['R', 'IY0', 'S', 'P', 'AA1', 'N', 'S', 'AH0', 'B', 'AH0', 'L']), 'responsive': ('NN', ['R', 'IH0', 'S', 'P', 'AA1', 'N', 'S', 'IH0', 'V']), 'rest': ('NN', ['R', 'EH1', 'S', 'T']), 'rested': ('VBN', ['R', 'EH1', 'S', 'T', 'AH0', 'D']), 'resting': ('VBG', ['R', 'EH1', 'S', 'T', 'IH0', 'NG']), 'restate': ('NN', ['R', 'IY0', 'S', 'T', 'EY1', 'T']), 'restaurant': ('NN', ['R', 'EH1', 'S', 'T', 'ER0', 'AA2', 'N', 'T']), 'restaurateur': ('NN', ['R', 'EH2', 'S', 'T', 'ER0', 'AH0', 'T', 'ER1']), 'restful': ('NN', ['R', 'EH1', 'S', 'T', 'F', 'AH0', 'L']), 'restitute': ('NN', ['R', 'EH1', 'S', 'T', 'IH0', 'T', 'UW2', 'T']), 'restitution': ('NN', ['R', 'EH2', 'S', 'T', 'IH0', 'T', 'UW1', 'SH', 'AH0', 'N']), 'restive': ('NN', ['R', 'EH1', 'S', 'T', 'IH0', 'V']), 'restless': ('NN', ['R', 'EH1', 'S', 'T', 'L', 'AH0', 'S']), 'restoration': ('NN', ['R', 'EH2', 'S', 'T', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'restorative': ('NN', ['R', 'AH0', 'S', 'T', 'AO1', 'R', 'AH0', 'T', 'IH0', 'V']), 'restored': ('VBN', ['R', 'IH0', 'S', 'T', 'AO1', 'R', 'D']), 'restoring': ('VBG', ['R', 'IH0', 'S', 'T', 'AO1', 'R', 'IH0', 'NG']), 
'restore': ('NN', ['R', 'IH0', 'S', 'T', 'AO1', 'R']), 'restorer': ('NN', ['R', 'IH0', 'S', 'T', 'AO1', 'R', 'ER0']), 'restrained': ('VBN', ['R', 'IY0', 'S', 'T', 'R', 'EY1', 'N', 'D']), 'restraining': ('VBG', ['R', 'IY0', 'S', 'T', 'R', 'EY1', 'N', 'IH0', 'NG']), 'restrain': ('NN', ['R', 'IY0', 'S', 'T', 'R', 'EY1', 'N']), 'restraint': ('NN', ['R', 'IH0', 'S', 'T', 'R', 'EY1', 'N', 'T']), 'restrict': ('NN', ['R', 'IY0', 'S', 'T', 'R', 'IH1', 'K', 'T']), 'restricted': ('VBN', ['R', 'IY0', 'S', 'T', 'R', 'IH1', 'K', 'T', 'AH0', 'D']), 'restricting': ('VBG', ['R', 'IY0', 'S', 'T', 'R', 'IH1', 'K', 'T', 'IH0', 'NG']), 'restriction': ('NN', ['R', 'IY0', 'S', 'T', 'R', 'IH1', 'K', 'SH', 'AH0', 'N']), 'restrictive': ('NN', ['R', 'IY0', 'S', 'T', 'R', 'IH1', 'K', 'T', 'IH0', 'V']), 'resulted': ('VBD', ['R', 'IH0', 'Z', 'AH1', 'L', 'T', 'IH0', 'D']), 'resulting': ('VBG', ['R', 'IH0', 'Z', 'AH1', 'L', 'T', 'IH0', 'NG']), 'result': ('NN', ['R', 'IH0', 'Z', 'AH1', 'L', 'T']), 'resultant': ('NN', ['R', 'IY0', 'Z', 'AH1', 'L', 'T', 'AH0', 'N', 'T']), 'resume': ('NN', ['R', 'IH0', 'Z', 'UW1', 'M']), 'resumed': ('VBD', ['R', 'IH0', 'Z', 'UW1', 'M', 'D']), 'resuming': ('VBG', ['R', 'IH0', 'Z', 'UW1', 'M', 'IH0', 'NG']), 'resumption': ('NN', ['R', 'IH0', 'Z', 'AH1', 'M', 'P', 'SH', 'AH0', 'N']), 'resupply': ('NN', ['R', 'IY0', 'S', 'AH0', 'P', 'L', 'AY1']), 'resurgence': ('NN', ['R', 'IY0', 'S', 'ER1', 'JH', 'AH0', 'N', 'S']), 'resurgent': ('NN', ['R', 'IH0', 'S', 'ER1', 'JH', 'AH0', 'N', 'T']), 'resurrect': ('NN', ['R', 'EH2', 'Z', 'ER0', 'EH1', 'K', 'T']), 'resurrection': ('NN', ['R', 'EH2', 'Z', 'ER0', 'EH1', 'K', 'SH', 'AH0', 'N']), 'resuscitate': ('NN', ['R', 'IH0', 'S', 'AH1', 'S', 'IH0', 'T', 'EY2', 'T']), 'resuscitated': ('VBN', ['R', 'IH0', 'S', 'AH1', 'S', 'IH0', 'T', 'EY2', 'T', 'IH0', 'D']), 'resuscitating': ('VBG', ['R', 'IH0', 'S', 'AH1', 'S', 'IH0', 'T', 'EY2', 'T', 'IH0', 'NG']), 'resuscitation': ('NN', ['R', 'IH0', 'S', 'AH2', 'S', 'IH0', 'T', 'EY1', 'SH', 'AH0', 
'N']), 'ret': ('NN', ['R', 'EH1', 'T']), 'retail': ('JJ', ['R', 'IY1', 'T', 'EY2', 'L']), 'retailed': ('VBN', ['R', 'IY1', 'T', 'EY2', 'L', 'D']), 'retailing': ('NN', ['R', 'IY1', 'T', 'EY2', 'L', 'IH0', 'NG']), 'retailer': ('NN', ['R', 'IY1', 'T', 'EY2', 'L', 'ER0']), 'retained': ('VBN', ['R', 'IH0', 'T', 'EY1', 'N', 'D']), 'retaining': ('VBG', ['R', 'IH0', 'T', 'EY1', 'N', 'IH0', 'NG']), 'retain': ('NN', ['R', 'IH0', 'T', 'EY1', 'N']), 'retainer': ('NN', ['R', 'IH0', 'T', 'EY1', 'N', 'ER0']), 'retake': ('NN', ['R', 'IY1', 'T', 'EY1', 'K']), 'retaliated': ('VBN', ['R', 'IH0', 'T', 'AE1', 'L', 'IY0', 'EY2', 'T', 'IH0', 'D']), 'retaliating': ('VBG', ['R', 'IH0', 'T', 'AE1', 'L', 'IY0', 'EY2', 'T', 'IH0', 'NG']), 'retaliate': ('NN', ['R', 'IH0', 'T', 'AE1', 'L', 'IY0', 'EY2', 'T']), 'retaliation': ('NN', ['R', 'IY0', 'T', 'AE2', 'L', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'retaliatory': ('NN', ['R', 'IY0', 'T', 'AE1', 'L', 'Y', 'AH0', 'T', 'AO2', 'R', 'IY0']), 'retarded': ('VBD', ['R', 'IH0', 'T', 'AA1', 'R', 'D', 'IH0', 'D']), 'retarding': ('VBG', ['R', 'IH0', 'T', 'AA1', 'R', 'D', 'IH0', 'NG']), 'retard': ('NN', ['R', 'IH0', 'T', 'AA1', 'R', 'D']), 'retardation': ('NN', ['R', 'IY0', 'T', 'AA0', 'R', 'D', 'EY1', 'SH', 'AH0', 'N']), 'retell': ('NN', ['R', 'IY0', 'T', 'EH1', 'L']), 'retention': ('NN', ['R', 'IY0', 'T', 'EH1', 'N', 'SH', 'AH0', 'N']), 'reticence': ('NN', ['R', 'EH1', 'T', 'IH0', 'S', 'AH0', 'N', 'S']), 'reticent': ('NN', ['R', 'EH1', 'T', 'IH0', 'S', 'AH0', 'N', 'T']), 'retina': ('NN', ['R', 'EH1', 'T', 'AH0', 'N', 'AH0']), 'retinal': ('JJ', ['R', 'EH1', 'T', 'AH0', 'N', 'AH0', 'L']), 'retinoid': ('NN', ['R', 'EH1', 'T', 'IH0', 'N', 'OY0', 'D']), 'retinue': ('NN', ['R', 'EH1', 'T', 'AH0', 'N', 'UW2']), 'retired': ('VBN', ['R', 'IH0', 'T', 'AY1', 'R', 'D']), 'retiring': ('VBG', ['R', 'IH0', 'T', 'AY1', 'R', 'IH0', 'NG']), 'retire': ('NN', ['R', 'IH0', 'T', 'AY1', 'R']), 'retirement': ('NN', ['R', 'IY0', 'T', 'AY1', 'ER0', 'M', 'AH0', 'N', 'T']), 'retorted': 
('VBN', ['R', 'IY0', 'T', 'AO1', 'R', 'T', 'IH0', 'D']), 'retort': ('NN', ['R', 'IY1', 'T', 'AO2', 'R', 'T']), 'retrace': ('NN', ['R', 'IY0', 'T', 'R', 'EY1', 'S']), 'retracted': ('VBN', ['R', 'IY0', 'T', 'R', 'AE1', 'K', 'T', 'AH0', 'D']), 'retracting': ('VBG', ['R', 'IY0', 'T', 'R', 'AE1', 'K', 'T', 'IH0', 'NG']), 'retract': ('NN', ['R', 'IY0', 'T', 'R', 'AE1', 'K', 'T']), 'retractable': ('JJ', ['R', 'IY0', 'T', 'R', 'AE1', 'K', 'T', 'AH0', 'B', 'AH0', 'L']), 'retraction': ('NN', ['R', 'IY0', 'T', 'R', 'AE1', 'K', 'SH', 'AH0', 'N']), 'retread': ('NN', ['R', 'IY0', 'T', 'R', 'EH1', 'D']), 'retreat': ('NN', ['R', 'IY0', 'T', 'R', 'IY1', 'T']), 'retreated': ('VBN', ['R', 'IY0', 'T', 'R', 'IY1', 'T', 'AH0', 'D']), 'retreating': ('VBG', ['R', 'IY0', 'T', 'R', 'IY1', 'T', 'IH0', 'NG']), 'retrenched': ('VBN', ['R', 'IY0', 'T', 'R', 'EH1', 'N', 'CH', 'T']), 'retrenching': ('VBG', ['R', 'IY0', 'T', 'R', 'EH1', 'N', 'CH', 'IH0', 'NG']), 'retrench': ('NN', ['R', 'IY0', 'T', 'R', 'EH1', 'N', 'CH']), 'retrenchment': ('NN', ['R', 'IY0', 'T', 'R', 'EH1', 'N', 'CH', 'M', 'AH0', 'N', 'T']), 'retrial': ('NN', ['R', 'IY0', 'T', 'R', 'AY1', 'AH0', 'L']), 'retribution': ('NN', ['R', 'EH2', 'T', 'R', 'AH0', 'B', 'Y', 'UW1', 'SH', 'AH0', 'N']), 'retrieval': ('NN', ['R', 'IH0', 'T', 'R', 'IY1', 'V', 'AH0', 'L']), 'retrieved': ('VBN', ['R', 'IY0', 'T', 'R', 'IY1', 'V', 'D']), 'retrieving': ('VBG', ['R', 'IY0', 'T', 'R', 'IY1', 'V', 'IH0', 'NG']), 'retrieve': ('NN', ['R', 'IH0', 'T', 'R', 'IY1', 'V']), 'retriever': ('NN', ['R', 'IY0', 'T', 'R', 'IY1', 'V', 'ER0']), 'retroactive': ('JJ', ['R', 'EH2', 'T', 'R', 'OW0', 'AE1', 'K', 'T', 'IH0', 'V']), 'retroactively': ('RB', ['R', 'EH2', 'T', 'R', 'OW0', 'AE1', 'K', 'T', 'IH0', 'V', 'L', 'IY0']), 'retrocession': ('NN', ['R', 'EH2', 'T', 'R', 'OW0', 'S', 'EH1', 'SH', 'AH0', 'N']), 'retrograde': ('NN', ['R', 'EH1', 'T', 'R', 'AH0', 'G', 'R', 'EY2', 'D']), 'retrospect': ('NN', ['R', 'EH1', 'T', 'R', 'AH0', 'S', 'P', 'EH2', 'K', 'T']), 
'retrospective': ('NN', ['R', 'EH2', 'T', 'R', 'AH0', 'S', 'P', 'EH1', 'K', 'T', 'IH0', 'V']), 'retrospectively': ('RB', ['R', 'EH2', 'T', 'R', 'OW0', 'S', 'P', 'EH1', 'K', 'T', 'IH0', 'V', 'L', 'IY0']), 'retry': ('NN', ['R', 'IY0', 'T', 'R', 'AY1']), 'returned': ('VBN', ['R', 'IH0', 'T', 'ER1', 'N', 'D']), 'returning': ('VBG', ['R', 'IH0', 'T', 'ER1', 'N', 'IH0', 'NG']), 'return': ('NN', ['R', 'IH0', 'T', 'ER1', 'N']), 'returnable': ('JJ', ['R', 'IY0', 'T', 'ER1', 'N', 'AH0', 'B', 'AH0', 'L']), 'reule': ('NN', ['R', 'UW1', 'L']), 'reunion': ('NN', ['R', 'IY0', 'UW1', 'N', 'Y', 'AH0', 'N']), 'reunite': ('NN', ['R', 'IY2', 'UW0', 'N', 'AY1', 'T']), 'revaluation': ('NN', ['R', 'IY0', 'V', 'AE1', 'L', 'Y', 'UW0', 'EY1', 'SH', 'AH0', 'N']), 'revamp': ('NN', ['R', 'IY0', 'V', 'AE1', 'M', 'P']), 'revealed': ('VBD', ['R', 'IH0', 'V', 'IY1', 'L', 'D']), 'revealing': ('VBG', ['R', 'IH0', 'V', 'IY1', 'L', 'IH0', 'NG']), 'reveal': ('NN', ['R', 'IH0', 'V', 'IY1', 'L']), 'revel': ('NN', ['R', 'EH1', 'V', 'AH0', 'L']), 'reveled': ('VBN', ['R', 'EH1', 'V', 'AH0', 'L', 'D']), 'reveling': ('VBG', ['R', 'EH1', 'V', 'AH0', 'L', 'IH0', 'NG']), 'revelation': ('NN', ['R', 'EH2', 'V', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'reveler': ('NN', ['R', 'EH1', 'V', 'AH0', 'L', 'ER0']), 'revelry': ('NN', ['R', 'EH1', 'V', 'AH0', 'L', 'R', 'IY0']), 'revenge': ('NN', ['R', 'IY0', 'V', 'EH1', 'N', 'JH']), 'revenue': ('NN', ['R', 'EH1', 'V', 'AH0', 'N', 'UW2']), 'reverb': ('NN', ['R', 'IY0', 'V', 'ER1', 'B']), 'reverberate': ('NN', ['R', 'IH0', 'V', 'ER1', 'B', 'ER0', 'AH0', 'T']), 'reverberated': ('VBN', ['R', 'IH0', 'V', 'ER1', 'B', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'reverberating': ('VBG', ['R', 'IH0', 'V', 'ER1', 'B', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'reverberation': ('NN', ['R', 'IY0', 'V', 'ER2', 'B', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'revered': ('VBN', ['R', 'IH0', 'V', 'IH1', 'R', 'D']), 'revering': ('VBG', ['R', 'IH0', 'V', 'IH1', 'R', 'IH0', 'NG']), 'revere': ('RB', ['R', 'IH0', 'V', 
'IH1', 'R']), 'reverence': ('NN', ['R', 'EH1', 'V', 'ER0', 'AH0', 'N', 'S']), 'reverend': ('NN', ['R', 'EH1', 'V', 'ER0', 'AH0', 'N', 'D']), 'reverent': ('NN', ['R', 'EH1', 'V', 'ER0', 'AH0', 'N', 'T']), 'reverential': ('NN', ['R', 'EH2', 'V', 'ER0', 'EH1', 'N', 'CH', 'AH0', 'L']), 'reverently': ('RB', ['R', 'EH1', 'V', 'ER0', 'AH0', 'N', 'T', 'L', 'IY0']), 'reveries': ('NNS', ['R', 'EH1', 'V', 'ER0', 'IY0', 'Z']), 'reverie': ('NN', ['R', 'EH1', 'V', 'ER0', 'IY0']), 'reversal': ('NN', ['R', 'IH0', 'V', 'ER1', 'S', 'AH0', 'L']), 'reverse': ('NN', ['R', 'IH0', 'V', 'ER1', 'S']), 'reversed': ('VBN', ['R', 'IH0', 'V', 'ER1', 'S', 'T']), 'reversing': ('VBG', ['R', 'IH0', 'V', 'ER1', 'S', 'IH0', 'NG']), 'reverser': ('NN', ['R', 'IH0', 'V', 'ER1', 'S', 'ER0']), 'reversible': ('JJ', ['R', 'IH0', 'V', 'ER1', 'S', 'AH0', 'B', 'AH0', 'L']), 'reversion': ('NN', ['R', 'IH0', 'V', 'ER1', 'ZH', 'AH0', 'N']), 'reverted': ('VBN', ['R', 'IH0', 'V', 'ER1', 'T', 'IH0', 'D']), 'reverting': ('VBG', ['R', 'IH0', 'V', 'ER1', 'T', 'IH0', 'NG']), 'revert': ('NN', ['R', 'IH0', 'V', 'ER1', 'T']), 'review': ('NN', ['R', 'IY2', 'V', 'Y', 'UW1']), 'reviewer': ('NN', ['R', 'IY0', 'V', 'Y', 'UW1', 'ER0']), 'reviled': ('VBN', ['R', 'IY0', 'V', 'AY1', 'L', 'D']), 'revile': ('NN', ['R', 'IY0', 'V', 'AY1', 'L']), 'revised': ('VBN', ['R', 'IH0', 'V', 'AY1', 'Z', 'D']), 'revising': ('VBG', ['R', 'IH0', 'V', 'AY1', 'Z', 'IH0', 'NG']), 'revise': ('NN', ['R', 'IH0', 'V', 'AY1', 'Z']), 'revision': ('NN', ['R', 'IY0', 'V', 'IH1', 'ZH', 'AH0', 'N']), 'revisit': ('NN', ['R', 'IY0', 'V', 'IH1', 'Z', 'IH0', 'T']), 'revitalize': ('VB', ['R', 'IY0', 'V', 'AY1', 'T', 'AH0', 'L', 'AY2', 'Z']), 'revival': ('NN', ['R', 'IH0', 'V', 'AY1', 'V', 'AH0', 'L']), 'revivalist': ('NN', ['R', 'IY0', 'V', 'AY1', 'V', 'AH0', 'L', 'IH0', 'S', 'T']), 'revived': ('VBN', ['R', 'IH0', 'V', 'AY1', 'V', 'D']), 'reviving': ('VBG', ['R', 'IH0', 'V', 'AY1', 'V', 'IH0', 'NG']), 'revive': ('NN', ['R', 'IH0', 'V', 'AY1', 'V']), 'revocable': 
('JJ', ['R', 'EH1', 'V', 'AH0', 'K', 'AH0', 'B', 'AH0', 'L']), 'revocation': ('NN', ['R', 'EH2', 'V', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'revoked': ('VBN', ['R', 'IH0', 'V', 'OW1', 'K', 'T']), 'revoking': ('VBG', ['R', 'IH0', 'V', 'OW1', 'K', 'IH0', 'NG']), 'revoke': ('NN', ['R', 'IH0', 'V', 'OW1', 'K']), 'revolted': ('VBN', ['R', 'IH0', 'V', 'OW1', 'L', 'T', 'IH0', 'D']), 'revolting': ('VBG', ['R', 'IY0', 'V', 'OW1', 'L', 'T', 'IH0', 'NG']), 'revolt': ('NN', ['R', 'IH0', 'V', 'OW1', 'L', 'T']), 'revolution': ('NN', ['R', 'EH2', 'V', 'AH0', 'L', 'UW1', 'SH', 'AH0', 'N']), 'revolutionary': ('JJ', ['R', 'EH2', 'V', 'AH0', 'L', 'UW1', 'SH', 'AH0', 'N', 'EH2', 'R', 'IY0']), 'revolutionist': ('NN', ['R', 'EH2', 'V', 'AH0', 'L', 'UW1', 'SH', 'AH0', 'N', 'IH0', 'S', 'T']), 'revolutionizing': ('VBG', ['R', 'EH2', 'V', 'AH0', 'L', 'UW1', 'SH', 'AH0', 'N', 'AY2', 'Z', 'IH0', 'NG']), 'revolutionize': ('VB', ['R', 'EH2', 'V', 'AH0', 'L', 'UW1', 'SH', 'AH0', 'N', 'AY2', 'Z']), 'revolved': ('VBN', ['R', 'IY0', 'V', 'AA1', 'L', 'V', 'D']), 'revolving': ('VBG', ['R', 'IY0', 'V', 'AA1', 'L', 'V', 'IH0', 'NG']), 'revolve': ('NN', ['R', 'IY0', 'V', 'AA1', 'L', 'V']), 'revolver': ('NN', ['R', 'IH0', 'V', 'AA1', 'L', 'V', 'ER0']), 'revulsion': ('NN', ['R', 'IH0', 'V', 'AH1', 'L', 'SH', 'AH0', 'N']), 'rew': ('NN', ['R', 'UW1']), 'rewarded': ('VBN', ['R', 'IH0', 'W', 'AO1', 'R', 'D', 'IH0', 'D']), 'rewarding': ('VBG', ['R', 'IH0', 'W', 'AO1', 'R', 'D', 'IH0', 'NG']), 'reward': ('NN', ['R', 'IH0', 'W', 'AO1', 'R', 'D']), 'reword': ('NN', ['R', 'IY0', 'W', 'ER1', 'D']), 'rewrite': ('NN', ['R', 'IY0', 'R', 'AY1', 'T']), 'rex': ('NN', ['R', 'EH1', 'K', 'S']), 'reynard': ('NN', ['R', 'EY1', 'N', 'ER0', 'D']), 'rhapsodic': ('NN', ['R', 'AE0', 'P', 'S', 'AA1', 'D', 'IH0', 'K']), 'rhapsodize': ('VB', ['R', 'AE1', 'P', 'S', 'AH0', 'D', 'AY2', 'Z']), 'rhapsody': ('NN', ['R', 'AE1', 'P', 'S', 'AH0', 'D', 'IY0']), 'rhea': ('NN', ['R', 'IY1', 'AH0']), 'rhein': ('NN', ['R', 'AY1', 'N']), 'rhesus': 
('NN', ['R', 'IY1', 'S', 'AH0', 'S']), 'rhetoric': ('NN', ['R', 'EH1', 'T', 'ER0', 'IH0', 'K']), 'rhetorical': ('JJ', ['R', 'IH0', 'T', 'AO1', 'R', 'IH0', 'K', 'AH0', 'L']), 'rhetorician': ('NN', ['R', 'EH2', 'T', 'ER0', 'IH1', 'SH', 'AH0', 'N']), 'rheumatic': ('JJ', ['R', 'UW0', 'M', 'AE1', 'T', 'IH0', 'K']), 'rheumatism': ('NN', ['R', 'UW1', 'M', 'AH0', 'T', 'IH2', 'Z', 'AH0', 'M']), 'rhine': ('NN', ['R', 'AY1', 'N']), 'rhinestone': ('NN', ['R', 'AY1', 'N', 'S', 'T', 'OW2', 'N']), 'rhino': ('NN', ['R', 'AY1', 'N', 'OW2']), 'rhinoceros': ('NNS', ['R', 'AY0', 'N', 'AA1', 'S', 'ER0', 'AH0', 'S']), 'rhizoid': ('NN', ['R', 'AY1', 'Z', 'OY2', 'D']), 'rhizome': ('NN', ['R', 'AY1', 'Z', 'OW2', 'M']), 'rhodium': ('NN', ['R', 'OW1', 'D', 'IY0', 'AH0', 'M']), 'rhododendron': ('NN', ['R', 'OW2', 'D', 'AH0', 'D', 'EH1', 'N', 'D', 'R', 'AH0', 'N']), 'rhodopsin': ('NN', ['R', 'OW0', 'D', 'AA1', 'P', 'S', 'AH0', 'N']), 'rhubarb': ('NN', ['R', 'UW1', 'B', 'AA2', 'R', 'B']), 'rhyme': ('NN', ['R', 'AY1', 'M']), 'rhymed': ('VBN', ['R', 'AY1', 'M', 'D']), 'rhyming': ('VBG', ['R', 'AY1', 'M', 'IH0', 'NG']), 'rhymer': ('NN', ['R', 'AY1', 'M', 'ER0']), 'rhythm': ('NN', ['R', 'IH1', 'DH', 'AH0', 'M']), 'rhythmic': ('NN', ['R', 'IH1', 'DH', 'M', 'IH0', 'K']), 'rhythmically': ('RB', ['R', 'IH1', 'DH', 'M', 'IH0', 'K', 'L', 'IY0']), 'rial': ('NN', ['R', 'AY1', 'AH0', 'L']), 'rib': ('NN', ['R', 'IH1', 'B']), 'ribbed': ('NN', ['R', 'IH1', 'B', 'D']), 'ribbing': ('VBG', ['R', 'IH1', 'B', 'IH0', 'NG']), 'ribald': ('NN', ['R', 'AY1', 'B', 'AA0', 'L', 'D']), 'ribaldry': ('NN', ['R', 'AY1', 'B', 'AA0', 'L', 'D', 'R', 'IY0']), 'ribbon': ('NN', ['R', 'IH1', 'B', 'AH0', 'N']), 'rice': ('NN', ['R', 'AY1', 'S']), 'rich': ('JJ', ['R', 'IH1', 'CH']), 'riches': ('NNS', ['R', 'IH1', 'CH', 'AH0', 'Z']), 'richly': ('RB', ['R', 'IH1', 'CH', 'L', 'IY0']), 'richness': ('NN', ['R', 'IH1', 'CH', 'N', 'AH0', 'S']), 'rick': ('NN', ['R', 'IH1', 'K']), 'ricker': ('NN', ['R', 'IH1', 'K', 'ER0']), 'rickets': ('NNS', 
['R', 'IH1', 'K', 'IH0', 'T', 'S']), 'rickety': ('NN', ['R', 'IH1', 'K', 'AH0', 'T', 'IY0']), 'ricochet': ('NN', ['R', 'IH1', 'K', 'AH0', 'SH', 'EY2']), 'rid': ('JJ', ['R', 'IH1', 'D']), 'ridding': ('VBG', ['R', 'IH1', 'D', 'IH0', 'NG']), 'ridable': ('JJ', ['R', 'AY1', 'D', 'AH0', 'B', 'AH0', 'L']), 'riddance': ('NN', ['R', 'IH1', 'D', 'AH0', 'N', 'S']), 'ridden': ('NN', ['R', 'IH1', 'D', 'AH0', 'N']), 'ridder': ('NN', ['R', 'IH1', 'D', 'ER0']), 'riddle': ('NN', ['R', 'IH1', 'D', 'AH0', 'L']), 'riddled': ('VBD', ['R', 'IH1', 'D', 'AH0', 'L', 'D']), 'riddler': ('NN', ['R', 'IH1', 'D', 'L', 'ER0']), 'rode': ('NN', ['R', 'OW1', 'D']), 'riding': ('VBG', ['R', 'AY1', 'D', 'IH0', 'NG']), 'ride': ('NN', ['R', 'AY1', 'D']), 'rideau': ('NN', ['R', 'IH0', 'D', 'OW1']), 'riden': ('NN', ['R', 'AY1', 'D', 'AH0', 'N']), 'rider': ('NN', ['R', 'AY1', 'D', 'ER0']), 'ridge': ('NN', ['R', 'IH1', 'JH']), 'ridged': ('VBN', ['R', 'IH1', 'JH', 'D']), 'ridicule': ('NN', ['R', 'IH1', 'D', 'AH0', 'K', 'Y', 'UW2', 'L']), 'ridiculed': ('VBN', ['R', 'IH1', 'D', 'AH0', 'K', 'Y', 'UW2', 'L', 'D']), 'ridiculing': ('VBG', ['R', 'IH1', 'D', 'AH0', 'K', 'Y', 'UW2', 'L', 'IH0', 'NG']), 'ridiculous': ('JJ', ['R', 'IH0', 'D', 'IH1', 'K', 'Y', 'AH0', 'L', 'AH0', 'S']), 'rief': ('NN', ['R', 'IY1', 'F']), 'rife': ('NN', ['R', 'AY1', 'F']), 'riffle': ('NN', ['R', 'IH1', 'F', 'AH0', 'L']), 'riffraff': ('NN', ['R', 'IH1', 'F', 'R', 'AE2', 'F']), 'rifled': ('VBN', ['R', 'AY1', 'F', 'AH0', 'L', 'D']), 'rifling': ('VBG', ['R', 'AY1', 'F', 'L', 'IH0', 'NG']), 'rifle': ('NN', ['R', 'AY1', 'F', 'AH0', 'L']), 'rifleman': ('NN', ['R', 'AY1', 'F', 'AH0', 'L', 'M', 'AH0', 'N']), 'rift': ('NN', ['R', 'IH1', 'F', 'T']), 'rig': ('NN', ['R', 'IH1', 'G']), 'rigged': ('VBN', ['R', 'IH1', 'G', 'D']), 'rigging': ('VBG', ['R', 'IH1', 'G', 'IH0', 'NG']), 'rigel': ('NN', ['R', 'AY1', 'JH', 'AH0', 'L']), 'riggle': ('NN', ['R', 'IH1', 'G', 'AH0', 'L']), 'right': ('NN', ['R', 'AY1', 'T']), 'righted': ('VBN', ['R', 'AY1', 'T', 
'IH0', 'D']), 'righteous': ('JJ', ['R', 'AY1', 'CH', 'AH0', 'S']), 'righteously': ('RB', ['R', 'AY1', 'CH', 'AH0', 'S', 'L', 'IY0']), 'righteousness': ('NN', ['R', 'AY1', 'CH', 'AH0', 'S', 'N', 'AH0', 'S']), 'righter': ('NN', ['R', 'AY1', 'T', 'ER0']), 'rightful': ('NN', ['R', 'AY1', 'T', 'F', 'AH0', 'L']), 'rightfully': ('RB', ['R', 'AY1', 'T', 'F', 'AH0', 'L', 'IY0']), 'rightly': ('RB', ['R', 'AY1', 'T', 'L', 'IY0']), 'rightness': ('NN', ['R', 'AY1', 'T', 'N', 'AH0', 'S']), 'rightward': ('NN', ['R', 'AY1', 'T', 'W', 'ER0', 'D']), 'rigid': ('JJ', ['R', 'IH1', 'JH', 'AH0', 'D']), 'rigidity': ('NN', ['R', 'IH0', 'JH', 'IH1', 'D', 'AH0', 'T', 'IY0']), 'rigidly': ('RB', ['R', 'IH1', 'JH', 'IH0', 'D', 'L', 'IY0']), 'rigor': ('NN', ['R', 'IH1', 'G', 'ER0']), 'rigorous': ('JJ', ['R', 'IH1', 'G', 'ER0', 'AH0', 'S']), 'riled': ('VBN', ['R', 'AY1', 'L', 'D']), 'rile': ('NN', ['R', 'AY1', 'L']), 'rill': ('NN', ['R', 'IH1', 'L']), 'rim': ('NN', ['R', 'IH1', 'M']), 'rimmed': ('VBN', ['R', 'IH1', 'M', 'D']), 'rima': ('NN', ['R', 'IY1', 'M', 'AH0']), 'rimer': ('NN', ['R', 'AY1', 'M', 'ER0']), 'rimmer': ('NN', ['R', 'IH1', 'M', 'ER0']), 'rind': ('NN', ['R', 'AY1', 'N', 'D']), 'rine': ('NN', ['R', 'AY1', 'N']), 'rung': ('NN', ['R', 'AH1', 'NG']), 'ringing': ('VBG', ['R', 'IH1', 'NG', 'IH0', 'NG']), 'ring': ('NN', ['R', 'IH1', 'NG']), 'ringed': ('VBN', ['R', 'IH1', 'NG', 'D']), 'ringer': ('NN', ['R', 'IH1', 'NG', 'ER0']), 'ringleader': ('NN', ['R', 'IH1', 'NG', 'L', 'IY2', 'D', 'ER0']), 'ringmaster': ('NN', ['R', 'IH1', 'NG', 'M', 'AE2', 'S', 'T', 'ER0']), 'ringneck': ('NN', ['R', 'IH1', 'NG', 'N', 'EH2', 'K']), 'rink': ('NN', ['R', 'IH1', 'NG', 'K']), 'rinker': ('NN', ['R', 'IH1', 'NG', 'K', 'ER0']), 'rinsing': ('VBG', ['R', 'IH1', 'N', 'S', 'IH0', 'NG']), 'rinse': ('NN', ['R', 'IH1', 'N', 'S']), 'riot': ('NN', ['R', 'AY1', 'AH0', 'T']), 'rioted': ('VBN', ['R', 'AY1', 'AH0', 'T', 'IH0', 'D']), 'rioting': ('NN', ['R', 'AY1', 'AH0', 'T', 'IH0', 'NG']), 'rioter': ('NN', ['R', 'AY1', 
'AH0', 'T', 'ER0']), 'riotous': ('JJ', ['R', 'AY1', 'AH0', 'T', 'AH0', 'S']), 'rip': ('NN', ['R', 'IH1', 'P']), 'ripped': ('NN', ['R', 'IH1', 'P', 'T']), 'ripping': ('VBG', ['R', 'IH1', 'P', 'IH0', 'NG']), 'ripe': ('NN', ['R', 'AY1', 'P']), 'ripened': ('VBN', ['R', 'AY1', 'P', 'AH0', 'N', 'D']), 'ripening': ('VBG', ['R', 'AY1', 'P', 'AH0', 'N', 'IH0', 'NG']), 'ripen': ('VB', ['R', 'AY1', 'P', 'AH0', 'N']), 'ripper': ('NN', ['R', 'IH1', 'P', 'ER0']), 'ripple': ('NN', ['R', 'IH1', 'P', 'AH0', 'L']), 'rippled': ('VBN', ['R', 'IH1', 'P', 'AH0', 'L', 'D']), 'rippling': ('VBG', ['R', 'IH1', 'P', 'AH0', 'L', 'IH0', 'NG']), 'rose': ('VBD', ['R', 'OW1', 'Z']), 'risen': ('NN', ['R', 'IH1', 'Z', 'AH0', 'N']), 'rising': ('VBG', ['R', 'AY1', 'Z', 'IH0', 'NG']), 'rise': ('NN', ['R', 'AY1', 'Z']), 'riser': ('NN', ['R', 'AY1', 'Z', 'ER0']), 'rish': ('NN', ['R', 'IH1', 'SH']), 'risk': ('NN', ['R', 'IH1', 'S', 'K']), 'risked': ('VBN', ['R', 'IH1', 'S', 'K', 'T']), 'risking': ('VBG', ['R', 'IH1', 'S', 'K', 'IH0', 'NG']), 'risky': ('JJ', ['R', 'IH1', 'S', 'K', 'IY0']), 'risse': ('NN', ['R', 'IH1', 'S']), 'rist': ('NN', ['R', 'IH1', 'S', 'T']), 'rite': ('NN', ['R', 'AY1', 'T']), 'ritual': ('JJ', ['R', 'IH1', 'CH', 'UW0', 'AH0', 'L']), 'ritualistic': ('JJ', ['R', 'IH2', 'CH', 'UW0', 'AH0', 'L', 'IH1', 'S', 'T', 'IH0', 'K']), 'rival': ('NN', ['R', 'AY1', 'V', 'AH0', 'L']), 'rivaled': ('VBN', ['R', 'AY1', 'V', 'AH0', 'L', 'D']), 'rivaling': ('VBG', ['R', 'AY1', 'V', 'AH0', 'L', 'IH0', 'NG']), 'rivalries': ('NNS', ['R', 'AY1', 'V', 'AH0', 'L', 'R', 'IY0', 'Z']), 'rivalry': ('NN', ['R', 'AY1', 'V', 'AH0', 'L', 'R', 'IY0']), 'riven': ('NN', ['R', 'IH1', 'V', 'AH0', 'N']), 'river': ('NN', ['R', 'IH1', 'V', 'ER0']), 'riverside': ('NN', ['R', 'IH1', 'V', 'ER0', 'S', 'AY2', 'D']), 'rivet': ('NN', ['R', 'IH1', 'V', 'AH0', 'T']), 'riveted': ('VBN', ['R', 'IH1', 'V', 'AH0', 'T', 'IH0', 'D']), 'riveting': ('VBG', ['R', 'IH1', 'V', 'AH0', 'T', 'IH0', 'NG']), 'roach': ('NN', ['R', 'OW1', 'CH']), 
'road': ('NN', ['R', 'OW1', 'D']), 'roadside': ('NN', ['R', 'OW1', 'D', 'S', 'AY2', 'D']), 'roadster': ('NN', ['R', 'OW1', 'D', 'S', 'T', 'ER0']), 'roadway': ('NN', ['R', 'OW1', 'D', 'W', 'EY2']), 'roamed': ('VBN', ['R', 'OW1', 'M', 'D']), 'roaming': ('VBG', ['R', 'OW1', 'M', 'IH0', 'NG']), 'roam': ('NN', ['R', 'OW1', 'M']), 'roan': ('NN', ['R', 'OW1', 'N']), 'roared': ('VBN', ['R', 'AO1', 'R', 'D']), 'roaring': ('VBG', ['R', 'AO1', 'R', 'IH0', 'NG']), 'roar': ('NN', ['R', 'AO1', 'R']), 'roasted': ('VBN', ['R', 'OW1', 'S', 'T', 'AH0', 'D']), 'roasting': ('VBG', ['R', 'OW1', 'S', 'T', 'IH0', 'NG']), 'roast': ('NN', ['R', 'OW1', 'S', 'T']), 'roaster': ('NN', ['R', 'OW1', 'S', 'T', 'ER0']), 'rob': ('NN', ['R', 'AA1', 'B']), 'robbed': ('NN', ['R', 'AA1', 'B', 'D']), 'robbing': ('VBG', ['R', 'AA1', 'B', 'IH0', 'NG']), 'robber': ('NN', ['R', 'AA1', 'B', 'ER0']), 'robberies': ('NNS', ['R', 'AA1', 'B', 'ER0', 'IY0', 'Z']), 'robbery': ('NN', ['R', 'AA1', 'B', 'ER0', 'IY0']), 'robbin': ('NN', ['R', 'AA1', 'B', 'IH0', 'N']), 'robe': ('NN', ['R', 'OW1', 'B']), 'robed': ('NN', ['R', 'OW1', 'B', 'D']), 'robert': ('NN', ['R', 'AA1', 'B', 'ER0', 'T']), 'robin': ('NN', ['R', 'AA1', 'B', 'AH0', 'N']), 'robinia': ('NN', ['R', 'OW0', 'B', 'IY1', 'N', 'IY0', 'AH0']), 'robust': ('NN', ['R', 'OW0', 'B', 'AH1', 'S', 'T']), 'robustly': ('RB', ['R', 'OW2', 'B', 'AH1', 'S', 'T', 'L', 'IY0']), 'robustness': ('NN', ['R', 'OW0', 'B', 'AH1', 'S', 'T', 'N', 'AH0', 'S']), 'roc': ('NN', ['R', 'AA1', 'K']), 'roche': ('NN', ['R', 'OW1', 'CH']), 'rochelle': ('NN', ['R', 'OW0', 'SH', 'EH1', 'L']), 'rock': ('NN', ['R', 'AA1', 'K']), 'rocked': ('VBN', ['R', 'AA1', 'K', 'T']), 'rocking': ('VBG', ['R', 'AA1', 'K', 'IH0', 'NG']), 'rockaway': ('NN', ['R', 'AA1', 'K', 'AH0', 'W', 'EY2']), 'rocker': ('NN', ['R', 'AA1', 'K', 'ER0']), 'rocket': ('NN', ['R', 'AA1', 'K', 'AH0', 'T']), 'rocketed': ('VBN', ['R', 'AA1', 'K', 'AH0', 'T', 'IH0', 'D']), 'rocketing': ('VBG', ['R', 'AA1', 'K', 'AH0', 'T', 'IH0', 'NG']), 
'rockrose': ('VB', ['R', 'AA1', 'K', 'R', 'OW2', 'Z']), 'rockwood': ('NN', ['R', 'AA1', 'K', 'W', 'UH2', 'D']), 'rocky': ('NN', ['R', 'AA1', 'K', 'IY0']), 'rococo': ('NN', ['R', 'AH0', 'K', 'OW1', 'K', 'OW2']), 'rod': ('NN', ['R', 'AA1', 'D']), 'roddy': ('NN', ['R', 'AA1', 'D', 'IY0']), 'rodent': ('NN', ['R', 'OW1', 'D', 'AH0', 'N', 'T']), 'rodeo': ('NN', ['R', 'OW1', 'D', 'IY0', 'OW2']), 'rodge': ('NN', ['R', 'AA1', 'JH']), 'rody': ('NN', ['R', 'OW1', 'D', 'IY0']), 'roe': ('NN', ['R', 'OW1']), 'roebuck': ('NN', ['R', 'OW1', 'B', 'AH2', 'K']), 'roed': ('NN', ['R', 'OW1', 'D']), 'rogue': ('NN', ['R', 'OW1', 'G']), 'roiled': ('VBN', ['R', 'OY1', 'L', 'D']), 'roiling': ('VBG', ['R', 'OY1', 'L', 'IH0', 'NG']), 'roil': ('NN', ['R', 'OY1', 'L']), 'roister': ('NN', ['R', 'OY1', 'S', 'T', 'ER0']), 'role': ('NN', ['R', 'OW1', 'L']), 'rolled': ('VBN', ['R', 'OW1', 'L', 'D']), 'rolling': ('VBG', ['R', 'OW1', 'L', 'IH0', 'NG']), 'roll': ('NN', ['R', 'OW1', 'L']), 'roller': ('NN', ['R', 'OW1', 'L', 'ER0']), 'rolley': ('NN', ['R', 'AA1', 'L', 'IY0']), 'rollicking': ('VBG', ['R', 'AA1', 'L', 'IH0', 'K', 'IH0', 'NG']), 'roman': ('NN', ['R', 'OW1', 'M', 'AH0', 'N']), 'romance': ('NN', ['R', 'OW0', 'M', 'AE1', 'N', 'S']), 'romancing': ('VBG', ['R', 'OW0', 'M', 'AE1', 'N', 'S', 'IH0', 'NG']), 'romanesque': ('NN', ['R', 'OW2', 'M', 'AH0', 'N', 'EH1', 'S', 'K']), 'romantic': ('JJ', ['R', 'OW0', 'M', 'AE1', 'N', 'T', 'IH0', 'K']), 'romanticism': ('NN', ['R', 'OW0', 'M', 'AE1', 'N', 'T', 'AH0', 'S', 'IH2', 'Z', 'AH0', 'M']), 'romany': ('NN', ['R', 'AA1', 'M', 'AH0', 'N', 'IY0']), 'romping': ('VBG', ['R', 'AA1', 'M', 'P', 'IH0', 'NG']), 'romp': ('NN', ['R', 'AA1', 'M', 'P']), 'ronco': ('NN', ['R', 'OW1', 'N', 'K', 'OW0']), 'rondeau': ('NN', ['R', 'AA0', 'N', 'D', 'OW1']), 'rong': ('NN', ['R', 'AO1', 'NG']), 'ronne': ('NN', ['R', 'AA1', 'N']), 'rood': ('NN', ['R', 'UW1', 'D']), 'roof': ('NN', ['R', 'UW1', 'F']), 'roofed': ('NN', ['R', 'UW1', 'F', 'T']), 'roofing': ('VBG', ['R', 'UW1', 'F', 
'IH0', 'NG']), 'roofer': ('NN', ['R', 'UW1', 'F', 'ER0']), 'rooftree': ('NN', ['R', 'UW1', 'F', 'T', 'R', 'IY2']), 'rook': ('NN', ['R', 'UH1', 'K']), 'room': ('NN', ['R', 'UW1', 'M']), 'rooming': ('VBG', ['R', 'UW1', 'M', 'IH0', 'NG']), 'roomful': ('NN', ['R', 'UW1', 'M', 'F', 'UH2', 'L']), 'roominess': ('NN', ['R', 'UW1', 'M', 'IY0', 'N', 'AH0', 'S']), 'roommate': ('NN', ['R', 'UW1', 'M', 'EY2', 'T']), 'roomy': ('NN', ['R', 'UW1', 'M', 'IY0']), 'roop': ('NN', ['R', 'UW1', 'P']), 'roost': ('NN', ['R', 'UW1', 'S', 'T']), 'roosted': ('VBN', ['R', 'UW1', 'S', 'T', 'IH0', 'D']), 'rooster': ('NN', ['R', 'UW1', 'S', 'T', 'ER0']), 'root': ('NN', ['R', 'UW1', 'T']), 'rooted': ('VBN', ['R', 'UW1', 'T', 'AH0', 'D']), 'rooting': ('VBG', ['R', 'UW1', 'T', 'IH0', 'NG']), 'rooter': ('NN', ['R', 'UW1', 'T', 'ER0']), 'rootless': ('NN', ['R', 'UW1', 'T', 'L', 'AH0', 'S']), 'rope': ('NN', ['R', 'OW1', 'P']), 'roped': ('NN', ['R', 'OW1', 'P', 'T']), 'roping': ('VBG', ['R', 'OW1', 'P', 'IH0', 'NG']), 'roper': ('NN', ['R', 'OW1', 'P', 'ER0']), 'rory': ('NN', ['R', 'AO1', 'R', 'IY0']), 'rosalia': ('NN', ['R', 'OW0', 'Z', 'AH0', 'L', 'IY1', 'AH0']), 'rosaries': ('NNS', ['R', 'OW1', 'Z', 'ER0', 'IY0', 'Z']), 'rosary': ('NN', ['R', 'OW1', 'Z', 'ER0', 'IY0']), 'roseate': ('NN', ['R', 'OW1', 'Z', 'IY0', 'AH0', 'T']), 'rosebud': ('NN', ['R', 'OW1', 'Z', 'B', 'AH0', 'D']), 'rosebush': ('NN', ['R', 'OW1', 'Z', 'B', 'UH2', 'SH']), 'rosella': ('NN', ['R', 'OW0', 'Z', 'EH1', 'L', 'AH0']), 'rosemary': ('NN', ['R', 'OW1', 'Z', 'M', 'EH2', 'R', 'IY0']), 'rosen': ('NN', ['R', 'OW1', 'Z', 'AH0', 'N']), 'roser': ('NN', ['R', 'OW1', 'Z', 'ER0']), 'rosette': ('NN', ['R', 'AH0', 'S', 'EH1', 'T']), 'rosewood': ('NN', ['R', 'OW1', 'Z', 'W', 'UH2', 'D']), 'rosier': ('NN', ['R', 'OW1', 'Z', 'IY0', 'ER0']), 'rosin': ('NN', ['R', 'AA1', 'Z', 'AH0', 'N']), 'ross': ('NN', ['R', 'AA1', 'S']), 'rost': ('NN', ['R', 'AA1', 'S', 'T']), 'roster': ('NN', ['R', 'AA1', 'S', 'T', 'ER0']), 'rostrum': ('NN', ['R', 'AA1', 'S', 
'T', 'R', 'AH0', 'M']), 'rosy': ('JJ', ['R', 'OW1', 'Z', 'IY0']), 'rotted': ('VBN', ['R', 'AA1', 'T', 'IH0', 'D']), 'rotting': ('VBG', ['R', 'AA1', 'T', 'IH0', 'NG']), 'rot': ('NN', ['R', 'AA1', 'T']), 'rota': ('NN', ['R', 'OW1', 'T', 'AH0']), 'rotary': ('NN', ['R', 'OW1', 'T', 'ER0', 'IY0']), 'rotate': ('NN', ['R', 'OW1', 'T', 'EY2', 'T']), 'rotated': ('VBN', ['R', 'OW1', 'T', 'EY2', 'T', 'IH0', 'D']), 'rotating': ('VBG', ['R', 'OW1', 'T', 'EY2', 'T', 'IH0', 'NG']), 'rotation': ('NN', ['R', 'OW0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'rote': ('NN', ['R', 'OW1', 'T']), 'rotella': ('NN', ['R', 'OW0', 'T', 'EH1', 'L', 'AH0']), 'rother': ('NN', ['R', 'AO1', 'TH', 'ER0']), 'rotten': ('VB', ['R', 'AA1', 'T', 'AH0', 'N']), 'rotund': ('NN', ['R', 'OW0', 'T', 'AH1', 'N', 'D']), 'rotunda': ('NN', ['R', 'OW0', 'T', 'AH1', 'N', 'D', 'AH0']), 'rotundo': ('NN', ['R', 'OW0', 'T', 'UW1', 'N', 'D', 'OW0']), 'rouble': ('JJ', ['R', 'UW1', 'B', 'AH0', 'L']), 'rouge': ('NN', ['R', 'UW1', 'ZH']), 'rough': ('NN', ['R', 'AH1', 'F']), 'roughly': ('RB', ['R', 'AH1', 'F', 'L', 'IY0']), 'roughness': ('NN', ['R', 'AH1', 'F', 'N', 'AH0', 'S']), 'roughshod': ('NN', ['R', 'AH1', 'F', 'SH', 'AA1', 'D']), 'rought': ('NN', ['R', 'AO1', 'T']), 'rouleau': ('NN', ['R', 'UW0', 'L', 'OW1']), 'roulette': ('NN', ['R', 'UW0', 'L', 'EH1', 'T']), 'round': ('NN', ['R', 'AW1', 'N', 'D']), 'rounded': ('VBN', ['R', 'AW1', 'N', 'D', 'AH0', 'D']), 'rounding': ('VBG', ['R', 'AW1', 'N', 'D', 'IH0', 'NG']), 'roundabout': ('NN', ['R', 'AW1', 'N', 'D', 'AH0', 'B', 'AW2', 'T']), 'rounder': ('NN', ['R', 'AW1', 'N', 'D', 'ER0']), 'roundhead': ('NN', ['R', 'AW1', 'N', 'D', 'HH', 'EH2', 'D']), 'roundhouse': ('NN', ['R', 'AW1', 'N', 'D', 'HH', 'AW2', 'S']), 'roundly': ('RB', ['R', 'AW1', 'N', 'D', 'L', 'IY0']), 'roundworm': ('NN', ['R', 'AW1', 'N', 'D', 'W', 'ER0', 'M']), 'roundy': ('NN', ['R', 'AW1', 'N', 'D', 'IY0']), 'rouse': ('NN', ['R', 'AW1', 'S']), 'roused': ('VBN', ['R', 'AW1', 'Z', 'D']), 'rousing': ('VBG', ['R', 'AW1', 
'Z', 'IH0', 'NG']), 'rouser': ('NN', ['R', 'AW1', 'Z', 'ER0']), 'rout': ('NN', ['R', 'AW1', 'T']), 'routed': ('VBN', ['R', 'UW1', 'T', 'IH0', 'D']), 'routing': ('VBG', ['R', 'AW1', 'T', 'IH0', 'NG']), 'route': ('NN', ['R', 'UW1', 'T']), 'router': ('NN', ['R', 'UW1', 'T', 'ER0']), 'routine': ('NN', ['R', 'UW0', 'T', 'IY1', 'N']), 'roux': ('NN', ['R', 'UW1']), 'roving': ('VBG', ['R', 'OW1', 'V', 'IH0', 'NG']), 'rover': ('NN', ['R', 'OW1', 'V', 'ER0']), 'row': ('NN', ['R', 'OW1']), 'rowed': ('NN', ['R', 'OW1', 'D']), 'rowing': ('VBG', ['R', 'OW1', 'IH0', 'NG']), 'rowan': ('NN', ['R', 'OW1', 'AH0', 'N']), 'rowboat': ('NN', ['R', 'OW1', 'B', 'OW2', 'T']), 'rowdies': ('NNS', ['R', 'AW1', 'D', 'IY0', 'Z']), 'rowdy': ('NN', ['R', 'AW1', 'D', 'IY0']), 'rowen': ('NN', ['R', 'OW1', 'AH0', 'N']), 'rower': ('NN', ['R', 'OW1', 'ER0']), 'roy': ('NN', ['R', 'OY1']), 'royal': ('NN', ['R', 'OY1', 'AH0', 'L']), 'royalist': ('NN', ['R', 'OY1', 'AH0', 'L', 'IH0', 'S', 'T']), 'royally': ('RB', ['R', 'OY1', 'AH0', 'L', 'IY0']), 'royalties': ('NNS', ['R', 'OY1', 'AH0', 'L', 'T', 'IY0', 'Z']), 'royalty': ('NN', ['R', 'OY1', 'AH0', 'L', 'T', 'IY0']), 'royster': ('NN', ['R', 'OY1', 'S', 'T', 'ER0']), 'rubbed': ('NN', ['R', 'AH1', 'B', 'D']), 'rubbing': ('VBG', ['R', 'AH1', 'B', 'IH0', 'NG']), 'rub': ('NN', ['R', 'AH1', 'B']), 'rubber': ('NN', ['R', 'AH1', 'B', 'ER0']), 'rubbish': ('NN', ['R', 'AH1', 'B', 'IH0', 'SH']), 'rubble': ('NN', ['R', 'AH1', 'B', 'AH0', 'L']), 'rubella': ('NN', ['R', 'UW0', 'B', 'EH1', 'L', 'AH0']), 'rubicon': ('NN', ['R', 'UW1', 'B', 'IH0', 'K', 'AO0', 'N']), 'rubin': ('NN', ['R', 'UW1', 'B', 'IH0', 'N']), 'ruble': ('NN', ['R', 'UW1', 'B', 'AH0', 'L']), 'rubric': ('NN', ['R', 'UW1', 'B', 'R', 'IH0', 'K']), 'rubies': ('NNS', ['R', 'UW1', 'B', 'IY0', 'Z']), 'ruby': ('NN', ['R', 'UW1', 'B', 'IY0']), 'ruck': ('NN', ['R', 'AH1', 'K']), 'rud': ('NN', ['R', 'AH1', 'D']), 'rudd': ('NN', ['R', 'AH1', 'D']), 'rudder': ('NN', ['R', 'AH1', 'D', 'ER0']), 'rudderless': ('NN', 
['R', 'AH1', 'D', 'ER0', 'L', 'AH0', 'S']), 'ruddle': ('NN', ['R', 'AH1', 'D', 'AH0', 'L']), 'ruddock': ('NN', ['R', 'AH1', 'D', 'AH0', 'K']), 'ruddy': ('NN', ['R', 'AH1', 'D', 'IY0']), 'rude': ('NN', ['R', 'UW1', 'D']), 'rudiment': ('NN', ['R', 'UW1', 'D', 'IH0', 'M', 'AH0', 'N', 'T']), 'rudimentary': ('JJ', ['R', 'UW2', 'D', 'AH0', 'M', 'EH1', 'N', 'T', 'ER0', 'IY0']), 'rue': ('NN', ['R', 'UW1']), 'rueful': ('NN', ['R', 'UW1', 'F', 'AH0', 'L']), 'ruelle': ('NN', ['R', 'UW2', 'EH1', 'L']), 'ruff': ('NN', ['R', 'AH1', 'F']), 'ruffing': ('VBG', ['R', 'AH1', 'F', 'IH0', 'NG']), 'ruffin': ('NN', ['R', 'AH1', 'F', 'IH0', 'N']), 'ruffled': ('VBN', ['R', 'AH1', 'F', 'AH0', 'L', 'D']), 'ruffling': ('VBG', ['R', 'AH1', 'F', 'AH0', 'L', 'IH0', 'NG']), 'ruffle': ('NN', ['R', 'AH1', 'F', 'AH0', 'L']), 'rug': ('NN', ['R', 'AH1', 'G']), 'rugged': ('VBN', ['R', 'AH1', 'G', 'AH0', 'D']), 'ruin': ('NN', ['R', 'UW1', 'AH0', 'N']), 'ruined': ('VBN', ['R', 'UW1', 'AH0', 'N', 'D']), 'ruining': ('VBG', ['R', 'UW1', 'IH0', 'N', 'IH0', 'NG']), 'ruinous': ('JJ', ['R', 'UW1', 'AH0', 'N', 'AH0', 'S']), 'rule': ('NN', ['R', 'UW1', 'L']), 'ruled': ('VBN', ['R', 'UW1', 'L', 'D']), 'ruling': ('NN', ['R', 'UW1', 'L', 'IH0', 'NG']), 'ruler': ('NN', ['R', 'UW1', 'L', 'ER0']), 'rum': ('NN', ['R', 'AH1', 'M']), 'rumble': ('JJ', ['R', 'AH1', 'M', 'B', 'AH0', 'L']), 'rumbling': ('VBG', ['R', 'AH1', 'M', 'B', 'AH0', 'L', 'IH0', 'NG']), 'rumen': ('NNS', ['R', 'UW1', 'M', 'AH0', 'N']), 'ruminant': ('NN', ['R', 'UW1', 'M', 'AH0', 'N', 'AH0', 'N', 'T']), 'ruminated': ('VBN', ['R', 'UW1', 'M', 'IH0', 'N', 'EY2', 'T', 'IH0', 'D']), 'ruminate': ('NN', ['R', 'UW1', 'M', 'IH0', 'N', 'EY2', 'T']), 'rumination': ('NN', ['R', 'UW2', 'M', 'IH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'rummage': ('NN', ['R', 'AH1', 'M', 'IH0', 'JH']), 'rummaged': ('VBN', ['R', 'AH1', 'M', 'IH0', 'JH', 'D']), 'rummaging': ('VBG', ['R', 'AH1', 'M', 'IH0', 'JH', 'IH0', 'NG']), 'rummy': ('NN', ['R', 'AH1', 'M', 'IY0']), 'rumney': ('NN', ['R', 
'AH1', 'M', 'N', 'IY0']), 'rumor': ('NN', ['R', 'UW1', 'M', 'ER0']), 'rumored': ('VBN', ['R', 'UW1', 'M', 'ER0', 'D']), 'rump': ('NN', ['R', 'AH1', 'M', 'P']), 'rumpled': ('VBN', ['R', 'AH1', 'M', 'P', 'AH0', 'L', 'D']), 'rumple': ('NN', ['R', 'AH1', 'M', 'P', 'AH0', 'L']), 'run': ('VB', ['R', 'AH1', 'N']), 'running': ('VBG', ['R', 'AH1', 'N', 'IH0', 'NG']), 'runaway': ('NN', ['R', 'AH1', 'N', 'AH0', 'W', 'EY2']), 'rundle': ('NN', ['R', 'AH1', 'N', 'D', 'AH0', 'L']), 'rune': ('NN', ['R', 'UW1', 'N']), 'runner': ('NN', ['R', 'AH1', 'N', 'ER0']), 'runnion': ('NN', ['R', 'AH1', 'N', 'Y', 'AH0', 'N']), 'runway': ('NN', ['R', 'AH1', 'N', 'W', 'EY2']), 'rupee': ('NN', ['R', 'UW0', 'P', 'IY1']), 'rupture': ('NN', ['R', 'AH1', 'P', 'CH', 'ER0']), 'ruptured': ('VBN', ['R', 'AH1', 'P', 'CH', 'ER0', 'D']), 'rupturing': ('VBG', ['R', 'AH1', 'P', 'CH', 'ER0', 'IH0', 'NG']), 'rural': ('JJ', ['R', 'UH1', 'R', 'AH0', 'L']), 'ties': ('NNS', ['T', 'AY1', 'Z']), 'ruse': ('NN', ['R', 'UW1', 'Z']), 'rush': ('NN', ['R', 'AH1', 'SH']), 'rushed': ('VBN', ['R', 'AH1', 'SH', 'T']), 'rushing': ('VBG', ['R', 'AH1', 'SH', 'IH0', 'NG']), 'rusher': ('NN', ['R', 'AH1', 'SH', 'ER0']), 'rusk': ('NN', ['R', 'AH1', 'S', 'K']), 'russ': ('NN', ['R', 'AH1', 'S']), 'russet': ('NN', ['R', 'AH1', 'S', 'IH0', 'T']), 'russia': ('NN', ['R', 'AH1', 'SH', 'AH0']), 'russian': ('JJ', ['R', 'AH1', 'SH', 'AH0', 'N']), 'russification': ('NN', ['R', 'AH2', 'S', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'rust': ('NN', ['R', 'AH1', 'S', 'T']), 'rusted': ('VBN', ['R', 'AH1', 'S', 'T', 'AH0', 'D']), 'rusting': ('VBG', ['R', 'AH1', 'S', 'T', 'IH0', 'NG']), 'rustic': ('JJ', ['R', 'AH1', 'S', 'T', 'IH0', 'K']), 'rustling': ('VBG', ['R', 'AH1', 'S', 'L', 'IH0', 'NG']), 'rustle': ('NN', ['R', 'AH1', 'S', 'AH0', 'L']), 'rustler': ('NN', ['R', 'AH1', 'S', 'L', 'ER0']), 'rusty': ('NN', ['R', 'AH1', 'S', 'T', 'IY0']), 'rut': ('NN', ['R', 'AH1', 'T']), 'rutted': ('VBN', ['R', 'AH1', 'T', 'IH0', 'D']), 'ruth': ('NN', ['R', 
'UW1', 'TH']), 'ruthless': ('NN', ['R', 'UW1', 'TH', 'L', 'AH0', 'S']), 'rutile': ('NN', ['R', 'UW1', 'T', 'IY0', 'L']), 'rutter': ('NN', ['R', 'AH1', 'T', 'ER0']), 'ruttle': ('NN', ['R', 'AH1', 'T', 'AH0', 'L']), 'ryal': ('NN', ['R', 'IY0', 'AA1', 'L']), 'ryder': ('NN', ['R', 'AY1', 'D', 'ER0']), 'rye': ('NN', ['R', 'AY1']), 'rys': ('NN', ['R', 'IH1', 'S']), 's': ('NN', ['EH1', 'S']), "'s": ('POS', ['EH1', 'S']), 'sabbath': ('NN', ['S', 'AE1', 'B', 'AH0', 'TH']), 'sabbatical': ('JJ', ['S', 'AH0', 'B', 'AE1', 'T', 'IH0', 'K', 'AH0', 'L']), 'sabella': ('NN', ['S', 'AH0', 'B', 'EH1', 'L', 'AH0']), 'saber': ('NN', ['S', 'EY1', 'B', 'ER0']), 'sabre': ('NN', ['S', 'EY1', 'B', 'ER0']), 'sabine': ('NN', ['S', 'AH0', 'B', 'IY1', 'N']), 'sable': ('JJ', ['S', 'EY1', 'B', 'AH0', 'L']), 'sac': ('NN', ['S', 'AE1', 'K']), 'saccharin': ('NN', ['S', 'AE1', 'K', 'ER0', 'AH0', 'N']), 'saccharine': ('NN', ['S', 'AE1', 'K', 'ER0', 'AY2', 'N']), 'saccule': ('NN', ['S', 'AE1', 'K', 'Y', 'UW2', 'L']), 'sachet': ('NN', ['S', 'AE0', 'SH', 'EY1']), 'sack': ('NN', ['S', 'AE1', 'K']), 'sacked': ('VBN', ['S', 'AE1', 'K', 'T']), 'sacking': ('VBG', ['S', 'AE1', 'K', 'IH0', 'NG']), 'sacrament': ('NN', ['S', 'AE1', 'K', 'R', 'AH0', 'M', 'AH0', 'N', 'T']), 'sacramental': ('NN', ['S', 'AE1', 'K', 'R', 'AH0', 'M', 'AH0', 'N', 'T', 'AH0', 'L']), 'sacred': ('VBN', ['S', 'EY1', 'K', 'R', 'AH0', 'D']), 'sacrifice': ('NN', ['S', 'AE1', 'K', 'R', 'AH0', 'F', 'AY2', 'S']), 'sacrificed': ('VBN', ['S', 'AE1', 'K', 'R', 'AH0', 'F', 'AY2', 'S', 'T']), 'sacrificing': ('VBG', ['S', 'AE1', 'K', 'R', 'AH0', 'F', 'AY2', 'S', 'IH0', 'NG']), 'sacrificial': ('JJ', ['S', 'AE2', 'K', 'R', 'AH0', 'F', 'IH1', 'SH', 'AH0', 'L']), 'sacrilege': ('NN', ['S', 'AE1', 'K', 'R', 'AH0', 'L', 'AH0', 'JH']), 'sacrilegious': ('JJ', ['S', 'AE2', 'K', 'R', 'AH0', 'L', 'EH1', 'JH', 'IH0', 'S']), 'sacrosanct': ('NN', ['S', 'AE1', 'K', 'R', 'OW0', 'S', 'AE0', 'NG', 'K', 'T']), 'sacra': ('NN', ['S', 'AE1', 'K', 'R', 'AH0']), 'sacs': ('NN', 
['S', 'AE1', 'K', 'S']), 'sad': ('NN', ['S', 'AE1', 'D']), 'saddened': ('VBN', ['S', 'AE1', 'D', 'AH0', 'N', 'D']), 'saddening': ('VBG', ['S', 'AE1', 'D', 'AH0', 'N', 'IH0', 'NG']), 'sadden': ('NN', ['S', 'AE1', 'D', 'AH0', 'N']), 'sadder': ('NN', ['S', 'AE1', 'D', 'ER0']), 'saddle': ('NN', ['S', 'AE1', 'D', 'AH0', 'L']), 'saddled': ('VBN', ['S', 'AE1', 'D', 'AH0', 'L', 'D']), 'saddling': ('VBG', ['S', 'AE1', 'D', 'AH0', 'L', 'IH0', 'NG']), 'saddlebags': ('NNS', ['S', 'AE1', 'D', 'AH0', 'L', 'B', 'AE2', 'G', 'Z']), 'saddler': ('NN', ['S', 'AE1', 'D', 'AH0', 'L', 'ER0']), 'sadly': ('RB', ['S', 'AE1', 'D', 'L', 'IY0']), 'sadness': ('NN', ['S', 'AE1', 'D', 'N', 'AH0', 'S']), 'safe': ('JJ', ['S', 'EY1', 'F']), 'safeguard': ('NN', ['S', 'EY1', 'F', 'G', 'AA2', 'R', 'D']), 'safely': ('RB', ['S', 'EY1', 'F', 'L', 'IY0']), 'safety': ('NN', ['S', 'EY1', 'F', 'T', 'IY0']), 'safflower': ('NN', ['S', 'AE1', 'F', 'L', 'AW2', 'ER0']), 'saffron': ('NN', ['S', 'AE1', 'F', 'R', 'AH0', 'N']), 'sagged': ('VBN', ['S', 'AE1', 'G', 'D']), 'sagging': ('VBG', ['S', 'AE1', 'G', 'IH0', 'NG']), 'sag': ('NN', ['S', 'AE1', 'G']), 'sagas': ('NN', ['S', 'AA1', 'G', 'AH0', 'Z']), 'saga': ('NN', ['S', 'AA1', 'G', 'AH0']), 'sage': ('NN', ['S', 'EY1', 'JH']), 'sagebrush': ('NN', ['S', 'EY1', 'JH', 'B', 'R', 'AH2', 'SH']), 'sagittal': ('NN', ['S', 'AE1', 'JH', 'AH0', 'T', 'AH0', 'L']), 'sago': ('NN', ['S', 'EY1', 'G', 'OW0']), 'sai': ('NN', ['S', 'AY1']), 'said': ('VBD', ['S', 'EH1', 'D']), 'sail': ('NN', ['S', 'EY1', 'L']), 'sailed': ('VBN', ['S', 'EY1', 'L', 'D']), 'sailing': ('VBG', ['S', 'EY1', 'L', 'IH0', 'NG']), 'sailboat': ('NN', ['S', 'EY1', 'L', 'B', 'OW2', 'T']), 'sailer': ('NN', ['S', 'EY1', 'L', 'ER0']), 'sailfish': ('NN', ['S', 'EY1', 'L', 'F', 'IH2', 'SH']), 'sailor': ('NN', ['S', 'EY1', 'L', 'ER0']), 'sain': ('NN', ['S', 'EY1', 'N']), 'saint': ('NN', ['S', 'EY1', 'N', 'T']), 'sainted': ('VBN', ['S', 'EY1', 'N', 'T', 'IH0', 'D']), 'sainthood': ('NN', ['S', 'EY1', 'N', 'T', 'HH', 'UH2', 
'D']), 'saintly': ('RB', ['S', 'EY1', 'N', 'T', 'L', 'IY0']), 'sake': ('NN', ['S', 'EY1', 'K']), 'saker': ('NN', ['S', 'EY1', 'K', 'ER0']), 'saki': ('NN', ['S', 'AA1', 'K', 'IY0']), 'sal': ('NN', ['S', 'AE1', 'L']), 'salaam': ('NN', ['S', 'AH0', 'L', 'AA1', 'M']), 'salable': ('JJ', ['S', 'EY1', 'L', 'AH0', 'B', 'AH0', 'L']), 'salacious': ('JJ', ['S', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'S']), 'salad': ('NN', ['S', 'AE1', 'L', 'AH0', 'D']), 'salam': ('NN', ['S', 'AA0', 'L', 'AA1', 'M']), 'salamander': ('NN', ['S', 'AE2', 'L', 'AH0', 'M', 'AE1', 'N', 'D', 'ER0']), 'salaried': ('VBN', ['S', 'AE1', 'L', 'ER0', 'IY0', 'D']), 'salary': ('NN', ['S', 'AE1', 'L', 'ER0', 'IY0']), 'salaries': ('NNS', ['S', 'AE1', 'L', 'ER0', 'IY0', 'Z']), 'sale': ('NN', ['S', 'EY1', 'L']), 'saleable': ('JJ', ['S', 'EY1', 'L', 'AH0', 'B', 'AH0', 'L']), 'salesmen': ('NNS', ['S', 'EY1', 'L', 'Z', 'M', 'IH0', 'N']), 'salesman': ('NN', ['S', 'EY1', 'L', 'Z', 'M', 'AH0', 'N']), 'saleswomen': ('NNS', ['S', 'EY1', 'L', 'Z', 'W', 'IH2', 'M', 'AH0', 'N']), 'saleswoman': ('NN', ['S', 'EY1', 'L', 'Z', 'W', 'UH2', 'M', 'AH0', 'N']), 'salient': ('NN', ['S', 'EY1', 'L', 'IY0', 'AH0', 'N', 'T']), 'salina': ('NN', ['S', 'AH0', 'L', 'IY1', 'N', 'AH0']), 'saline': ('NN', ['S', 'AH0', 'L', 'IY1', 'N']), 'salinity': ('NN', ['S', 'AH0', 'L', 'IH1', 'N', 'AH0', 'T', 'IY0']), 'saliva': ('NN', ['S', 'AH0', 'L', 'AY1', 'V', 'AH0']), 'salivated': ('VBN', ['S', 'AE1', 'L', 'AH0', 'V', 'EY2', 'T', 'AH0', 'D']), 'salivating': ('VBG', ['S', 'AE1', 'L', 'AH0', 'V', 'EY2', 'T', 'IH0', 'NG']), 'salivate': ('NN', ['S', 'AE1', 'L', 'AH0', 'V', 'EY2', 'T']), 'sally': ('RB', ['S', 'AE1', 'L', 'IY0']), 'sallies': ('NNS', ['S', 'AE1', 'L', 'IY0', 'Z']), 'salm': ('NN', ['S', 'AA1', 'M']), 'salmi': ('NN', ['S', 'AA1', 'L', 'M', 'IY0']), 'salmons': ('NNS', ['S', 'AE1', 'M', 'AH0', 'N', 'Z']), 'salmon': ('NN', ['S', 'AE1', 'M', 'AH0', 'N']), 'salon': ('NN', ['S', 'AH0', 'L', 'AA1', 'N']), 'saloon': ('NN', ['S', 'AH0', 'L', 'UW1', 'N']), 
'salt': ('NN', ['S', 'AO1', 'L', 'T']), 'salted': ('VBN', ['S', 'AO1', 'L', 'T', 'AH0', 'D']), 'salting': ('VBG', ['S', 'AO1', 'L', 'T', 'IH0', 'NG']), 'salter': ('NN', ['S', 'AO1', 'L', 'T', 'ER0']), 'saltier': ('NN', ['S', 'AO1', 'L', 'T', 'IY0', 'ER0']), 'salty': ('NN', ['S', 'AO1', 'L', 'T', 'IY0']), 'salutary': ('JJ', ['S', 'AE1', 'L', 'Y', 'AH0', 'T', 'EH2', 'R', 'IY0']), 'salutatorian': ('JJ', ['S', 'AH0', 'L', 'UW2', 'T', 'AH0', 'T', 'AO1', 'R', 'IY0', 'AH0', 'N']), 'saluted': ('VBN', ['S', 'AH0', 'L', 'UW1', 'T', 'AH0', 'D']), 'saluting': ('VBG', ['S', 'AH0', 'L', 'UW1', 'T', 'IH0', 'NG']), 'salute': ('NN', ['S', 'AH0', 'L', 'UW1', 'T']), 'salvage': ('NN', ['S', 'AE1', 'L', 'V', 'AH0', 'JH']), 'salvation': ('NN', ['S', 'AE0', 'L', 'V', 'EY1', 'SH', 'AH0', 'N']), 'salve': ('NN', ['S', 'AA1', 'V']), 'salvia': ('NN', ['S', 'AE1', 'L', 'V', 'IY0', 'AH0']), 'salvos': ('NN', ['S', 'AE1', 'L', 'V', 'OW0', 'Z']), 'salvo': ('NN', ['S', 'AE1', 'L', 'V', 'OW0']), 'sam': ('NN', ['S', 'AE1', 'M']), 'samara': ('NN', ['S', 'AE1', 'M', 'ER0', 'AH0']), 'samaritan': ('NN', ['S', 'AH0', 'M', 'EH1', 'R', 'IH0', 'T', 'AH0', 'N']), 'sambo': ('NN', ['S', 'AE1', 'M', 'B', 'OW0']), 'same': ('JJ', ['S', 'EY1', 'M']), 'sameness': ('NN', ['S', 'EY1', 'M', 'N', 'AH0', 'S']), 'samoan': ('NN', ['S', 'AH0', 'M', 'OW1', 'AH0', 'N']), 'samp': ('NN', ['S', 'AE1', 'M', 'P']), 'sampan': ('NN', ['S', 'AE1', 'M', 'P', 'AE0', 'N']), 'sample': ('NN', ['S', 'AE1', 'M', 'P', 'AH0', 'L']), 'sampler': ('NN', ['S', 'AE1', 'M', 'P', 'L', 'ER0']), 'samson': ('NN', ['S', 'AE1', 'M', 'S', 'AH0', 'N']), 'sanctification': ('NN', ['S', 'AE2', 'NG', 'K', 'T', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'sanctify': ('NN', ['S', 'AE1', 'NG', 'K', 'T', 'AH0', 'F', 'AY0']), 'sanctimonious': ('JJ', ['S', 'AE2', 'NG', 'K', 'T', 'AH0', 'M', 'OW1', 'N', 'IY0', 'AH0', 'S']), 'sanctimony': ('NN', ['S', 'AE1', 'NG', 'K', 'T', 'IH0', 'M', 'OW2', 'N', 'IY0']), 'sanction': ('NN', ['S', 'AE1', 'NG', 'K', 'SH', 'AH0', 
'N']), 'sanctioned': ('VBN', ['S', 'AE1', 'NG', 'K', 'SH', 'AH0', 'N', 'D']), 'sanctioning': ('VBG', ['S', 'AE1', 'NG', 'K', 'SH', 'AH0', 'N', 'IH0', 'NG']), 'sanctity': ('NN', ['S', 'AE1', 'NG', 'K', 'T', 'IH0', 'T', 'IY0']), 'sanctuaries': ('NNS', ['S', 'AE1', 'NG', 'K', 'CH', 'UW0', 'EH2', 'R', 'IY0', 'Z']), 'sanctuary': ('NN', ['S', 'AE1', 'NG', 'K', 'CH', 'UW0', 'EH2', 'R', 'IY0']), 'sanctum': ('NN', ['S', 'AE1', 'NG', 'K', 'T', 'AH0', 'M']), 'sand': ('NN', ['S', 'AE1', 'N', 'D']), 'sanded': ('VBD', ['S', 'AE1', 'N', 'D', 'IH0', 'D']), 'sanding': ('VBG', ['S', 'AE1', 'N', 'D', 'IH0', 'NG']), 'sandal': ('NN', ['S', 'AE1', 'N', 'D', 'AH0', 'L']), 'sandbagger': ('NN', ['S', 'AE1', 'N', 'D', 'B', 'AE2', 'G', 'ER0']), 'sanders': ('NNS', ['S', 'AE1', 'N', 'D', 'ER0', 'Z']), 'sandman': ('NN', ['S', 'AE1', 'N', 'D', 'M', 'AE2', 'N']), 'sandpaper': ('NN', ['S', 'AE1', 'N', 'D', 'P', 'EY2', 'P', 'ER0']), 'sandstone': ('NN', ['S', 'AE1', 'N', 'D', 'S', 'T', 'OW2', 'N']), 'sandwich': ('NN', ['S', 'AE1', 'N', 'D', 'W', 'IH0', 'CH']), 'sandwiched': ('VBN', ['S', 'AE1', 'N', 'D', 'W', 'IH2', 'CH', 'T']), 'sandy': ('NN', ['S', 'AE1', 'N', 'D', 'IY0']), 'sane': ('NN', ['S', 'EY1', 'N']), 'sang': ('NN', ['S', 'AE1', 'NG']), 'sanguine': ('NN', ['S', 'AE1', 'NG', 'G', 'W', 'IH0', 'N']), 'sanhedrin': ('NN', ['S', 'AE2', 'N', 'HH', 'IY1', 'D', 'R', 'IH0', 'N']), 'sanitary': ('JJ', ['S', 'AE1', 'N', 'IH0', 'T', 'EH2', 'R', 'IY0']), 'sanitation': ('NN', ['S', 'AE2', 'N', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'sanity': ('NN', ['S', 'AE1', 'N', 'AH0', 'T', 'IY0']), 'sank': ('NN', ['S', 'AE1', 'NG', 'K']), 'sans': ('NNS', ['S', 'AE1', 'N', 'Z']), 'sanskrit': ('NN', ['S', 'AE1', 'N', 'S', 'K', 'R', 'IH0', 'T']), 'santer': ('NN', ['S', 'AE1', 'N', 'T', 'ER0']), 'santon': ('NN', ['S', 'AE1', 'N', 'T', 'AH0', 'N']), 'sao': ('NN', ['S', 'AW1']), 'sap': ('NN', ['S', 'AE1', 'P']), 'sapped': ('VBD', ['S', 'AE1', 'P', 'T']), 'sapping': ('VBG', ['S', 'AE1', 'P', 'IH0', 'NG']), 'sapling': ('VBG', 
['S', 'AE1', 'P', 'L', 'IH0', 'NG']), 'saponification': ('NN', ['S', 'AH0', 'P', 'AA2', 'N', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'sapper': ('NN', ['S', 'AE1', 'P', 'ER0']), 'sapphire': ('NN', ['S', 'AE1', 'F', 'AY0', 'ER0']), 'sarasin': ('NN', ['S', 'EH1', 'R', 'IH0', 'S', 'IH0', 'N']), 'sarcasm': ('NN', ['S', 'AA1', 'R', 'K', 'AE2', 'Z', 'AH0', 'M']), 'sarcastic': ('JJ', ['S', 'AA0', 'R', 'K', 'AE1', 'S', 'T', 'IH0', 'K']), 'sarcastically': ('RB', ['S', 'AA0', 'R', 'K', 'AE1', 'S', 'T', 'IH0', 'K', 'L', 'IY0']), 'sarcoma': ('NN', ['S', 'AA0', 'R', 'K', 'OW1', 'M', 'ER0']), 'sarcophagus': ('NN', ['S', 'AA0', 'R', 'K', 'AA1', 'F', 'AH0', 'G', 'AH0', 'S']), 'sardine': ('NN', ['S', 'AA0', 'R', 'D', 'IY1', 'N']), 'sardonic': ('JJ', ['S', 'AA0', 'R', 'D', 'AA1', 'N', 'IH0', 'K']), 'sargasso': ('NN', ['S', 'AA0', 'R', 'G', 'AE1', 'S', 'OW0']), 'sari': ('NN', ['S', 'AA1', 'R', 'IY0']), 'sark': ('NN', ['S', 'AA1', 'R', 'K']), 'sarmatian': ('JJ', ['S', 'AA0', 'R', 'M', 'EY1', 'SH', 'AH0', 'N']), 'sarong': ('NN', ['S', 'ER0', 'AO1', 'NG']), 'saros': ('NNS', ['S', 'AA1', 'R', 'OW0', 'S']), 'sartorial': ('JJ', ['S', 'AA0', 'R', 'T', 'AO1', 'R', 'IY0', 'AH0', 'L']), 'sartorius': ('NN', ['S', 'AA0', 'R', 'T', 'AO1', 'R', 'IY0', 'IH0', 'S']), 'sash': ('NN', ['S', 'AE1', 'SH']), 'sassafras': ('NNS', ['S', 'AE1', 'S', 'AH0', 'F', 'R', 'AE2', 'S']), 'sasse': ('NN', ['S', 'AE1', 'S']), 'sat': ('NN', ['S', 'AE1', 'T']), 'satan': ('NN', ['S', 'EY1', 'T', 'AH0', 'N']), 'satanic': ('NN', ['S', 'AH0', 'T', 'AE1', 'N', 'IH0', 'K']), 'satanism': ('NN', ['S', 'EY1', 'T', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'satanist': ('NN', ['S', 'EY1', 'T', 'AH0', 'N', 'IH0', 'S', 'T']), 'sated': ('VBN', ['S', 'EY1', 'T', 'IH0', 'D']), 'sate': ('NN', ['S', 'EY1', 'T']), 'satellite': ('NN', ['S', 'AE1', 'T', 'AH0', 'L', 'AY2', 'T']), 'satin': ('NN', ['S', 'AE1', 'T', 'AH0', 'N']), 'satire': ('NN', ['S', 'AE1', 'T', 'AY2', 'ER0']), 'satiric': ('NN', ['S', 'AH0', 'T', 'IH1', 'R', 'IH0', 
'K']), 'satirical': ('JJ', ['S', 'AH0', 'T', 'IH1', 'R', 'AH0', 'K', 'AH0', 'L']), 'satirist': ('NN', ['S', 'AE1', 'T', 'ER0', 'AH0', 'S', 'T']), 'satirizing': ('VBG', ['S', 'AE1', 'T', 'ER0', 'AY2', 'Z', 'IH0', 'NG']), 'satirize': ('VB', ['S', 'AE1', 'T', 'ER0', 'AY2', 'Z']), 'satisfaction': ('NN', ['S', 'AE2', 'T', 'AH0', 'S', 'F', 'AE1', 'K', 'SH', 'AH0', 'N']), 'satisfactory': ('NN', ['S', 'AE2', 'T', 'AH0', 'S', 'F', 'AE1', 'K', 'T', 'R', 'IY0']), 'satisfied': ('JJ', ['S', 'AE1', 'T', 'AH0', 'S', 'F', 'AY2', 'D']), 'satisfying': ('VBG', ['S', 'AE1', 'T', 'IH0', 'S', 'F', 'AY2', 'IH0', 'NG']), 'satisfy': ('NN', ['S', 'AE1', 'T', 'AH0', 'S', 'F', 'AY2']), 'saturated': ('VBN', ['S', 'AE1', 'CH', 'ER0', 'EY2', 'T', 'AH0', 'D']), 'saturating': ('VBG', ['S', 'AE1', 'CH', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'saturate': ('NN', ['S', 'AE1', 'CH', 'ER0', 'EY2', 'T']), 'saturation': ('NN', ['S', 'AE2', 'CH', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'saturday': ('NN', ['S', 'AE1', 'T', 'ER0', 'D', 'IY0']), 'saturn': ('NN', ['S', 'AE1', 'T', 'ER0', 'N']), 'sauce': ('NN', ['S', 'AO1', 'S']), 'saucepan': ('NN', ['S', 'AO1', 'S', 'P', 'AE2', 'N']), 'saucer': ('NN', ['S', 'AO1', 'S', 'ER0']), 'saucy': ('NN', ['S', 'AO1', 'S', 'IY0']), 'sauerkraut': ('NN', ['S', 'AW1', 'ER0', 'K', 'R', 'AW2', 'T']), 'saul': ('NN', ['S', 'AO1', 'L']), 'sault': ('NN', ['S', 'AO1', 'L', 'T']), 'saunders': ('NNS', ['S', 'AO1', 'N', 'D', 'ER0', 'Z']), 'saunter': ('NN', ['S', 'AO1', 'N', 'T', 'ER0']), 'saur': ('NN', ['S', 'AO1', 'R']), 'sausage': ('NN', ['S', 'AO1', 'S', 'AH0', 'JH']), 'saute': ('NN', ['S', 'AO0', 'T', 'EY1']), 'sauter': ('NN', ['S', 'AO0', 'T', 'EY1', 'ER0']), 'sauterne': ('NN', ['S', 'OW0', 'T', 'ER1', 'N']), 'savage': ('NN', ['S', 'AE1', 'V', 'AH0', 'JH']), 'savagely': ('RB', ['S', 'AE1', 'V', 'IH0', 'JH', 'L', 'IY0']), 'savagery': ('NN', ['S', 'AE1', 'V', 'IH0', 'JH', 'EH2', 'R', 'IY0']), 'savanna': ('NN', ['S', 'AH0', 'V', 'AE1', 'N', 'AH0']), 'savants': ('NNS', ['S', 'AE1', 'V', 'AH0', 
'N', 'T', 'S']), 'savant': ('NN', ['S', 'AH0', 'V', 'AA1', 'N', 'T']), 'save': ('VB', ['S', 'EY1', 'V']), 'saved': ('VBN', ['S', 'EY1', 'V', 'D']), 'saving': ('VBG', ['S', 'EY1', 'V', 'IH0', 'NG']), 'savely': ('RB', ['S', 'EY1', 'V', 'L', 'IY0']), 'saver': ('NN', ['S', 'EY1', 'V', 'ER0']), 'savin': ('NN', ['S', 'AE1', 'V', 'IH0', 'N']), 'savior': ('NN', ['S', 'EY1', 'V', 'Y', 'ER0']), 'savor': ('NN', ['S', 'EY1', 'V', 'ER0']), 'savored': ('VBN', ['S', 'EY1', 'V', 'ER0', 'D']), 'savoring': ('VBG', ['S', 'EY1', 'V', 'ER0', 'IH0', 'NG']), 'savory': ('NN', ['S', 'EY1', 'V', 'ER0', 'IY0']), 'savoy': ('NN', ['S', 'AH0', 'V', 'OY1']), 'saw': ('NN', ['S', 'AO1']), 'sawed': ('VBN', ['S', 'AO1', 'D']), 'sawing': ('VBG', ['S', 'AO1', 'IH0', 'NG']), 'sawdust': ('NN', ['S', 'AO1', 'D', 'AH2', 'S', 'T']), 'sawmill': ('NN', ['S', 'AO1', 'M', 'IH2', 'L']), 'sawyer': ('NN', ['S', 'AO1', 'Y', 'ER0']), 'sax': ('NN', ['S', 'AE1', 'K', 'S']), 'saxon': ('NN', ['S', 'AE1', 'K', 'S', 'AH0', 'N']), 'saxophone': ('NN', ['S', 'AE1', 'K', 'S', 'AH0', 'F', 'OW2', 'N']), 'say': ('VB', ['S', 'EY1']), 'saying': ('VBG', ['S', 'EY1', 'IH0', 'NG']), 'sayer': ('NN', ['S', 'EY1', 'ER0']), 'scab': ('NN', ['S', 'K', 'AE1', 'B']), 'scabbard': ('NN', ['S', 'K', 'AE1', 'B', 'ER0', 'D']), 'scad': ('NN', ['S', 'K', 'AE1', 'D']), 'scaffold': ('NN', ['S', 'K', 'AE1', 'F', 'AH0', 'L', 'D']), 'scaffolding': ('VBG', ['S', 'K', 'AE1', 'F', 'AH0', 'L', 'D', 'IH0', 'NG']), 'scala': ('NN', ['S', 'K', 'AA1', 'L', 'AH0']), 'scalar': ('NN', ['S', 'K', 'EY1', 'L', 'ER0']), 'scalded': ('VBD', ['S', 'K', 'AO1', 'L', 'D', 'IH0', 'D']), 'scalding': ('VBG', ['S', 'K', 'AO1', 'L', 'D', 'IH0', 'NG']), 'scald': ('NN', ['S', 'K', 'AO1', 'L', 'D']), 'scale': ('NN', ['S', 'K', 'EY1', 'L']), 'scaled': ('VBN', ['S', 'K', 'EY1', 'L', 'D']), 'scaling': ('VBG', ['S', 'K', 'EY1', 'L', 'IH0', 'NG']), 'scallion': ('NN', ['S', 'K', 'AE1', 'L', 'Y', 'AH0', 'N']), 'scallop': ('NN', ['S', 'K', 'AE1', 'L', 'AH0', 'P']), 'scalloped': ('VBD', 
['S', 'K', 'AA1', 'L', 'AH0', 'P', 'T']), 'scalp': ('NN', ['S', 'K', 'AE1', 'L', 'P']), 'scalped': ('VBD', ['S', 'K', 'AE1', 'L', 'P', 'T']), 'scalping': ('VBG', ['S', 'K', 'AE1', 'L', 'P', 'IH0', 'NG']), 'scalpel': ('NN', ['S', 'K', 'AE1', 'L', 'P', 'AH0', 'L']), 'scalper': ('NN', ['S', 'K', 'AE1', 'L', 'P', 'ER0']), 'scampered': ('VBN', ['S', 'K', 'AE1', 'M', 'P', 'ER0', 'D']), 'scampering': ('VBG', ['S', 'K', 'AE1', 'M', 'P', 'ER0', 'IH0', 'NG']), 'scamper': ('NN', ['S', 'K', 'AE1', 'M', 'P', 'ER0']), 'scanned': ('VBN', ['S', 'K', 'AE1', 'N', 'D']), 'scanning': ('VBG', ['S', 'K', 'AE1', 'N', 'IH0', 'NG']), 'scan': ('JJ', ['S', 'K', 'AE1', 'N']), 'scandal': ('NN', ['S', 'K', 'AE1', 'N', 'D', 'AH0', 'L']), 'scandalized': ('VBN', ['S', 'K', 'AE1', 'N', 'D', 'AH0', 'L', 'AY2', 'Z', 'D']), 'scandalize': ('VB', ['S', 'K', 'AE1', 'N', 'D', 'AH0', 'L', 'AY2', 'Z']), 'scandalous': ('JJ', ['S', 'K', 'AE1', 'N', 'D', 'AH0', 'L', 'AH0', 'S']), 'scandia': ('NN', ['S', 'K', 'AE1', 'N', 'D', 'IY0', 'AH0']), 'scandinavian': ('JJ', ['S', 'K', 'AE2', 'N', 'D', 'IH0', 'N', 'EY1', 'V', 'IY0', 'AH0', 'N']), 'scant': ('NN', ['S', 'K', 'AE1', 'N', 'T']), 'scantily': ('RB', ['S', 'K', 'AE1', 'N', 'T', 'AH0', 'L', 'IY0']), 'scantiness': ('NN', ['S', 'K', 'AE1', 'N', 'T', 'IY0', 'N', 'AH0', 'S']), 'scanty': ('NN', ['S', 'K', 'AE1', 'N', 'T', 'IY0']), 'scape': ('NN', ['S', 'K', 'EY1', 'P']), 'scapegoat': ('NN', ['S', 'K', 'EY1', 'P', 'G', 'OW2', 'T']), 'scapula': ('NN', ['S', 'K', 'AE1', 'P', 'Y', 'AH0', 'L', 'AH0']), 'scar': ('NN', ['S', 'K', 'AA1', 'R']), 'scarred': ('VBN', ['S', 'K', 'AA1', 'R', 'D']), 'scarring': ('VBG', ['S', 'K', 'AA1', 'R', 'IH0', 'NG']), 'scaramouch': ('JJ', ['S', 'K', 'AE1', 'R', 'AH0', 'M', 'AW2', 'CH']), 'scarce': ('NN', ['S', 'K', 'EH1', 'R', 'S']), 'scarcely': ('RB', ['S', 'K', 'EH1', 'R', 'S', 'L', 'IY0']), 'scarcity': ('NN', ['S', 'K', 'EH1', 'R', 'S', 'IH0', 'T', 'IY0']), 'scared': ('VBN', ['S', 'K', 'EH1', 'R', 'D']), 'scaring': ('VBG', ['S', 'K', 'EH1', 
'R', 'IH0', 'NG']), 'scare': ('NN', ['S', 'K', 'EH1', 'R']), 'scarecrow': ('NN', ['S', 'K', 'AE1', 'R', 'K', 'R', 'OW0']), 'scarf': ('NN', ['S', 'K', 'AA1', 'R', 'F']), 'scarfs': ('NN', ['S', 'K', 'AA1', 'R', 'F', 'S']), 'scarves': ('NNS', ['S', 'K', 'AA1', 'R', 'V', 'Z']), 'scarlet': ('NN', ['S', 'K', 'AA1', 'R', 'L', 'AH0', 'T']), 'scarp': ('NN', ['S', 'K', 'AA1', 'R', 'P']), 'scarry': ('NN', ['S', 'K', 'AE1', 'R', 'IY0']), 'scary': ('JJ', ['S', 'K', 'EH1', 'R', 'IY0']), 'scat': ('NN', ['S', 'K', 'AE1', 'T']), 'scathing': ('VBG', ['S', 'K', 'EY1', 'DH', 'IH0', 'NG']), 'scattered': ('VBN', ['S', 'K', 'AE1', 'T', 'ER0', 'D']), 'scattering': ('VBG', ['S', 'K', 'AE1', 'T', 'ER0', 'IH0', 'NG']), 'scatter': ('NN', ['S', 'K', 'AE1', 'T', 'ER0']), 'scattergood': ('NN', ['S', 'K', 'AE1', 'T', 'ER0', 'G', 'UH2', 'D']), 'scavenge': ('NN', ['S', 'K', 'AE1', 'V', 'AH0', 'N', 'JH']), 'scavenger': ('NN', ['S', 'K', 'AE1', 'V', 'AH0', 'N', 'JH', 'ER0']), 'scenario': ('NN', ['S', 'IH0', 'N', 'EH1', 'R', 'IY0', 'OW0']), 'scene': ('NN', ['S', 'IY1', 'N']), 'scenery': ('NN', ['S', 'IY1', 'N', 'ER0', 'IY0']), 'scenic': ('NN', ['S', 'IY1', 'N', 'IH0', 'K']), 'scented': ('VBN', ['S', 'EH1', 'N', 'T', 'IH0', 'D']), 'scent': ('NN', ['S', 'EH1', 'N', 'T']), 'sceptre': ('NN', ['S', 'EH1', 'P', 'T', 'ER0']), 'schade': ('NN', ['SH', 'EY1', 'D']), 'schedule': ('NN', ['S', 'K', 'EH1', 'JH', 'UH0', 'L']), 'schemata': ('NNS', ['S', 'K', 'IH0', 'M', 'AE1', 'T', 'AH0']), 'schematic': ('JJ', ['S', 'K', 'IH0', 'M', 'AE1', 'T', 'IH0', 'K']), 'scheme': ('NN', ['S', 'K', 'IY1', 'M']), 'schemed': ('VBN', ['S', 'K', 'IY1', 'M', 'D']), 'scheming': ('VBG', ['S', 'K', 'IY1', 'M', 'IH0', 'NG']), 'schemer': ('NN', ['S', 'K', 'IY1', 'M', 'ER0']), 'schiller': ('NN', ['SH', 'IH1', 'L', 'ER0']), 'schilling': ('VBG', ['SH', 'IH1', 'L', 'IH0', 'NG']), 'schism': ('NN', ['S', 'K', 'IH1', 'Z', 'AH0', 'M']), 'schist': ('NN', ['SH', 'IH1', 'S', 'T']), 'schnapps': ('NN', ['SH', 'N', 'AE1', 'P', 'S']), 'scholar': ('NN', 
['S', 'K', 'AA1', 'L', 'ER0']), 'scholarly': ('RB', ['S', 'K', 'AA1', 'L', 'ER0', 'L', 'IY0']), 'scholarship': ('NN', ['S', 'K', 'AA1', 'L', 'ER0', 'SH', 'IH2', 'P']), 'scholastic': ('JJ', ['S', 'K', 'AH0', 'L', 'AE1', 'S', 'T', 'IH0', 'K']), 'school': ('NN', ['S', 'K', 'UW1', 'L']), 'schooled': ('VBN', ['S', 'K', 'UW1', 'L', 'D']), 'schooling': ('VBG', ['S', 'K', 'UW1', 'L', 'IH0', 'NG']), 'schoolbook': ('NN', ['S', 'K', 'UW1', 'L', 'B', 'UH2', 'K']), 'schoolboy': ('NN', ['S', 'K', 'UW1', 'L', 'B', 'OY2']), 'schoolhouse': ('NN', ['S', 'K', 'UW1', 'L', 'HH', 'AW2', 'S']), 'schoolmaster': ('NN', ['S', 'K', 'UW1', 'L', 'M', 'AE2', 'S', 'T', 'ER0']), 'schoolmate': ('NN', ['S', 'K', 'UW1', 'L', 'M', 'EY2', 'T']), 'schoolroom': ('NN', ['S', 'K', 'UW1', 'L', 'R', 'UW2', 'M']), 'schooner': ('NN', ['S', 'K', 'UW1', 'N', 'ER0']), 'science': ('NN', ['S', 'AY1', 'AH0', 'N', 'S']), 'scientific': ('JJ', ['S', 'AY2', 'AH0', 'N', 'T', 'IH1', 'F', 'IH0', 'K']), 'scientifically': ('RB', ['S', 'AY2', 'AH0', 'N', 'T', 'IH1', 'F', 'IH0', 'K', 'AH0', 'L', 'IY0']), 'scientist': ('NN', ['S', 'AY1', 'AH0', 'N', 'T', 'IH0', 'S', 'T']), 'scintilla': ('NN', ['S', 'IH0', 'N', 'T', 'IH1', 'L', 'AH0']), 'scion': ('NN', ['S', 'AY1', 'AH0', 'N']), 'scissor': ('NN', ['S', 'IH1', 'Z', 'ER0']), 'scissors': ('NNS', ['S', 'IH1', 'Z', 'ER0', 'Z']), 'scleroderma': ('NN', ['S', 'K', 'L', 'IH2', 'R', 'AH0', 'D', 'ER1', 'M', 'AH0']), 'sclerosis': ('NN', ['S', 'K', 'L', 'ER0', 'OW1', 'S', 'AH0', 'S']), 'scoff': ('NN', ['S', 'K', 'AO1', 'F']), 'scoffed': ('NN', ['S', 'K', 'AO1', 'F', 'T']), 'scolded': ('VBD', ['S', 'K', 'OW1', 'L', 'D', 'AH0', 'D']), 'scolding': ('VBG', ['S', 'K', 'OW1', 'L', 'D', 'IH0', 'NG']), 'scold': ('NN', ['S', 'K', 'OW1', 'L', 'D']), 'sconce': ('NN', ['S', 'K', 'AA1', 'N', 'S']), 'scone': ('NN', ['S', 'K', 'OW1', 'N']), 'scoop': ('NN', ['S', 'K', 'UW1', 'P']), 'scooped': ('VBD', ['S', 'K', 'UW1', 'P', 'T']), 'scooping': ('VBG', ['S', 'K', 'UW1', 'P', 'IH0', 'NG']), 'scooper': ('NN', 
['S', 'K', 'UW1', 'P', 'ER0']), 'scoot': ('NN', ['S', 'K', 'UW1', 'T']), 'scope': ('NN', ['S', 'K', 'OW1', 'P']), 'scorched': ('VBN', ['S', 'K', 'AO1', 'R', 'CH', 'T']), 'scorching': ('VBG', ['S', 'K', 'AO1', 'R', 'CH', 'IH0', 'NG']), 'scorch': ('NN', ['S', 'K', 'AO1', 'R', 'CH']), 'score': ('NN', ['S', 'K', 'AO1', 'R']), 'scored': ('VBN', ['S', 'K', 'AO1', 'R', 'D']), 'scoring': ('VBG', ['S', 'K', 'AO1', 'R', 'IH0', 'NG']), 'scorer': ('NN', ['S', 'K', 'AO1', 'R', 'ER0']), 'scorn': ('NN', ['S', 'K', 'AO1', 'R', 'N']), 'scorned': ('VBN', ['S', 'K', 'AO1', 'R', 'N', 'D']), 'scornful': ('JJ', ['S', 'K', 'AO1', 'R', 'N', 'F', 'AH0', 'L']), 'scorpio': ('NN', ['S', 'K', 'AO1', 'R', 'P', 'IY0', 'OW2']), 'scorpion': ('NN', ['S', 'K', 'AO1', 'R', 'P', 'IY0', 'AH0', 'N']), 'scot': ('NN', ['S', 'K', 'AA1', 'T']), 'scotch': ('NN', ['S', 'K', 'AA1', 'CH']), 'scotched': ('VBN', ['S', 'K', 'AA1', 'CH', 'T']), 'scotia': ('NN', ['S', 'K', 'OW1', 'SH', 'AH0']), 'scots': ('NNS', ['S', 'K', 'AA1', 'T', 'S']), 'scotsman': ('NN', ['S', 'K', 'AA1', 'T', 'S', 'M', 'AH0', 'N']), 'scottish': ('JJ', ['S', 'K', 'AA1', 'T', 'IH0', 'SH']), 'scoundrel': ('NN', ['S', 'K', 'AW1', 'N', 'D', 'R', 'AH0', 'L']), 'scoured': ('VBN', ['S', 'K', 'AW1', 'ER0', 'D']), 'scouring': ('VBG', ['S', 'K', 'AW1', 'ER0', 'IH0', 'NG']), 'scour': ('NN', ['S', 'K', 'AW1', 'ER0']), 'scourge': ('NN', ['S', 'K', 'ER1', 'JH']), 'scourged': ('VBN', ['S', 'K', 'ER1', 'JH', 'D']), 'scourging': ('VBG', ['S', 'K', 'ER1', 'JH', 'IH0', 'NG']), 'scout': ('NN', ['S', 'K', 'AW1', 'T']), 'scouted': ('VBN', ['S', 'K', 'AW1', 'T', 'AH0', 'D']), 'scouting': ('VBG', ['S', 'K', 'AW1', 'T', 'IH0', 'NG']), 'scovel': ('NN', ['S', 'K', 'OW1', 'V', 'AH0', 'L']), 'scow': ('NN', ['S', 'K', 'AW1']), 'scowled': ('VBN', ['S', 'K', 'AW1', 'L', 'D']), 'scowling': ('VBG', ['S', 'K', 'AW1', 'L', 'IH0', 'NG']), 'scowl': ('NN', ['S', 'K', 'AW1', 'L']), 'scrabble': ('JJ', ['S', 'K', 'R', 'AE1', 'B', 'AH0', 'L']), 'scrambled': ('VBN', ['S', 'K', 'R', 
'AE1', 'M', 'B', 'AH0', 'L', 'D']), 'scrambling': ('VBG', ['S', 'K', 'R', 'AE1', 'M', 'B', 'AH0', 'L', 'IH0', 'NG']), 'scramble': ('JJ', ['S', 'K', 'R', 'AE1', 'M', 'B', 'AH0', 'L']), 'scrap': ('NN', ['S', 'K', 'R', 'AE1', 'P']), 'scrapbook': ('NN', ['S', 'K', 'R', 'AE1', 'P', 'B', 'UH2', 'K']), 'scraped': ('VBD', ['S', 'K', 'R', 'EY1', 'P', 'T']), 'scraping': ('VBG', ['S', 'K', 'R', 'EY1', 'P', 'IH0', 'NG']), 'scrape': ('NN', ['S', 'K', 'R', 'EY1', 'P']), 'scraper': ('NN', ['S', 'K', 'R', 'EY1', 'P', 'ER0']), 'scrappy': ('JJ', ['S', 'K', 'R', 'AE1', 'P', 'IY0']), 'scratched': ('VBN', ['S', 'K', 'R', 'AE1', 'CH', 'T']), 'scratching': ('VBG', ['S', 'K', 'R', 'AE1', 'CH', 'IH0', 'NG']), 'scratch': ('NN', ['S', 'K', 'R', 'AE1', 'CH']), 'scratchy': ('NN', ['S', 'K', 'R', 'AE1', 'CH', 'IY0']), 'scrawl': ('NN', ['S', 'K', 'R', 'AO1', 'L']), 'scrawled': ('VBN', ['S', 'K', 'R', 'AO1', 'L', 'D']), 'scrawny': ('NN', ['S', 'K', 'R', 'AO1', 'N', 'IY0']), 'screamed': ('VBN', ['S', 'K', 'R', 'IY1', 'M', 'D']), 'screaming': ('VBG', ['S', 'K', 'R', 'IY1', 'M', 'IH0', 'NG']), 'scream': ('NN', ['S', 'K', 'R', 'IY1', 'M']), 'screamer': ('NN', ['S', 'K', 'R', 'IY1', 'M', 'ER0']), 'screeched': ('VBN', ['S', 'K', 'R', 'IY1', 'CH', 'T']), 'screeching': ('VBG', ['S', 'K', 'R', 'IY1', 'CH', 'IH0', 'NG']), 'screech': ('NN', ['S', 'K', 'R', 'IY1', 'CH']), 'screed': ('NN', ['S', 'K', 'R', 'IY1', 'D']), 'screen': ('NN', ['S', 'K', 'R', 'IY1', 'N']), 'screened': ('VBN', ['S', 'K', 'R', 'IY1', 'N', 'D']), 'screening': ('VBG', ['S', 'K', 'R', 'IY1', 'N', 'IH0', 'NG']), 'screenings': ('NNS', ['S', 'K', 'R', 'IY1', 'N', 'IH0', 'NG', 'Z']), 'screw': ('NN', ['S', 'K', 'R', 'UW1']), 'screwed': ('VBN', ['S', 'K', 'R', 'UW1', 'D']), 'screwing': ('VBG', ['S', 'K', 'R', 'UW1', 'IH0', 'NG']), 'scribble': ('JJ', ['S', 'K', 'R', 'IH1', 'B', 'AH0', 'L']), 'scribbled': ('VBN', ['S', 'K', 'R', 'IH1', 'B', 'AH0', 'L', 'D']), 'scribbling': ('VBG', ['S', 'K', 'R', 'IH1', 'B', 'AH0', 'L', 'IH0', 'NG']), 
'scribbler': ('NN', ['S', 'K', 'R', 'IH1', 'B', 'L', 'ER0']), 'scribe': ('NN', ['S', 'K', 'R', 'AY1', 'B']), 'scriber': ('NN', ['S', 'K', 'R', 'AY1', 'B', 'ER0']), 'scrimmage': ('NN', ['S', 'K', 'R', 'IH1', 'M', 'IH0', 'JH']), 'scrimping': ('VBG', ['S', 'K', 'R', 'IH1', 'M', 'P', 'IH0', 'NG']), 'scrimp': ('NN', ['S', 'K', 'R', 'IH1', 'M', 'P']), 'scrimshaw': ('NN', ['S', 'K', 'R', 'IH1', 'M', 'SH', 'AO2']), 'scrip': ('NN', ['S', 'K', 'R', 'IH1', 'P']), 'script': ('NN', ['S', 'K', 'R', 'IH1', 'P', 'T']), 'scriptural': ('JJ', ['S', 'K', 'R', 'IH1', 'P', 'CH', 'ER0', 'AH0', 'L']), 'scripture': ('NN', ['S', 'K', 'R', 'IH1', 'P', 'CH', 'ER0']), 'scrivener': ('NN', ['S', 'K', 'R', 'IH1', 'V', 'N', 'ER0']), 'scrod': ('NN', ['S', 'K', 'R', 'AA1', 'D']), 'scroll': ('NN', ['S', 'K', 'R', 'OW1', 'L']), 'scrubbed': ('NN', ['S', 'K', 'R', 'AH1', 'B', 'D']), 'scrubbing': ('VBG', ['S', 'K', 'R', 'AH1', 'B', 'IH0', 'NG']), 'scrub': ('NN', ['S', 'K', 'R', 'AH1', 'B']), 'scrubber': ('NN', ['S', 'K', 'R', 'AH1', 'B', 'ER0']), 'scrubby': ('NN', ['S', 'K', 'R', 'AH1', 'B', 'IY0']), 'scrunch': ('NN', ['S', 'K', 'R', 'AH1', 'N', 'CH']), 'scruple': ('NN', ['S', 'K', 'R', 'UW1', 'P', 'AH0', 'L']), 'scrupulous': ('JJ', ['S', 'K', 'R', 'UW1', 'P', 'Y', 'AH0', 'L', 'AH0', 'S']), 'scrutinized': ('VBN', ['S', 'K', 'R', 'UW1', 'T', 'AH0', 'N', 'AY2', 'Z', 'D']), 'scrutinizing': ('VBG', ['S', 'K', 'R', 'UW1', 'T', 'AH0', 'N', 'AY2', 'Z', 'IH0', 'NG']), 'scrutinize': ('VB', ['S', 'K', 'R', 'UW1', 'T', 'AH0', 'N', 'AY2', 'Z']), 'scrutiny': ('NN', ['S', 'K', 'R', 'UW1', 'T', 'AH0', 'N', 'IY0']), 'scud': ('NN', ['S', 'K', 'AH1', 'D']), 'scuff': ('NN', ['S', 'K', 'AH1', 'F']), 'scuffed': ('NN', ['S', 'K', 'AH1', 'F', 'T']), 'scuffled': ('VBN', ['S', 'K', 'AH1', 'F', 'AH0', 'L', 'D']), 'scuffling': ('VBG', ['S', 'K', 'AH1', 'F', 'AH0', 'L', 'IH0', 'NG']), 'scuffle': ('NN', ['S', 'K', 'AH1', 'F', 'AH0', 'L']), 'scull': ('NN', ['S', 'K', 'AH1', 'L']), 'scullion': ('NN', ['S', 'K', 'AH1', 'L', 'Y', 'AH0', 
'N']), 'sculptor': ('NN', ['S', 'K', 'AH1', 'L', 'P', 'T', 'ER0']), 'sculptural': ('JJ', ['S', 'K', 'AH1', 'L', 'P', 'CH', 'ER0', 'AH0', 'L']), 'sculpture': ('NN', ['S', 'K', 'AH1', 'L', 'P', 'CH', 'ER0']), 'sculptured': ('VBN', ['S', 'K', 'AH1', 'L', 'P', 'CH', 'ER0', 'D']), 'sculpturing': ('VBG', ['S', 'K', 'AH1', 'L', 'P', 'T', 'ER0', 'IH0', 'NG']), 'scum': ('NN', ['S', 'K', 'AH1', 'M']), 'scupper': ('NN', ['S', 'K', 'AH1', 'P', 'ER0']), 'scuppernong': ('NN', ['S', 'K', 'AH1', 'P', 'ER0', 'N', 'AO2', 'NG']), 'scurrilous': ('JJ', ['S', 'K', 'ER1', 'AH0', 'L', 'AH0', 'S']), 'scurry': ('NN', ['S', 'K', 'ER1', 'IY0']), 'scuttle': ('NN', ['S', 'K', 'AH1', 'T', 'AH0', 'L']), 'scuttled': ('VBN', ['S', 'K', 'AH1', 'T', 'AH0', 'L', 'D']), 'scuttling': ('VBG', ['S', 'K', 'AH1', 'T', 'AH0', 'L', 'IH0', 'NG']), 'scylla': ('NN', ['S', 'IH1', 'L', 'AH0']), 'scythe': ('NN', ['S', 'IH1', 'TH']), 'scythian': ('JJ', ['S', 'IH1', 'TH', 'IY0', 'AH0', 'N']), 'sea': ('NN', ['S', 'IY1']), 'seabeach': ('NN', ['S', 'IY1', 'B', 'IY2', 'CH']), 'seaboard': ('NN', ['S', 'IY1', 'B', 'AO2', 'R', 'D']), 'seacoast': ('NN', ['S', 'IY1', 'K', 'OW2', 'S', 'T']), 'seafarer': ('NN', ['S', 'IY1', 'F', 'EH2', 'R', 'ER0']), 'seagoing': ('VBG', ['S', 'IY1', 'G', 'OW2', 'IH0', 'NG']), 'seal': ('NN', ['S', 'IY1', 'L']), 'sealed': ('VBN', ['S', 'IY1', 'L', 'D']), 'sealer': ('NN', ['S', 'IY1', 'L', 'ER0']), 'seam': ('NN', ['S', 'IY1', 'M']), 'seamen': ('NNS', ['S', 'IY1', 'M', 'AH0', 'N']), 'seaman': ('NN', ['S', 'IY1', 'M', 'AH0', 'N']), 'seamless': ('NN', ['S', 'IY1', 'M', 'L', 'AH0', 'S']), 'seamster': ('NN', ['S', 'IY1', 'M', 'S', 'T', 'ER0']), 'seamstress': ('NN', ['S', 'IY1', 'M', 'S', 'T', 'R', 'IH0', 'S']), 'seamy': ('NN', ['S', 'IY1', 'M', 'IY0']), 'sean': ('NN', ['SH', 'AO1', 'N']), 'seance': ('NN', ['S', 'EY1', 'AA0', 'N', 'S']), 'seaport': ('NN', ['S', 'IY1', 'P', 'AO2', 'R', 'T']), 't': ('NN', ['T', 'IY1']), 'ta': ('NN', ['T', 'AA1']), 'tab': ('NN', ['T', 'AE1', 'B']), 'tabby': ('NN', ['T', 
'AE1', 'B', 'IY0']), 'taber': ('NN', ['T', 'EY1', 'B', 'ER0']), 'tabernacle': ('NN', ['T', 'AE1', 'B', 'ER0', 'N', 'AE2', 'K', 'AH0', 'L']), 'tablature': ('NN', ['T', 'AE1', 'B', 'L', 'AH0', 'CH', 'ER0']), 'table': ('NN', ['T', 'EY1', 'B', 'AH0', 'L']), 'tableaux': ('NN', ['T', 'AH0', 'B', 'L', 'OW1']), 'tableau': ('NN', ['T', 'AH0', 'B', 'L', 'OW1']), 'tablecloth': ('NN', ['T', 'EY1', 'B', 'AH0', 'L', 'K', 'L', 'AO2', 'TH']), 'tabler': ('NN', ['T', 'EY1', 'B', 'AH0', 'L', 'ER0']), 'tablespoon': ('NN', ['T', 'EY1', 'B', 'AH0', 'L', 'S', 'P', 'UW2', 'N']), 'tablet': ('NN', ['T', 'AE1', 'B', 'L', 'AH0', 'T']), 'tableware': ('NN', ['T', 'EY1', 'B', 'AH0', 'L', 'W', 'EH2', 'R']), 'tabling': ('VBG', ['T', 'EY1', 'B', 'AH0', 'L', 'IH0', 'NG']), 'taboo': ('NN', ['T', 'AE0', 'B', 'UW1']), 'tabor': ('NN', ['T', 'EY1', 'B', 'ER0']), 'tabulated': ('VBN', ['T', 'AE1', 'B', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'D']), 'tabulating': ('VBG', ['T', 'AE1', 'B', 'Y', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'tabulate': ('NN', ['T', 'AE1', 'B', 'Y', 'AH0', 'L', 'EY2', 'T']), 'tabulation': ('NN', ['T', 'AE2', 'B', 'Y', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'tac': ('NN', ['T', 'AE1', 'K']), 'tacit': ('NN', ['T', 'AE1', 'S', 'IH0', 'T']), 'taciturn': ('NN', ['T', 'AE1', 'S', 'IH0', 'T', 'ER2', 'N']), 'tack': ('NN', ['T', 'AE1', 'K']), 'tacked': ('NNS', ['T', 'AE1', 'K', 'T']), 'tacking': ('VBG', ['T', 'AE1', 'K', 'IH0', 'NG']), 'tacker': ('NN', ['T', 'AE1', 'K', 'ER0']), 'tackle': ('NN', ['T', 'AE1', 'K', 'AH0', 'L']), 'tackled': ('VBN', ['T', 'AE1', 'K', 'AH0', 'L', 'D']), 'tackling': ('VBG', ['T', 'AE1', 'K', 'L', 'IH0', 'NG']), 'tacky': ('NN', ['T', 'AE1', 'K', 'IY0']), 'tact': ('NN', ['T', 'AE1', 'K', 'T']), 'tactic': ('JJ', ['T', 'AE1', 'K', 'T', 'IH0', 'K']), 'tactical': ('JJ', ['T', 'AE1', 'K', 'T', 'IH0', 'K', 'AH0', 'L']), 'tactician': ('JJ', ['T', 'AE0', 'K', 'T', 'IH1', 'SH', 'AH0', 'N']), 'tactics': ('NNS', ['T', 'AE1', 'K', 'T', 'IH0', 'K', 'S']), 'tactile': ('NN', ['T', 
'AE1', 'K', 'T', 'IH0', 'L']), 'tactless': ('NN', ['T', 'AE1', 'K', 'T', 'L', 'AH0', 'S']), 'tadpole': ('NN', ['T', 'AE1', 'D', 'P', 'OW2', 'L']), 'taffeta': ('NN', ['T', 'AE1', 'F', 'AH0', 'T', 'AH0']), 'taffy': ('NN', ['T', 'AE1', 'F', 'IY0']), 'tag': ('NN', ['T', 'AE1', 'G']), 'tagged': ('VBN', ['T', 'AE1', 'G', 'D']), 'tagging': ('VBG', ['T', 'AE1', 'G', 'IH0', 'NG']), 'tail': ('NN', ['T', 'EY1', 'L']), 'tailed': ('VBN', ['T', 'EY1', 'L', 'D']), 'tailing': ('VBG', ['T', 'EY1', 'L', 'IH0', 'NG']), 'tailor': ('NN', ['T', 'EY1', 'L', 'ER0']), 'tailored': ('VBN', ['T', 'EY1', 'L', 'ER0', 'D']), 'tailoring': ('VBG', ['T', 'EY1', 'L', 'ER0', 'IH0', 'NG']), 'taint': ('NN', ['T', 'EY1', 'N', 'T']), 'tainted': ('VBN', ['T', 'EY1', 'N', 'T', 'IH0', 'D']), 'tainting': ('VBG', ['T', 'EY1', 'N', 'T', 'IH0', 'NG']), 'taira': ('NN', ['T', 'AA0', 'IH1', 'R', 'AH0']), 'tait': ('NN', ['T', 'EY1', 'T']), 'take': ('VB', ['T', 'EY1', 'K']), 'took': ('VBD', ['T', 'UH1', 'K']), 'taking': ('VBG', ['T', 'EY1', 'K', 'IH0', 'NG']), 'taken': ('VBN', ['T', 'EY1', 'K', 'AH0', 'N']), 'taker': ('NN', ['T', 'EY1', 'K', 'ER0']), 'talbot': ('NN', ['T', 'AE1', 'L', 'B', 'AH0', 'T']), 'talc': ('NN', ['T', 'AE1', 'L', 'K']), 'tale': ('NN', ['T', 'EY1', 'L']), 'u': ('NN', ['Y', 'UW1']), 'ubiquitous': ('JJ', ['Y', 'UW0', 'B', 'IH1', 'K', 'W', 'IH0', 'T', 'AH0', 'S']), 'ubiquity': ('NN', ['Y', 'UW0', 'B', 'IH1', 'K', 'W', 'IH0', 'T', 'IY0']), 'ugh': ('IN', ['AH1', 'G']), 'ugliness': ('NN', ['AH1', 'G', 'L', 'IY0', 'N', 'AH0', 'S']), 'ugly': ('RB', ['AH1', 'G', 'L', 'IY0']), 'ukase': ('NN', ['Y', 'UW1', 'K', 'EY2', 'Z']), 'ulcer': ('NN', ['AH1', 'L', 'S', 'ER0']), 'ulceration': ('NN', ['AH2', 'L', 'S', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'ulmus': ('NN', ['AH1', 'L', 'M', 'AH0', 'S']), 'ulster': ('NN', ['AH1', 'L', 'S', 'T', 'ER0']), 'ulterior': ('JJ', ['AH0', 'L', 'T', 'IH1', 'R', 'IY0', 'ER0']), 'ultima': ('NN', ['AH1', 'L', 'T', 'IH0', 'M', 'AH0']), 'ultimate': ('JJ', ['AH1', 'L', 'T', 'AH0', 'M', 
'AH0', 'T']), 'ultimately': ('RB', ['AH1', 'L', 'T', 'AH0', 'M', 'AH0', 'T', 'L', 'IY0']), 'ultimatums': ('NNS', ['AH2', 'L', 'T', 'AH0', 'M', 'EY1', 'T', 'AH0', 'M', 'Z']), 'ultimatum': ('NN', ['AH2', 'L', 'T', 'AH0', 'M', 'EY1', 'T', 'AH0', 'M']), 'ultra': ('JJ', ['AH1', 'L', 'T', 'R', 'AH0']), 'ultraviolet': ('NN', ['AH2', 'L', 'T', 'R', 'AH0', 'V', 'AY1', 'AH0', 'L', 'IH0', 'T']), 'ulva': ('NN', ['UW1', 'L', 'V', 'AH0']), 'umbel': ('NN', ['AH1', 'M', 'B', 'AH0', 'L']), 'umbilical': ('JJ', ['AH0', 'M', 'B', 'IH1', 'L', 'IH0', 'K', 'AH0', 'L']), 'umbra': ('NN', ['AH1', 'M', 'B', 'R', 'AH0']), 'umbrage': ('NN', ['AH1', 'M', 'B', 'R', 'IH0', 'JH']), 'umbrella': ('NN', ['AH0', 'M', 'B', 'R', 'EH1', 'L', 'AH0']), 'umpire': ('NN', ['AH1', 'M', 'P', 'AY2', 'ER0']), 'unable': ('JJ', ['AH0', 'N', 'EY1', 'B', 'AH0', 'L']), 'unabridged': ('JJ', ['AH2', 'N', 'AH0', 'B', 'R', 'IH1', 'JH', 'D']), 'unacceptable': ('JJ', ['AH2', 'N', 'AE0', 'K', 'S', 'EH1', 'P', 'T', 'AH0', 'B', 'AH0', 'L']), 'unaccountable': ('JJ', ['AH2', 'N', 'AH0', 'K', 'AW1', 'N', 'T', 'AH0', 'B', 'AH0', 'L']), 'unaccustomed': ('JJ', ['AH2', 'N', 'AH0', 'K', 'AH1', 'S', 'T', 'AH0', 'M', 'D']), 'unadulterated': ('JJ', ['AH2', 'N', 'AH0', 'D', 'AH1', 'L', 'T', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'unaffected': ('JJ', ['AH2', 'N', 'AH0', 'F', 'EH1', 'K', 'T', 'IH0', 'D']), 'unalienable': ('JJ', ['AH0', 'N', 'EY1', 'L', 'IY0', 'EH0', 'N', 'AH0', 'B', 'AH0', 'L']), 'unalloyed': ('JJ', ['AH0', 'N', 'AE1', 'L', 'OY2', 'D']), 'unanimity': ('NN', ['Y', 'UW2', 'N', 'AH0', 'N', 'IH1', 'M', 'AH0', 'T', 'IY0']), 'unanimous': ('JJ', ['Y', 'UW0', 'N', 'AE1', 'N', 'AH0', 'M', 'AH0', 'S']), 'unanswerable': ('JJ', ['AH2', 'N', 'AE2', 'N', 'S', 'ER0', 'AH0', 'B', 'AH0', 'L']), 'unanswered': ('JJ', ['AH2', 'N', 'AE1', 'N', 'S', 'ER0', 'D']), 'unapproved': ('JJ', ['AH2', 'N', 'AH0', 'P', 'R', 'UW1', 'V', 'D']), 'unarm': ('NN', ['AH0', 'N', 'AA1', 'R', 'M']), 'unarmed': ('JJ', ['AH0', 'N', 'AA1', 'R', 'M', 'D']), 'unassuming': 
('VBG', ['AH2', 'N', 'AH0', 'S', 'UW1', 'M', 'IH0', 'NG']), 'unattached': ('JJ', ['AH2', 'N', 'AH0', 'T', 'AE1', 'CH', 'T']), 'unavoidable': ('JJ', ['AH2', 'N', 'AH0', 'V', 'OY1', 'D', 'AH0', 'B', 'AH0', 'L']), 'unaware': ('NN', ['AH2', 'N', 'AH0', 'W', 'EH1', 'R']), 'unawares': ('NNS', ['AH2', 'N', 'AH0', 'W', 'EH1', 'R', 'Z']), 'unbalanced': ('JJ', ['AH0', 'N', 'B', 'AE1', 'L', 'AH0', 'N', 'S', 'T']), 'unbecoming': ('VBG', ['AH2', 'N', 'B', 'IH0', 'K', 'AH1', 'M', 'IH0', 'NG']), 'unbeknown': ('JJ', ['AH2', 'N', 'B', 'IH0', 'N', 'OW1', 'N']), 'unbelieving': ('VBG', ['AH2', 'N', 'B', 'AH0', 'L', 'IY1', 'V', 'IH0', 'NG']), 'unbending': ('VBG', ['AH2', 'N', 'B', 'EH1', 'N', 'D', 'IH0', 'NG']), 'unbend': ('NN', ['AH2', 'N', 'B', 'EH1', 'N', 'D']), 'unbiased': ('JJ', ['AH2', 'N', 'B', 'AY1', 'AH0', 'S', 'T']), 'unbound': ('NN', ['AH0', 'N', 'B', 'AW1', 'N', 'D']), 'unblemished': ('JJ', ['AH0', 'N', 'B', 'L', 'EH1', 'M', 'IH0', 'SH', 'T']), 'unbolt': ('NN', ['AH1', 'N', 'B', 'OW2', 'L', 'T']), 'unborn': ('JJ', ['AH1', 'N', 'B', 'AO1', 'R', 'N']), 'unbounded': ('JJ', ['AH0', 'N', 'B', 'AW1', 'N', 'D', 'IH0', 'D']), 'unbowed': ('JJ', ['AH0', 'N', 'B', 'OW1', 'D']), 'unbridled': ('JJ', ['AH0', 'N', 'B', 'R', 'AY1', 'D', 'AH0', 'L', 'D']), 'unbroken': ('JJ', ['AH0', 'N', 'B', 'R', 'OW1', 'K', 'AH0', 'N']), 'unbundle': ('JJ', ['AH0', 'N', 'B', 'AH1', 'N', 'D', 'AH0', 'L']), 'unburden': ('JJ', ['AH0', 'N', 'B', 'ER1', 'D', 'AH0', 'N']), 'unbutton': ('NN', ['AH0', 'N', 'B', 'AH1', 'T', 'AH0', 'N']), 'uncanny': ('JJ', ['AH0', 'N', 'K', 'AE1', 'N', 'IY0']), 'uncertain': ('JJ', ['AH0', 'N', 'S', 'ER1', 'T', 'AH0', 'N']), 'uncertainly': ('RB', ['AH0', 'N', 'S', 'ER1', 'T', 'AH0', 'N', 'L', 'IY0']), 'uncertainties': ('NNS', ['AH0', 'N', 'S', 'ER1', 'T', 'AH0', 'N', 'T', 'IY0', 'Z']), 'uncertainty': ('NN', ['AH0', 'N', 'S', 'ER1', 'T', 'AH0', 'N', 'T', 'IY0']), 'uncivil': ('JJ', ['AH0', 'N', 'S', 'IH1', 'V', 'AH0', 'L']), 'uncivilized': ('JJ', ['AH0', 'N', 'S', 'IH1', 'V', 'AH0', 
'L', 'AY0', 'Z', 'D']), 'uncle': ('NN', ['AH1', 'NG', 'K', 'AH0', 'L']), 'unclean': ('JJ', ['AH0', 'N', 'K', 'L', 'IY1', 'N']), 'uncoil': ('NN', ['AH2', 'N', 'K', 'OY1', 'L']), 'uncomfortable': ('JJ', ['AH0', 'N', 'K', 'AH1', 'M', 'F', 'ER0', 'T', 'AH0', 'B', 'AH0', 'L']), 'uncommon': ('JJ', ['AH0', 'N', 'K', 'AA1', 'M', 'AH0', 'N']), 'uncomplete': ('JJ', ['AH2', 'N', 'K', 'AH0', 'M', 'P', 'L', 'IY1', 'T']), 'uncompromising': ('VBG', ['AH0', 'N', 'K', 'AA1', 'M', 'P', 'R', 'AH0', 'M', 'AY0', 'Z', 'IH0', 'NG']), 'unconcern': ('JJ', ['AH2', 'N', 'K', 'AH0', 'N', 'S', 'ER1', 'N']), 'unconcerned': ('JJ', ['AH2', 'N', 'K', 'AH0', 'N', 'S', 'ER1', 'N', 'D']), 'unconditional': ('JJ', ['AH2', 'N', 'K', 'AH0', 'N', 'D', 'IH1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'unconscionable': ('JJ', ['AH0', 'N', 'K', 'AA1', 'N', 'SH', 'AH0', 'N', 'AH0', 'B', 'AH0', 'L']), 'unconscious': ('JJ', ['AH2', 'N', 'K', 'AA1', 'N', 'SH', 'AH0', 'S']), 'unconstitutional': ('JJ', ['AH2', 'N', 'K', 'AA2', 'N', 'S', 'T', 'AH0', 'T', 'UW1', 'SH', 'AH0', 'N', 'AH0', 'L']), 'uncontrollable': ('JJ', ['AH2', 'N', 'K', 'AH0', 'N', 'T', 'R', 'OW1', 'L', 'AH0', 'B', 'AH0', 'L']), 'unconverted': ('JJ', ['AH2', 'N', 'K', 'AH0', 'N', 'V', 'ER1', 'T', 'IH0', 'D']), 'uncork': ('NN', ['AH0', 'N', 'K', 'AO1', 'R', 'K']), 'uncouple': ('JJ', ['AH0', 'N', 'K', 'AH1', 'P', 'AH0', 'L']), 'uncouth': ('NN', ['AH1', 'N', 'K', 'UW1', 'TH']), 'uncovered': ('JJ', ['AH0', 'N', 'K', 'AH1', 'V', 'ER0', 'D']), 'uncovering': ('VBG', ['AH0', 'N', 'K', 'AH1', 'V', 'ER0', 'IH0', 'NG']), 'uncover': ('NN', ['AH0', 'N', 'K', 'AH1', 'V', 'ER0']), 'unctuous': ('JJ', ['AH1', 'NG', 'CH', 'W', 'AH0', 'S']), 'uncut': ('NN', ['AH2', 'N', 'K', 'AH1', 'T']), 'undated': ('JJ', ['AH2', 'N', 'D', 'EY1', 'T', 'IH0', 'D']), 'undaunted': ('JJ', ['AH0', 'N', 'D', 'AO1', 'N', 'T', 'IH0', 'D']), 'undeniable': ('JJ', ['AH2', 'N', 'D', 'IH0', 'N', 'AY1', 'AH0', 'B', 'AH0', 'L']), 'undeniably': ('RB', ['AH2', 'N', 'D', 'IH0', 'N', 'AY1', 'AH0', 'B', 'L', 
'IY0']), 'under': ('IN', ['AH1', 'N', 'D', 'ER0']), 'under-age': ('NN', ['AH1', 'N', 'D', 'ER0', 'EY1', 'JH']), 'underbid': ('NN', ['AH1', 'N', 'D', 'ER0', 'B', 'IH2', 'D']), 'underbrush': ('NN', ['AH1', 'N', 'D', 'ER0', 'B', 'R', 'AH2', 'SH']), 'undercoat': ('NN', ['AH1', 'N', 'D', 'ER0', 'K', 'OW2', 'T']), 'undercurrent': ('NN', ['AH1', 'N', 'D', 'ER0', 'K', 'ER2', 'AH0', 'N', 'T']), 'undercut': ('NN', ['AH1', 'N', 'D', 'ER0', 'K', 'AH2', 'T']), 'underestimate': ('NN', ['AH1', 'N', 'D', 'ER0', 'EH1', 'S', 'T', 'AH0', 'M', 'EY2', 'T']), 'underfoot': ('NN', ['AH2', 'N', 'D', 'ER0', 'F', 'UH1', 'T']), 'underwent': ('NN', ['AH2', 'N', 'D', 'ER0', 'W', 'EH1', 'N', 'T']), 'undergone': ('NN', ['AH2', 'N', 'D', 'ER0', 'G', 'AO1', 'N']), 'undergoing': ('VBG', ['AH2', 'N', 'D', 'ER0', 'G', 'OW1', 'IH0', 'NG']), 'undergo': ('NN', ['AH2', 'N', 'D', 'ER0', 'G', 'OW1']), 'undergraduate': ('NN', ['AH2', 'N', 'D', 'ER0', 'G', 'R', 'AE1', 'JH', 'AH0', 'W', 'AH0', 'T']), 'underground': ('NN', ['AH1', 'N', 'D', 'ER0', 'G', 'R', 'AW2', 'N', 'D']), 'undergrowth': ('NN', ['AH1', 'N', 'D', 'ER0', 'G', 'R', 'OW2', 'TH']), 'underhanded': ('JJ', ['AH1', 'N', 'D', 'ER0', 'HH', 'AE1', 'N', 'D', 'IH0', 'D']), 'underlie': ('NN', ['AH2', 'N', 'D', 'ER0', 'L', 'AY1']), 'underline': ('NN', ['AH1', 'N', 'D', 'ER0', 'L', 'AY2', 'N']), 'underling': ('VBG', ['AH1', 'N', 'D', 'ER0', 'L', 'IH0', 'NG']), 'underlying': ('VBG', ['AH2', 'N', 'D', 'ER0', 'L', 'AY1', 'IH0', 'NG']), 'undermanned': ('JJ', ['AH1', 'N', 'D', 'ER0', 'M', 'AE2', 'N', 'D']), 'undermine': ('NN', ['AH1', 'N', 'D', 'ER0', 'M', 'AY2', 'N']), 'underneath': ('NN', ['AH2', 'N', 'D', 'ER0', 'N', 'IY1', 'TH']), 'underpay': ('NN', ['AH2', 'N', 'D', 'ER0', 'P', 'EY1']), 'underpinned': ('JJ', ['AH1', 'N', 'D', 'ER0', 'P', 'IH2', 'N', 'D']), 'underpinning': ('VBG', ['AH1', 'N', 'D', 'ER0', 'P', 'IH2', 'N', 'IH0', 'NG']), 'underpin': ('NN', ['AH1', 'N', 'D', 'ER0', 'P', 'IH2', 'N']), 'underplay': ('NN', ['AH0', 'N', 'D', 'ER0', 'P', 'L', 
'EY1']), 'underrate': ('NN', ['AH0', 'N', 'D', 'ER0', 'R', 'EY1', 'T']), 'underscore': ('NN', ['AH2', 'N', 'D', 'ER0', 'S', 'K', 'AO1', 'R']), 'undersecretary': ('JJ', ['AH2', 'N', 'D', 'ER0', 'S', 'EH1', 'K', 'R', 'IH0', 'T', 'EH2', 'R', 'IY0']), 'undersell': ('NN', ['AH1', 'N', 'D', 'ER0', 'S', 'EH2', 'L']), 'undershirt': ('NN', ['AH1', 'N', 'D', 'ER0', 'SH', 'ER2', 'T']), 'underside': ('NN', ['AH1', 'N', 'D', 'ER0', 'S', 'AY2', 'D']), 'undersized': ('JJ', ['AH1', 'N', 'D', 'ER0', 'S', 'AY2', 'Z', 'D']), 'undersold': ('JJ', ['AH0', 'N', 'D', 'ER0', 'S', 'OW1', 'L', 'D']), 'understood': ('NN', ['AH2', 'N', 'D', 'ER0', 'S', 'T', 'UH1', 'D']), 'understanding': ('VBG', ['AH2', 'N', 'D', 'ER0', 'S', 'T', 'AE1', 'N', 'D', 'IH0', 'NG']), 'understand': ('NN', ['AH2', 'N', 'D', 'ER0', 'S', 'T', 'AE1', 'N', 'D']), 'understandable': ('JJ', ['AH2', 'N', 'D', 'ER0', 'S', 'T', 'AE1', 'N', 'D', 'AH0', 'B', 'AH0', 'L']), 'understate': ('NN', ['AH1', 'N', 'D', 'ER0', 'S', 'T', 'EY2', 'T']), 'understatement': ('NN', ['AH1', 'N', 'D', 'ER0', 'S', 'T', 'EY2', 'T', 'M', 'AH0', 'N', 'T']), 'understudy': ('NN', ['AH1', 'N', 'D', 'ER0', 'S', 'T', 'AH2', 'D', 'IY0']), 'undertook': ('NN', ['AH2', 'N', 'D', 'ER0', 'T', 'UH1', 'K']), 'undertaken': ('JJ', ['AH1', 'N', 'D', 'ER0', 'T', 'EY2', 'K', 'AH0', 'N']), 'undertaking': ('NN', ['AH1', 'N', 'D', 'ER0', 'T', 'EY2', 'K', 'IH0', 'NG']), 'undertake': ('NN', ['AH1', 'N', 'D', 'ER0', 'T', 'EY2', 'K']), 'undertaker': ('NN', ['AH1', 'N', 'D', 'ER0', 'T', 'EY2', 'K', 'ER0']), 'undertone': ('NN', ['AH1', 'N', 'D', 'ER0', 'T', 'OW2', 'N']), 'undertow': ('NN', ['AH1', 'N', 'D', 'ER0', 'T', 'OW2']), 'undervaluation': ('NN', ['AH1', 'N', 'D', 'ER0', 'V', 'AE2', 'L', 'Y', 'UW0', 'EY1', 'SH', 'AH0', 'N']), 'undervalue': ('JJ', ['AH1', 'N', 'D', 'ER0', 'V', 'AE2', 'L', 'Y', 'UW0']), 'underwear': ('JJ', ['AH1', 'N', 'D', 'ER0', 'W', 'EH2', 'R']), 'underwood': ('NN', ['AH1', 'N', 'D', 'ER0', 'W', 'UH2', 'D']), 'underworld': ('NN', ['AH1', 'N', 'D', 'ER0', 
'W', 'ER2', 'L', 'D']), 'underwrote': ('NN', ['AH2', 'N', 'D', 'ER0', 'R', 'OW1', 'T']), 'underwritten': ('JJ', ['AH1', 'N', 'D', 'ER0', 'R', 'IH2', 'T', 'AH0', 'N']), 'underwriting': ('NN', ['AH1', 'N', 'D', 'ER0', 'R', 'AY1', 'T', 'IH0', 'NG']), 'underwrite': ('JJ', ['AH1', 'N', 'D', 'ER0', 'R', 'AY2', 'T']), 'underwriter': ('NN', ['AH1', 'N', 'D', 'ER0', 'R', 'AY2', 'T', 'ER0']), 'undid': ('JJ', ['AH0', 'N', 'D', 'IH1', 'D']), 'undifferentiated': ('JJ', ['AH0', 'N', 'D', 'IH0', 'F', 'ER0', 'EH1', 'N', 'SH', 'IY0', 'EY2', 'T', 'IH0', 'D']), 'undine': ('NN', ['AH1', 'N', 'D', 'IY2', 'N']), 'undivided': ('JJ', ['AH2', 'N', 'D', 'AH0', 'V', 'AY1', 'D', 'IH0', 'D']), 'undo': ('NN', ['AH0', 'N', 'D', 'UW1']), 'undock': ('NN', ['AH0', 'N', 'D', 'AA1', 'K']), 'undoing': ('VBG', ['AH0', 'N', 'D', 'UW1', 'IH0', 'NG']), 'undone': ('NN', ['AH0', 'N', 'D', 'AH1', 'N']), 'undoubted': ('JJ', ['AH0', 'N', 'D', 'AW1', 'T', 'IH0', 'D']), 'undreamed': ('JJ', ['AH0', 'N', 'D', 'R', 'IY1', 'M', 'D']), 'undress': ('NN', ['AH0', 'N', 'D', 'R', 'EH1', 'S']), 'undue': ('JJ', ['AH0', 'N', 'D', 'UW1']), 'undulate': ('NN', ['AH1', 'N', 'JH', 'AH0', 'L', 'EY2', 'T']), 'undulated': ('JJ', ['AH1', 'N', 'JH', 'AH0', 'L', 'EY2', 'T', 'AH0', 'D']), 'undulating': ('VBG', ['AH1', 'N', 'JH', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'unduly': ('RB', ['AH0', 'N', 'D', 'UW1', 'L', 'IY0']), 'undying': ('VBG', ['AH0', 'N', 'D', 'AY1', 'IH0', 'NG']), 'unearned': ('JJ', ['AH0', 'N', 'ER1', 'N', 'D']), 'unearthed': ('JJ', ['AH0', 'N', 'ER1', 'TH', 'T']), 'unearthing': ('VBG', ['AH0', 'N', 'ER1', 'TH', 'IH0', 'NG']), 'unearth': ('NN', ['AH0', 'N', 'ER1', 'TH']), 'unearthly': ('RB', ['AH0', 'N', 'ER1', 'TH', 'L', 'IY0']), 'unease': ('NN', ['AH0', 'N', 'IY1', 'Z']), 'uneasiness': ('NN', ['AH2', 'N', 'IY1', 'Z', 'IY0', 'N', 'AH0', 'S']), 'uneasy': ('JJ', ['AH0', 'N', 'IY1', 'Z', 'IY0']), 'unemployed': ('JJ', ['AH2', 'N', 'EH0', 'M', 'P', 'L', 'OY1', 'D']), 'unencumber': ('NN', ['AH2', 'N', 'EH0', 'N', 'K', 
'AH1', 'M', 'B', 'ER0']), 'unequal': ('JJ', ['AH0', 'N', 'IY1', 'K', 'W', 'AH0', 'L']), 'unequaled': ('JJ', ['AH0', 'N', 'IY1', 'K', 'W', 'AH2', 'L', 'D']), 'unequivocal': ('JJ', ['AH2', 'N', 'IH0', 'K', 'W', 'IH1', 'V', 'AH0', 'K', 'AH0', 'L']), 'uneven': ('RB', ['AH0', 'N', 'IY1', 'V', 'AH0', 'N']), 'unexpected': ('JJ', ['AH2', 'N', 'IH0', 'K', 'S', 'P', 'EH1', 'K', 'T', 'IH0', 'D']), 'unfailing': ('VBG', ['AH0', 'N', 'F', 'EY1', 'L', 'IH0', 'NG']), 'unfair': ('NN', ['AH0', 'N', 'F', 'EH1', 'R']), 'unfaithful': ('JJ', ['AH0', 'N', 'F', 'EY1', 'TH', 'F', 'AH0', 'L']), 'unfavorable': ('JJ', ['AH2', 'N', 'F', 'EY1', 'V', 'ER0', 'AH0', 'B', 'AH0', 'L']), 'unfeeling': ('VBG', ['AH0', 'N', 'F', 'IY1', 'L', 'IH0', 'NG']), 'unfinished': ('JJ', ['AH0', 'N', 'F', 'IH1', 'N', 'IH0', 'SH', 'T']), 'unfit': ('NN', ['AH0', 'N', 'F', 'IH1', 'T']), 'unflinching': ('VBG', ['AH0', 'N', 'F', 'L', 'IH1', 'N', 'CH', 'IH0', 'NG']), 'unfold': ('JJ', ['AH0', 'N', 'F', 'OW1', 'L', 'D']), 'unforeseeable': ('JJ', ['AH2', 'N', 'F', 'AO0', 'R', 'S', 'IY1', 'AH0', 'B', 'AH0', 'L']), 'unforgettable': ('JJ', ['AH2', 'N', 'F', 'ER0', 'G', 'EH1', 'T', 'AH0', 'B', 'AH0', 'L']), 'unformed': ('JJ', ['AH0', 'N', 'F', 'AO1', 'R', 'M', 'D']), 'unfortunate': ('NN', ['AH0', 'N', 'F', 'AO1', 'R', 'CH', 'AH0', 'N', 'AH0', 'T']), 'unfounded': ('JJ', ['AH0', 'N', 'F', 'AW1', 'N', 'D', 'IH0', 'D']), 'unfriendly': ('RB', ['AH0', 'N', 'F', 'R', 'EH1', 'N', 'D', 'L', 'IY0']), 'unfurl': ('NN', ['AH0', 'N', 'F', 'ER1', 'L']), 'ungainly': ('RB', ['AH0', 'N', 'G', 'EY1', 'N', 'L', 'IY0']), 'unglue': ('JJ', ['AH0', 'N', 'G', 'L', 'UW1']), 'ungodly': ('RB', ['AH0', 'N', 'G', 'AO1', 'D', 'L', 'IY0']), 'ungovernable': ('JJ', ['AH0', 'N', 'G', 'AH1', 'V', 'ER0', 'N', 'AH0', 'B', 'AH0', 'L']), 'ungrateful': ('JJ', ['AH0', 'N', 'G', 'R', 'EY1', 'T', 'F', 'AH0', 'L']), 'unhappy': ('JJ', ['AH0', 'N', 'HH', 'AE1', 'P', 'IY0']), 'unheard': ('JJ', ['AH0', 'N', 'HH', 'ER1', 'D']), 'unhinge': ('NN', ['AH0', 'N', 'HH', 'IH1', 'N', 
'JH']), 'unhitch': ('NN', ['AH0', 'N', 'HH', 'IH1', 'CH']), 'unholy': ('NN', ['AH0', 'N', 'HH', 'OW1', 'L', 'IY0']), 'unhook': ('NN', ['AH0', 'N', 'HH', 'UH1', 'K']), 'unicellular': ('JJ', ['Y', 'UW2', 'N', 'IH0', 'S', 'EH1', 'L', 'Y', 'AH0', 'L', 'ER0']), 'unicorn': ('JJ', ['Y', 'UW1', 'N', 'IH0', 'K', 'AO2', 'R', 'N']), 'unification': ('NN', ['Y', 'UW2', 'N', 'AH0', 'F', 'AH0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'uniform': ('NN', ['Y', 'UW1', 'N', 'AH0', 'F', 'AO2', 'R', 'M']), 'uniformity': ('NN', ['Y', 'UW2', 'N', 'AH0', 'F', 'AO1', 'R', 'M', 'AH0', 'T', 'IY0']), 'uniformly': ('RB', ['Y', 'UW1', 'N', 'AH0', 'F', 'AO2', 'R', 'M', 'L', 'IY0']), 'unified': ('JJ', ['Y', 'UW1', 'N', 'AH0', 'F', 'AY2', 'D']), 'unifying': ('VBG', ['Y', 'UW1', 'N', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'unify': ('NN', ['Y', 'UW1', 'N', 'AH0', 'F', 'AY2']), 'unilateral': ('JJ', ['Y', 'UW2', 'N', 'AH0', 'L', 'AE1', 'T', 'ER0', 'AH0', 'L']), 'unimpeachable': ('JJ', ['AH2', 'N', 'IH0', 'M', 'P', 'IY1', 'CH', 'AH0', 'B', 'AH0', 'L']), 'unimproved': ('JJ', ['AH2', 'N', 'IH0', 'M', 'P', 'R', 'UW1', 'V', 'D']), 'uninterested': ('JJ', ['AH0', 'N', 'IH1', 'N', 'T', 'R', 'AH0', 'S', 'T', 'AH0', 'D']), 'union': ('NN', ['Y', 'UW1', 'N', 'Y', 'AH0', 'N']), 'unionism': ('NN', ['Y', 'UW1', 'N', 'Y', 'AH0', 'N', 'IH2', 'Z', 'AH0', 'M']), 'unionist': ('NN', ['Y', 'UW1', 'N', 'Y', 'AH0', 'N', 'AH0', 'S', 'T']), 'unique': ('NN', ['Y', 'UW0', 'N', 'IY1', 'K']), 'unison': ('NN', ['Y', 'UW1', 'N', 'AH0', 'S', 'AH0', 'N']), 'unit': ('NN', ['Y', 'UW1', 'N', 'AH0', 'T']), 'unitarian': ('JJ', ['Y', 'UW2', 'N', 'AH0', 'T', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'unitary': ('JJ', ['Y', 'UW1', 'N', 'IH0', 'T', 'EH2', 'R', 'IY0']), 'united': ('JJ', ['Y', 'UW0', 'N', 'AY1', 'T', 'AH0', 'D']), 'uniting': ('VBG', ['Y', 'UW0', 'N', 'AY1', 'T', 'IH0', 'NG']), 'unite': ('JJ', ['Y', 'UW1', 'N', 'AY2', 'T']), 'unity': ('NN', ['Y', 'UW1', 'N', 'AH0', 'T', 'IY0']), 'universal': ('NN', ['Y', 'UW2', 'N', 'AH0', 'V', 'ER1', 'S', 'AH0', 
'L']), 'universality': ('NN', ['Y', 'UW2', 'N', 'AH0', 'V', 'ER0', 'S', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'universally': ('RB', ['Y', 'UW2', 'N', 'AH0', 'V', 'ER1', 'S', 'AH0', 'L', 'IY0']), 'universe': ('NN', ['Y', 'UW1', 'N', 'AH0', 'V', 'ER2', 'S']), 'universities': ('NNS', ['Y', 'UW2', 'N', 'AH0', 'V', 'ER1', 'S', 'AH0', 'T', 'IY0', 'Z']), 'university': ('NN', ['Y', 'UW2', 'N', 'AH0', 'V', 'ER1', 'S', 'AH0', 'T', 'IY0']), 'unjust': ('JJ', ['AH0', 'N', 'JH', 'AH1', 'S', 'T']), 'unkempt': ('JJ', ['AH0', 'N', 'K', 'EH1', 'M', 'P', 'T']), 'unkind': ('NN', ['AH0', 'N', 'K', 'AY1', 'N', 'D']), 'unknown': ('JJ', ['AH0', 'N', 'N', 'OW1', 'N']), 'unland': ('NN', ['AH1', 'N', 'L', 'AH0', 'N', 'D']), 'unlawful': ('JJ', ['AH0', 'N', 'L', 'AO1', 'F', 'AH0', 'L']), 'unleash': ('NN', ['AH0', 'N', 'L', 'IY1', 'SH']), 'unless': ('IN', ['AH0', 'N', 'L', 'EH1', 'S']), 'unlike': ('IN', ['AH0', 'N', 'L', 'AY1', 'K']), 'unlikely': ('JJ', ['AH0', 'N', 'L', 'AY1', 'K', 'L', 'IY0']), 'unlimited': ('JJ', ['AH0', 'N', 'L', 'IH1', 'M', 'AH0', 'T', 'AH0', 'D']), 'unload': ('NN', ['AH0', 'N', 'L', 'OW1', 'D']), 'unlock': ('NN', ['AH0', 'N', 'L', 'AA1', 'K']), 'unlucky': ('JJ', ['AH0', 'N', 'L', 'AH1', 'K', 'IY0']), 'unmade': ('NN', ['AH0', 'N', 'M', 'EY1', 'D']), 'unmanned': ('JJ', ['AH0', 'N', 'M', 'AE1', 'N', 'D']), 'unmask': ('NN', ['AH0', 'N', 'M', 'AE1', 'S', 'K']), 'unmistakable': ('JJ', ['AH2', 'N', 'M', 'IH0', 'S', 'T', 'EY1', 'K', 'AH0', 'B', 'AH0', 'L']), 'unmoved': ('JJ', ['AH0', 'N', 'M', 'UW1', 'V', 'D']), 'unnatural': ('JJ', ['AH0', 'N', 'N', 'AE1', 'CH', 'ER0', 'AH0', 'L']), 'unnerve': ('NN', ['AH0', 'N', 'ER1', 'V']), 'unobtrusive': ('JJ', ['AH2', 'N', 'AH0', 'B', 'T', 'R', 'UW1', 'S', 'IH0', 'V']), 'unorganized': ('JJ', ['AH0', 'N', 'AO1', 'R', 'G', 'AH0', 'N', 'AY2', 'Z', 'D']), 'unpack': ('NN', ['AH0', 'N', 'P', 'AE1', 'K']), 'unparalleled': ('JJ', ['AH0', 'N', 'P', 'EH1', 'R', 'AH0', 'L', 'EH2', 'L', 'D']), 'unplaced': ('JJ', ['AH0', 'N', 'P', 'L', 'EY1', 'S', 'T']), 
'unpleasant': ('JJ', ['AH0', 'N', 'P', 'L', 'EH1', 'Z', 'AH0', 'N', 'T']), 'unprecedented': ('JJ', ['AH0', 'N', 'P', 'R', 'EH1', 'S', 'IH0', 'D', 'EH2', 'N', 'T', 'IH0', 'D']), 'unprincipled': ('JJ', ['AH0', 'N', 'P', 'R', 'IH1', 'N', 'S', 'AH0', 'P', 'AH0', 'L', 'D']), 'unquestionable': ('JJ', ['AH0', 'N', 'K', 'W', 'EH1', 'S', 'CH', 'AH0', 'N', 'AH0', 'B', 'AH0', 'L']), 'unquestioned': ('JJ', ['AH0', 'N', 'K', 'W', 'EH1', 'S', 'CH', 'AH0', 'N', 'D']), 'unravel': ('NN', ['AH0', 'N', 'R', 'AE1', 'V', 'AH0', 'L']), 'unread': ('JJ', ['AH0', 'N', 'R', 'EH1', 'D']), 'unreal': ('NN', ['AH0', 'N', 'R', 'IY1', 'L']), 'unreality': ('NN', ['AH2', 'N', 'R', 'IY0', 'AE1', 'L', 'AH0', 'T', 'IY0']), 'unreasonable': ('JJ', ['AH0', 'N', 'R', 'IY1', 'Z', 'N', 'AH0', 'B', 'AH0', 'L']), 'unredeemed': ('JJ', ['AH2', 'N', 'R', 'IY0', 'D', 'IY1', 'M', 'D']), 'unrein': ('NN', ['AO1', 'N', 'R', 'AY0', 'N']), 'unrelenting': ('VBG', ['AH2', 'N', 'R', 'IY0', 'L', 'EH1', 'N', 'T', 'IH0', 'NG']), 'unreliable': ('JJ', ['AH2', 'N', 'R', 'IH0', 'L', 'AY1', 'AH0', 'B', 'AH0', 'L']), 'unremitting': ('VBG', ['AH2', 'N', 'R', 'IH0', 'M', 'IH1', 'T', 'IH0', 'NG']), 'unrest': ('NN', ['AH0', 'N', 'R', 'EH1', 'S', 'T']), 'unrivaled': ('JJ', ['AH0', 'N', 'R', 'AY1', 'V', 'AH0', 'L', 'D']), 'unruffled': ('JJ', ['AH0', 'N', 'R', 'AH1', 'F', 'AH0', 'L', 'D']), 'unruly': ('RB', ['AH0', 'N', 'R', 'UW1', 'L', 'IY0']), 'unsalable': ('JJ', ['AH0', 'N', 'S', 'EY1', 'L', 'AH0', 'B', 'AH0', 'L']), 'unsaturated': ('JJ', ['AH0', 'N', 'S', 'AE1', 'CH', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'unscrupulous': ('JJ', ['AH0', 'N', 'S', 'K', 'R', 'UW1', 'P', 'Y', 'AH0', 'L', 'AH0', 'S']), 'unseal': ('NN', ['AH0', 'N', 'S', 'IY1', 'L']), 'unseat': ('NN', ['AH0', 'N', 'S', 'IY1', 'T']), 'unseemly': ('RB', ['AH0', 'N', 'S', 'IY1', 'M', 'L', 'IY0']), 'unseen': ('JJ', ['AH0', 'N', 'S', 'IY1', 'N']), 'unset': ('NN', ['AH0', 'N', 'S', 'EH1', 'T']), 'unsettle': ('JJ', ['AH0', 'N', 'S', 'EH1', 'T', 'AH0', 'L']), 'unshackle': ('NN', 
['AH0', 'N', 'SH', 'AE1', 'K', 'AH0', 'L']), 'unshakable': ('JJ', ['AH0', 'N', 'SH', 'EY1', 'K', 'AH0', 'B', 'AH0', 'L']), 'unsophisticated': ('JJ', ['AH2', 'N', 'S', 'AH0', 'F', 'IH1', 'S', 'T', 'IH0', 'K', 'EY2', 'T', 'IH0', 'D']), 'unsound': ('NN', ['AH0', 'N', 'S', 'AW1', 'N', 'D']), 'unsparing': ('VBG', ['AH0', 'N', 'S', 'P', 'EH1', 'R', 'IH0', 'NG']), 'unspeakable': ('JJ', ['AH0', 'N', 'S', 'P', 'IY1', 'K', 'AH0', 'B', 'AH0', 'L']), 'unstable': ('JJ', ['AH0', 'N', 'S', 'T', 'EY1', 'B', 'AH0', 'L']), 'unsuccessful': ('JJ', ['AH2', 'N', 'S', 'AH0', 'K', 'S', 'EH1', 'S', 'F', 'AH0', 'L']), 'unsupportable': ('JJ', ['AH2', 'N', 'S', 'AH0', 'P', 'AO1', 'R', 'T', 'AH0', 'B', 'AH0', 'L']), 'untangle': ('NN', ['AH0', 'N', 'T', 'AE1', 'NG', 'G', 'AH0', 'L']), 'unthinking': ('VBG', ['AH0', 'N', 'TH', 'IH1', 'NG', 'K', 'IH0', 'NG']), 'untidy': ('NN', ['AH0', 'N', 'T', 'AY1', 'D', 'IY0']), 'untie': ('NN', ['AH0', 'N', 'T', 'AY1']), 'until': ('IN', ['AH0', 'N', 'T', 'IH1', 'L']), 'untimely': ('RB', ['AH0', 'N', 'T', 'AY1', 'M', 'L', 'IY0']), 'unto': ('NN', ['AH1', 'N', 'T', 'UW0']), 'untold': ('JJ', ['AH0', 'N', 'T', 'OW1', 'L', 'D']), 'untoward': ('NN', ['AH0', 'N', 'T', 'UW0', 'AO1', 'R', 'D']), 'untrained': ('JJ', ['AH0', 'N', 'T', 'R', 'EY1', 'N', 'D']), 'untrammeled': ('JJ', ['AH0', 'N', 'T', 'R', 'AE1', 'M', 'AH0', 'L', 'D']), 'untreatable': ('JJ', ['AH0', 'N', 'T', 'R', 'IY1', 'T', 'AH0', 'B', 'AH0', 'L']), 'untrue': ('JJ', ['AH0', 'N', 'T', 'R', 'UW1']), 'untruth': ('NN', ['AH0', 'N', 'T', 'R', 'UW1', 'TH']), 'untruthful': ('JJ', ['AH0', 'N', 'T', 'R', 'UW1', 'TH', 'F', 'AH0', 'L']), 'unturned': ('JJ', ['AH0', 'N', 'T', 'ER1', 'N', 'D']), 'unused': ('JJ', ['AH0', 'N', 'Y', 'UW1', 'Z', 'D']), 'unusual': ('JJ', ['AH0', 'N', 'Y', 'UW1', 'ZH', 'AH0', 'W', 'AH0', 'L']), 'unveil': ('NN', ['AH0', 'N', 'V', 'EY1', 'L']), 'unwarranted': ('JJ', ['AH0', 'N', 'W', 'AO1', 'R', 'AH0', 'N', 'T', 'IH0', 'D']), 'unwary': ('JJ', ['AH0', 'N', 'W', 'EH1', 'R', 'IY0']), 'unwashed': 
('JJ', ['AH0', 'N', 'W', 'AA1', 'SH', 'T']), 'unwieldy': ('NN', ['AH0', 'N', 'W', 'IY1', 'L', 'D', 'IY0']), 'unwilling': ('JJ', ['AH0', 'N', 'W', 'IH1', 'L', 'IH0', 'NG']), 'unwind': ('NN', ['AH0', 'N', 'W', 'AY1', 'N', 'D']), 'unwise': ('NN', ['AH0', 'N', 'W', 'AY1', 'Z']), 'unwisely': ('RB', ['AH0', 'N', 'W', 'AY1', 'Z', 'L', 'IY0']), 'unwitting': ('VBG', ['AH0', 'N', 'W', 'IH1', 'T', 'IH0', 'NG']), 'unworthy': ('JJ', ['AH0', 'N', 'W', 'ER1', 'DH', 'IY0']), 'unwrap': ('NN', ['AH0', 'N', 'R', 'AE1', 'P']), 'unwritten': ('JJ', ['AH0', 'N', 'R', 'IH1', 'T', 'AH0', 'N']), 'up': ('RB', ['AH1', 'P']), 'upbraided': ('JJ', ['AH0', 'P', 'B', 'R', 'EY1', 'D', 'IH0', 'D']), 'upbraid': ('NN', ['AH1', 'P', 'B', 'R', 'EY2', 'D']), 'upheaval': ('NN', ['AH0', 'P', 'HH', 'IY1', 'V', 'AH0', 'L']), 'upheld': ('NN', ['AH0', 'P', 'HH', 'EH1', 'L', 'D']), 'uphill': ('NN', ['AH1', 'P', 'HH', 'IH1', 'L']), 'uphold': ('JJ', ['AH0', 'P', 'HH', 'OW1', 'L', 'D']), 'upholster': ('NN', ['AH0', 'P', 'OW1', 'L', 'S', 'T', 'ER0']), 'upholstery': ('NN', ['AH0', 'P', 'OW1', 'L', 'S', 'T', 'ER0', 'IY0']), 'upland': ('NN', ['AH1', 'P', 'L', 'AH0', 'N', 'D']), 'uplifting': ('VBG', ['AH1', 'P', 'L', 'IH2', 'F', 'T', 'IH0', 'NG']), 'uplift': ('NN', ['AH1', 'P', 'L', 'IH0', 'F', 'T']), 'upon': ('IN', ['AH0', 'P', 'AA1', 'N']), 'upper': ('JJ', ['AH1', 'P', 'ER0']), 'uppermost': ('NN', ['AH1', 'P', 'ER0', 'M', 'OW2', 'S', 'T']), 'upright': ('JJ', ['AH0', 'P', 'R', 'AY1', 'T']), 'uprise': ('NN', ['AH1', 'P', 'R', 'AY0', 'Z']), 'uprising': ('VBG', ['AH0', 'P', 'R', 'AY1', 'Z', 'IH0', 'NG']), 'uproar': ('NN', ['AH1', 'P', 'R', 'AO2', 'R']), 'uproot': ('NN', ['AH0', 'P', 'R', 'UW1', 'T']), 'upset': ('NN', ['AH0', 'P', 'S', 'EH1', 'T']), 'upsetting': ('VBG', ['AH0', 'P', 'S', 'EH1', 'T', 'IH0', 'NG']), 'upshot': ('JJ', ['AH1', 'P', 'SH', 'AA2', 'T']), 'upside': ('NN', ['AH1', 'P', 'S', 'AY1', 'D']), 'upstairs': ('NNS', ['AH0', 'P', 'S', 'T', 'EH1', 'R', 'Z']), 'upstart': ('NN', ['AH1', 'P', 'S', 'T', 'AA2', 
'R', 'T']), 'upstream': ('NN', ['AH1', 'P', 'S', 'T', 'R', 'IY1', 'M']), 'uptake': ('NN', ['AH1', 'P', 'T', 'EY2', 'K']), 'uptown': ('JJ', ['AH1', 'P', 'T', 'AW1', 'N']), 'upturn': ('NN', ['AH0', 'P', 'T', 'ER1', 'N']), 'upward': ('RB', ['AH1', 'P', 'W', 'ER0', 'D']), 'upwards': ('NNS', ['AH1', 'P', 'W', 'ER0', 'D', 'Z']), 'ur': ('NN', ['ER1']), 'ure': ('NN', ['Y', 'UW1', 'R']), 'ural': ('JJ', ['Y', 'UH1', 'R', 'AH0', 'L']), 'uranium': ('NN', ['Y', 'ER0', 'EY1', 'N', 'IY0', 'AH0', 'M']), 'uranus': ('NN', ['Y', 'UW1', 'R', 'AH0', 'N', 'AH0', 'S']), 'urban': ('JJ', ['ER1', 'B', 'AH0', 'N']), 'urbane': ('NN', ['ER0', 'B', 'EY1', 'N']), 'urbanize': ('NN', ['ER1', 'B', 'AH0', 'N', 'AY2', 'Z']), 'urchin': ('JJ', ['ER1', 'CH', 'AH0', 'N']), 'urdu': ('NN', ['ER0', 'D', 'UW1']), 'urea': ('NN', ['Y', 'ER0', 'IY1', 'AH0']), 'urethane': ('NN', ['Y', 'ER1', 'AH0', 'TH', 'EY2', 'N']), 'urethra': ('NN', ['Y', 'ER2', 'IY1', 'TH', 'R', 'AH0']), 'urged': ('VBD', ['ER1', 'JH', 'D']), 'urging': ('VBG', ['ER1', 'JH', 'IH0', 'NG']), 'urge': ('NN', ['ER1', 'JH']), 'urgency': ('NN', ['ER1', 'JH', 'AH0', 'N', 'S', 'IY0']), 'urgent': ('NN', ['ER1', 'JH', 'AH0', 'N', 'T']), 'urgently': ('RB', ['ER1', 'JH', 'AH0', 'N', 'T', 'L', 'IY0']), 'uric': ('JJ', ['Y', 'ER1', 'IH0', 'K']), 'urinary': ('JJ', ['Y', 'ER1', 'AH0', 'N', 'EH2', 'R', 'IY0']), 'urinate': ('NN', ['Y', 'ER1', 'AH0', 'N', 'EY2', 'T']), 'urine': ('NN', ['Y', 'ER1', 'AH0', 'N']), 'urn': ('NN', ['ER1', 'N']), 'urology': ('NN', ['Y', 'ER0', 'AA1', 'L', 'AH0', 'JH', 'IY0']), 'ursa': ('NN', ['ER1', 'S', 'AH0']), 'urson': ('NN', ['ER1', 'S', 'AH0', 'N']), 'ursula': ('NN', ['ER1', 'S', 'AH0', 'L', 'AH0']), 'ursus': ('NN', ['ER1', 'S', 'AH0', 'S']), 'usable': ('JJ', ['Y', 'UW1', 'Z', 'AH0', 'B', 'AH0', 'L']), 'usage': ('NN', ['Y', 'UW1', 'S', 'AH0', 'JH']), 'use': ('NN', ['Y', 'UW1', 'S']), 'used': ('VBN', ['Y', 'UW1', 'Z', 'D']), 'using': ('VBG', ['Y', 'UW1', 'Z', 'IH0', 'NG']), 'useful': ('JJ', ['Y', 'UW1', 'S', 'F', 'AH0', 'L']), 
'usefully': ('RB', ['Y', 'UW1', 'S', 'F', 'AH0', 'L', 'IY0']), 'usefulness': ('NN', ['Y', 'UW1', 'S', 'F', 'AH0', 'L', 'N', 'AH0', 'S']), 'useless': ('NN', ['Y', 'UW1', 'S', 'L', 'AH0', 'S']), 'user': ('NN', ['Y', 'UW1', 'Z', 'ER0']), 'usher': ('NN', ['AH1', 'SH', 'ER0']), 'ushered': ('JJ', ['AH1', 'SH', 'ER0', 'D']), 'ushering': ('VBG', ['AH1', 'SH', 'ER0', 'IH0', 'NG']), 'usual': ('JJ', ['Y', 'UW1', 'ZH', 'AH0', 'W', 'AH0', 'L']), 'usurped': ('JJ', ['Y', 'UW2', 'S', 'ER1', 'P', 'T']), 'usurping': ('VBG', ['Y', 'UW2', 'S', 'ER1', 'P', 'IH0', 'NG']), 'usurp': ('NN', ['Y', 'UW2', 'S', 'ER1', 'P']), 'usurpation': ('NN', ['Y', 'UW2', 'S', 'ER0', 'P', 'EY1', 'SH', 'AH0', 'N']), 'usury': ('NN', ['Y', 'UW1', 'ZH', 'ER0', 'IY0']), 'uterine': ('NN', ['Y', 'UW1', 'T', 'ER0', 'AH0', 'N']), 'uterus': ('NN', ['Y', 'UW1', 'T', 'ER0', 'AH0', 'S']), 'utica': ('NN', ['Y', 'UW1', 'T', 'AH0', 'K', 'AH0']), 'utilitarian': ('JJ', ['Y', 'UW0', 'T', 'IH2', 'L', 'AH0', 'T', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'utility': ('NN', ['Y', 'UW0', 'T', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'utilization': ('NN', ['Y', 'UW2', 'T', 'AH0', 'L', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'utilized': ('JJ', ['Y', 'UW1', 'T', 'AH0', 'L', 'AY2', 'Z', 'D']), 'utilizing': ('VBG', ['Y', 'UW1', 'T', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'utilize': ('VB', ['Y', 'UW1', 'T', 'AH0', 'L', 'AY2', 'Z']), 'utmost': ('NN', ['AH1', 'T', 'M', 'OW2', 'S', 'T']), 'utopia': ('NN', ['Y', 'UW0', 'T', 'OW1', 'P', 'IY0', 'AH0']), 'utopian': ('JJ', ['Y', 'UW0', 'T', 'OW1', 'P', 'IY0', 'AH0', 'N']), 'utter': ('NN', ['AH1', 'T', 'ER0']), 'uttered': ('JJ', ['AH1', 'T', 'ER0', 'D']), 'uttering': ('VBG', ['AH1', 'T', 'ER0', 'IH0', 'NG']), 'utterance': ('NN', ['AH1', 'T', 'ER0', 'AH0', 'N', 'S']), 'utterly': ('RB', ['AH1', 'T', 'ER0', 'L', 'IY0']), 'uva': ('NN', ['Y', 'UW1', 'V', 'AH0']), 'v': ('NN', ['V', 'IY1']), 'vacancies': ('NNS', ['V', 'EY1', 'K', 'AH0', 'N', 'S', 'IY0', 'Z']), 'vacancy': ('NN', ['V', 'EY1', 'K', 'AH0', 'N', 'S', 
'IY0']), 'vacant': ('NN', ['V', 'EY1', 'K', 'AH0', 'N', 'T']), 'vacated': ('VBN', ['V', 'EY0', 'K', 'EY1', 'T', 'AH0', 'D']), 'vacating': ('VBG', ['V', 'EY1', 'K', 'EY0', 'T', 'IH0', 'NG']), 'vacate': ('NN', ['V', 'EY1', 'K', 'EY0', 'T']), 'vacation': ('NN', ['V', 'EY0', 'K', 'EY1', 'SH', 'AH0', 'N']), 'vaccinated': ('VBN', ['V', 'AE1', 'K', 'S', 'AH0', 'N', 'EY0', 'T', 'IH0', 'D']), 'vaccinate': ('NN', ['V', 'AE1', 'K', 'S', 'AH0', 'N', 'EY0', 'T']), 'vaccination': ('NN', ['V', 'AE0', 'K', 'S', 'AH0', 'N', 'EY1', 'SH', 'AH0', 'N']), 'vaccine': ('NN', ['V', 'AE2', 'K', 'S', 'IY1', 'N']), 'vaccinia': ('NN', ['V', 'AH0', 'CH', 'IY1', 'N', 'IY0', 'AH0']), 'vacillated': ('VBN', ['V', 'AE1', 'S', 'AH0', 'L', 'EY0', 'T', 'IH0', 'D']), 'vacillating': ('VBG', ['V', 'AE1', 'S', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'vacillate': ('NN', ['V', 'AE1', 'S', 'AH0', 'L', 'EY2', 'T']), 'vacillation': ('NN', ['V', 'AE2', 'S', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'vacuous': ('JJ', ['V', 'AE1', 'K', 'Y', 'UW0', 'AH0', 'S']), 'vacuums': ('NNS', ['V', 'AE1', 'K', 'Y', 'UW0', 'M', 'Z']), 'vacuum': ('NN', ['V', 'AE1', 'K', 'Y', 'UW0', 'M']), 'vagabond': ('NN', ['V', 'AE1', 'G', 'AH0', 'B', 'AA0', 'N', 'D']), 'vagaries': ('NNS', ['V', 'EY1', 'G', 'ER0', 'IY0', 'Z']), 'vagary': ('JJ', ['V', 'EY1', 'G', 'ER0', 'IY0']), 'vagina': ('NN', ['V', 'AH0', 'JH', 'AY1', 'N', 'AH0']), 'vaginal': ('JJ', ['V', 'AH0', 'JH', 'AY1', 'N', 'AH0', 'L']), 'vagrancy': ('NN', ['V', 'EY1', 'G', 'R', 'AH0', 'N', 'S', 'IY0']), 'vagrant': ('NN', ['V', 'EY1', 'G', 'R', 'AH0', 'N', 'T']), 'vague': ('NN', ['V', 'EY1', 'G']), 'vaguely': ('RB', ['V', 'EY1', 'G', 'L', 'IY0']), 'vagueness': ('NN', ['V', 'EY1', 'G', 'N', 'IH0', 'S']), 'vail': ('NN', ['V', 'EY1', 'L']), 'vain': ('NN', ['V', 'EY1', 'N']), 'vainly': ('RB', ['V', 'EY1', 'N', 'L', 'IY0']), 'vale': ('NN', ['V', 'EY1', 'L']), 'valedictorian': ('JJ', ['V', 'AE2', 'L', 'AH0', 'D', 'IH2', 'K', 'T', 'AO1', 'R', 'IY0', 'AH0', 'N']), 'valedictory': ('NN', ['V', 
'AE2', 'L', 'AH0', 'D', 'IH1', 'K', 'T', 'ER0', 'IY0']), 'valence': ('NN', ['V', 'EY1', 'L', 'AH0', 'N', 'S']), 'valencia': ('NN', ['V', 'AH0', 'L', 'EH1', 'N', 'S', 'IY0', 'AH0']), 'valentia': ('NN', ['V', 'AA0', 'L', 'EH1', 'N', 'SH', 'AH0']), 'valentine': ('NN', ['V', 'AE1', 'L', 'AH0', 'N', 'T', 'AY2', 'N']), 'valerian': ('JJ', ['V', 'AH0', 'L', 'IH1', 'R', 'IY0', 'AH0', 'N']), 'valet': ('NN', ['V', 'AE0', 'L', 'EY1']), 'valhalla': ('NN', ['V', 'AE2', 'L', 'HH', 'AE1', 'L', 'AH0']), 'valiant': ('NN', ['V', 'AE1', 'L', 'Y', 'AH0', 'N', 'T']), 'valid': ('JJ', ['V', 'AE1', 'L', 'AH0', 'D']), 'validate': ('NN', ['V', 'AE1', 'L', 'AH0', 'D', 'EY0', 'T']), 'validation': ('NN', ['V', 'AE2', 'L', 'AH0', 'D', 'EY1', 'SH', 'AH0', 'N']), 'validity': ('NN', ['V', 'AH0', 'L', 'IH1', 'D', 'AH0', 'T', 'IY0']), 'validly': ('RB', ['V', 'AE1', 'L', 'IH0', 'D', 'L', 'IY0']), 'valleys': ('NNS', ['V', 'AE1', 'L', 'IY0', 'Z']), 'valley': ('NN', ['V', 'AE1', 'L', 'IY0']), 'valla': ('NN', ['V', 'AE1', 'L', 'AH0']), 'valonia': ('NN', ['V', 'AH0', 'L', 'OW1', 'N', 'Y', 'AH0']), 'valor': ('NN', ['V', 'AE1', 'L', 'ER0']), 'valuable': ('JJ', ['V', 'AE1', 'L', 'Y', 'AH0', 'B', 'AH0', 'L']), 'valuation': ('NN', ['V', 'AE0', 'L', 'Y', 'UW0', 'EY1', 'SH', 'AH0', 'N']), 'value': ('NN', ['V', 'AE1', 'L', 'Y', 'UW0']), 'valued': ('VBN', ['V', 'AE1', 'L', 'Y', 'UW0', 'D']), 'valuing': ('VBG', ['V', 'AE1', 'L', 'Y', 'UW0', 'IH0', 'NG']), 'valueless': ('NN', ['V', 'AE1', 'L', 'Y', 'UW0', 'L', 'AH0', 'S']), 'valve': ('NN', ['V', 'AE1', 'L', 'V']), 'vamp': ('NN', ['V', 'AE1', 'M', 'P']), 'vampire': ('NN', ['V', 'AE1', 'M', 'P', 'AY0', 'R']), 'van': ('NN', ['V', 'AE1', 'N']), 'vanadium': ('NN', ['V', 'AH0', 'N', 'EY1', 'D', 'IY0', 'AH0', 'M']), 'vandal': ('NN', ['V', 'AE1', 'N', 'D', 'AH0', 'L']), 'vandalism': ('NN', ['V', 'AE1', 'N', 'D', 'AH0', 'L', 'IH0', 'Z', 'AH0', 'M']), 'vandyke': ('NN', ['V', 'AE2', 'N', 'D', 'AY1', 'K']), 'vane': ('NN', ['V', 'EY1', 'N']), 'vanessa': ('NN', ['V', 'AH0', 'N', 
'EH1', 'S', 'AH0']), 'vang': ('NN', ['V', 'AE1', 'NG']), 'vanguard': ('NN', ['V', 'AE1', 'N', 'G', 'AA2', 'R', 'D']), 'vanilla': ('NN', ['V', 'AH0', 'N', 'IH1', 'L', 'AH0']), 'vanillin': ('NN', ['V', 'AH0', 'N', 'IH1', 'L', 'IH0', 'N']), 'vanished': ('NNS', ['V', 'AE1', 'N', 'IH0', 'SH', 'T']), 'vanishing': ('VBG', ['V', 'AE1', 'N', 'IH0', 'SH', 'IH0', 'NG']), 'vanish': ('NN', ['V', 'AE1', 'N', 'IH0', 'SH']), 'vanities': ('NNS', ['V', 'AE1', 'N', 'AH0', 'T', 'IY0', 'Z']), 'vanity': ('NN', ['V', 'AE1', 'N', 'AH0', 'T', 'IY0']), 'vanquished': ('NNS', ['V', 'AE1', 'NG', 'K', 'W', 'IH0', 'SH', 'T']), 'vanquish': ('NN', ['V', 'AE1', 'NG', 'K', 'W', 'IH0', 'SH']), 'vantage': ('NN', ['V', 'AE1', 'N', 'T', 'AH0', 'JH']), 'vapid': ('NN', ['V', 'AE1', 'P', 'IH0', 'D']), 'vapor': ('NN', ['V', 'EY1', 'P', 'ER0']), 'vaporization': ('NN', ['V', 'EY0', 'P', 'ER0', 'AH0', 'Z', 'EY1', 'SH', 'AH0', 'N']), 'vaporized': ('VBN', ['V', 'EY1', 'P', 'ER0', 'AY2', 'Z', 'D']), 'vaporize': ('NN', ['V', 'EY1', 'P', 'ER0', 'AY2', 'Z']), 'vara': ('NN', ['V', 'AA1', 'R', 'AH0']), 'vari': ('NN', ['V', 'AA1', 'R', 'IY0']), 'variability': ('NN', ['V', 'EH0', 'R', 'IY0', 'AH0', 'B', 'IH1', 'L', 'IH0', 'T', 'IY0']), 'variable': ('JJ', ['V', 'EH1', 'R', 'IY0', 'AH0', 'B', 'AH0', 'L']), 'variably': ('RB', ['V', 'EH1', 'R', 'IY0', 'AH0', 'B', 'L', 'IY0']), 'variance': ('NN', ['V', 'EH1', 'R', 'IY0', 'AH0', 'N', 'S']), 'variant': ('NN', ['V', 'EH1', 'R', 'IY0', 'AH0', 'N', 'T']), 'variation': ('NN', ['V', 'EH2', 'R', 'IY0', 'EY1', 'SH', 'AH0', 'N']), 'varied': ('NNS', ['V', 'EH1', 'R', 'IY0', 'D']), 'variegated': ('VBN', ['V', 'EH1', 'R', 'IH0', 'G', 'EY0', 'T', 'AH0', 'D']), 'variegate': ('NN', ['V', 'EH1', 'R', 'IH0', 'G', 'EY0', 'T']), 'varietal': ('NN', ['V', 'ER0', 'IY1', 'T', 'AH0', 'L']), 'varieties': ('NNS', ['V', 'ER0', 'AY1', 'AH0', 'T', 'IY0', 'Z']), 'variety': ('NN', ['V', 'ER0', 'AY1', 'AH0', 'T', 'IY0']), 'various': ('JJ', ['V', 'EH1', 'R', 'IY0', 'AH0', 'S']), 'variously': ('RB', ['V', 
'EH1', 'R', 'IY0', 'AH0', 'S', 'L', 'IY0']), 'varnish': ('NN', ['V', 'AA1', 'R', 'N', 'IH0', 'SH']), 'varnished': ('NNS', ['V', 'AA1', 'R', 'N', 'IH0', 'SH', 'T']), 'varvel': ('NN', ['V', 'AA0', 'R', 'V', 'EH1', 'L']), 'varying': ('VBG', ['V', 'EH1', 'R', 'IY0', 'IH0', 'NG']), 'vary': ('NN', ['V', 'EH1', 'R', 'IY0']), 'vascular': ('NN', ['V', 'AE1', 'S', 'K', 'Y', 'AH0', 'L', 'ER0']), 'vase': ('NN', ['V', 'EY1', 'S']), 'vaseline': ('NN', ['V', 'AE1', 'S', 'AH0', 'L', 'IY2', 'N']), 'vassal': ('NN', ['V', 'AE1', 'S', 'AH0', 'L']), 'vast': ('NN', ['V', 'AE1', 'S', 'T']), 'vastly': ('RB', ['V', 'AE1', 'S', 'T', 'L', 'IY0']), 'vastness': ('NN', ['V', 'AE1', 'S', 'T', 'N', 'AH0', 'S']), 'vat': ('NN', ['V', 'AE1', 'T']), 'vatted': ('VBN', ['V', 'AE1', 'T', 'IH0', 'D']), 'vatican': ('JJ', ['V', 'AE1', 'T', 'IH0', 'K', 'AH0', 'N']), 'vaudeville': ('NN', ['V', 'AA1', 'D', 'V', 'IH0', 'L']), 'vault': ('NN', ['V', 'AO1', 'L', 'T']), 'vaulted': ('VBN', ['V', 'AO1', 'L', 'T', 'AH0', 'D']), 'vaulting': ('VBG', ['V', 'AO1', 'L', 'T', 'IH0', 'NG']), 'vaunted': ('VBN', ['V', 'AO1', 'N', 'T', 'IH0', 'D']), 'veal': ('NN', ['V', 'IY1', 'L']), 'vector': ('NN', ['V', 'EH1', 'K', 'T', 'ER0']), 'veda': ('NN', ['V', 'EY1', 'D', 'AH0']), 'vedette': ('NN', ['V', 'IH0', 'D', 'EH1', 'T']), 'veered': ('VBN', ['V', 'IH1', 'R', 'D']), 'veering': ('VBG', ['V', 'IH1', 'R', 'IH0', 'NG']), 'veer': ('NN', ['V', 'IH1', 'R']), 'vega': ('NN', ['V', 'EY1', 'G', 'AH0']), 'vegetable': ('NN', ['V', 'EH1', 'JH', 'T', 'AH0', 'B', 'AH0', 'L']), 'vegetal': ('NN', ['V', 'EH1', 'JH', 'AH0', 'T', 'AH0', 'L']), 'vegetarian': ('JJ', ['V', 'EH2', 'JH', 'AH0', 'T', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'vegetarianism': ('NN', ['V', 'EH2', 'JH', 'AH0', 'T', 'EH1', 'R', 'IY0', 'AH0', 'N', 'IH0', 'Z', 'AH0', 'M']), 'vegetate': ('NN', ['V', 'EH1', 'JH', 'AH0', 'T', 'EY2', 'T']), 'vegetation': ('NN', ['V', 'EH2', 'JH', 'AH0', 'T', 'EY1', 'SH', 'AH0', 'N']), 'vegetative': ('NN', ['V', 'EH2', 'JH', 'AH0', 'T', 'EY1', 'T', 'IH0', 
'V']), 'vehemence': ('NN', ['V', 'IY1', 'AH0', 'M', 'AH0', 'N', 'S']), 'vehement': ('NN', ['V', 'IY1', 'AH0', 'M', 'AH0', 'N', 'T']), 'vehemently': ('RB', ['V', 'IY1', 'AH0', 'M', 'AH0', 'N', 'T', 'L', 'IY0']), 'vehicle': ('NN', ['V', 'IY1', 'HH', 'IH0', 'K', 'AH0', 'L']), 'vehicular': ('NN', ['V', 'IY0', 'HH', 'IH1', 'K', 'Y', 'AH0', 'L', 'ER0']), 'veil': ('NN', ['V', 'EY1', 'L']), 'veiled': ('VBN', ['V', 'EY1', 'L', 'D']), 'veiling': ('VBG', ['V', 'EY1', 'L', 'IH0', 'NG']), 'vein': ('NN', ['V', 'EY1', 'N']), 'velocities': ('NNS', ['V', 'AH0', 'L', 'AA1', 'S', 'AH0', 'T', 'IY0', 'Z']), 'velocity': ('NN', ['V', 'AH0', 'L', 'AA1', 'S', 'AH0', 'T', 'IY0']), 'vela': ('NN', ['V', 'EH1', 'L', 'AH0']), 'velvet': ('NN', ['V', 'EH1', 'L', 'V', 'AH0', 'T']), 'velvety': ('NN', ['V', 'EH1', 'L', 'V', 'AH0', 'T', 'IY0']), 'vena': ('NN', ['V', 'IY1', 'N', 'AH0']), 'venal': ('JJ', ['V', 'IY1', 'N', 'AH0', 'L']), 'venality': ('NN', ['V', 'IH0', 'N', 'AE1', 'L', 'IH0', 'T', 'IY0']), 'vending': ('VBG', ['V', 'EH1', 'N', 'D', 'IH0', 'NG']), 'vendee': ('NN', ['V', 'EH1', 'N', 'D', 'IY1']), 'vendetta': ('NN', ['V', 'EH0', 'N', 'D', 'EH1', 'T', 'AH0']), 'vendor': ('NN', ['V', 'EH1', 'N', 'D', 'ER0']), 'veneer': ('NN', ['V', 'AH0', 'N', 'IH1', 'R']), 'venerable': ('JJ', ['V', 'EH1', 'N', 'ER0', 'AH0', 'B', 'AH0', 'L']), 'venerated': ('VBN', ['V', 'EH1', 'N', 'ER0', 'EY2', 'T', 'IH0', 'D']), 'venerating': ('VBG', ['V', 'EH1', 'N', 'ER0', 'EY2', 'T', 'IH0', 'NG']), 'venerate': ('NN', ['V', 'EH1', 'N', 'ER0', 'EY2', 'T']), 'veneration': ('NN', ['V', 'EH2', 'N', 'ER0', 'EY1', 'SH', 'AH0', 'N']), 'venereal': ('NN', ['V', 'AH0', 'N', 'IH1', 'R', 'IY0', 'AH0', 'L']), 'venetian': ('JJ', ['V', 'AH0', 'N', 'IY1', 'SH', 'AH0', 'N']), 'veney': ('NN', ['V', 'EH1', 'N', 'IY0']), 'vengeance': ('NN', ['V', 'EH1', 'N', 'JH', 'AH0', 'N', 'S']), 'vengeful': ('NN', ['V', 'EH1', 'N', 'JH', 'F', 'AH0', 'L']), 'venison': ('NN', ['V', 'EH1', 'N', 'AH0', 'S', 'AH0', 'N']), 'venom': ('NN', ['V', 'EH1', 'N', 
'AH0', 'M']), 'venomous': ('JJ', ['V', 'EH1', 'N', 'AH0', 'M', 'AH0', 'S']), 'venous': ('JJ', ['V', 'IY1', 'N', 'AH0', 'S']), 'vent': ('NN', ['V', 'EH1', 'N', 'T']), 'vented': ('VBN', ['V', 'EH1', 'N', 'T', 'IH0', 'D']), 'venting': ('VBG', ['V', 'EH1', 'N', 'T', 'IH0', 'NG']), 'venter': ('NN', ['V', 'EH1', 'N', 'T', 'ER0']), 'ventilated': ('VBN', ['V', 'EH1', 'N', 'T', 'AH0', 'L', 'EY2', 'T', 'IH0', 'D']), 'ventilating': ('VBG', ['V', 'EH1', 'N', 'T', 'AH0', 'L', 'EY2', 'T', 'IH0', 'NG']), 'ventilate': ('NN', ['V', 'EH1', 'N', 'T', 'AH0', 'L', 'EY2', 'T']), 'ventilation': ('NN', ['V', 'EH2', 'N', 'T', 'AH0', 'L', 'EY1', 'SH', 'AH0', 'N']), 'ventilator': ('NN', ['V', 'EH1', 'N', 'T', 'AH0', 'L', 'EY2', 'T', 'ER0']), 'ventral': ('JJ', ['V', 'EH1', 'N', 'T', 'R', 'AH0', 'L']), 'ventricular': ('NN', ['V', 'EH0', 'N', 'T', 'R', 'IH1', 'K', 'Y', 'UW0', 'L', 'ER0']), 'venture': ('NN', ['V', 'EH1', 'N', 'CH', 'ER0']), 'ventured': ('VBN', ['V', 'EH1', 'N', 'CH', 'ER0', 'D']), 'venturing': ('VBG', ['V', 'EH1', 'N', 'CH', 'ER0', 'IH0', 'NG']), 'venturesome': ('NN', ['V', 'EH1', 'N', 'CH', 'ER0', 'S', 'AH0', 'M']), 'venue': ('NN', ['V', 'EH1', 'N', 'Y', 'UW0']), 'venus': ('NN', ['V', 'IY1', 'N', 'AH0', 'S']), 'veracity': ('NN', ['V', 'ER0', 'AE1', 'S', 'IH0', 'T', 'IY0']), 'veranda': ('NN', ['V', 'ER0', 'AE1', 'N', 'D', 'AH0']), 'verb': ('NN', ['V', 'ER1', 'B']), 'verbal': ('NN', ['V', 'ER1', 'B', 'AH0', 'L']), 'verbalizing': ('VBG', ['V', 'ER1', 'B', 'AH0', 'L', 'AY2', 'Z', 'IH0', 'NG']), 'verbalize': ('NN', ['V', 'ER1', 'B', 'AH0', 'L', 'AY2', 'Z']), 'verbally': ('RB', ['V', 'ER0', 'B', 'AE1', 'L', 'IY0']), 'verbatim': ('NN', ['V', 'ER0', 'B', 'EY1', 'T', 'AH0', 'M']), 'verbena': ('NN', ['V', 'ER0', 'B', 'IY1', 'N', 'AH0']), 'verbiage': ('NN', ['V', 'ER1', 'B', 'IY0', 'IH0', 'JH']), 'verify': ('NN', ['V', 'EH1', 'R', 'AH0', 'F', 'AY2']), 'verdant': ('NN', ['V', 'ER1', 'D', 'AH0', 'N', 'T']), 'verdict': ('NN', ['V', 'ER1', 'D', 'IH0', 'K', 'T']), 'verdin': ('NN', ['V', 'ER1', 
'D', 'IH0', 'N']), 'verge': ('NN', ['V', 'ER1', 'JH']), 'verging': ('VBG', ['V', 'ER1', 'JH', 'IH0', 'NG']), 'verifiable': ('JJ', ['V', 'EH1', 'R', 'AH0', 'F', 'AY2', 'AH0', 'B', 'AH0', 'L']), 'verified': ('VBN', ['V', 'EH1', 'R', 'AH0', 'F', 'AY2', 'D']), 'verifying': ('VBG', ['V', 'EH1', 'R', 'AH0', 'F', 'AY2', 'IH0', 'NG']), 'verine': ('NN', ['V', 'ER0', 'IY1', 'N', 'IY0']), 'verisimilitude': ('NN', ['V', 'EH2', 'R', 'AH0', 'S', 'AH0', 'M', 'IH1', 'L', 'AH0', 'T', 'UW2', 'D']), 'veritable': ('JJ', ['V', 'EH1', 'R', 'IH0', 'T', 'AH0', 'B', 'AH0', 'L']), 'verities': ('NNS', ['V', 'EH1', 'R', 'AH0', 'T', 'IY0', 'Z']), 'verity': ('NN', ['V', 'EH1', 'R', 'AH0', 'T', 'IY0']), 'vermilion': ('NN', ['V', 'ER0', 'M', 'IH1', 'L', 'Y', 'AH0', 'N']), 'vermin': ('NN', ['V', 'ER1', 'M', 'IH0', 'N']), 'vernacular': ('NN', ['V', 'ER0', 'N', 'AE1', 'K', 'Y', 'AH0', 'L', 'ER0']), 'vernal': ('JJ', ['V', 'ER1', 'N', 'AH0', 'L']), 'vernier': ('NN', ['V', 'ER1', 'N', 'IY0', 'ER0']), 'veronica': ('NN', ['V', 'ER0', 'AA1', 'N', 'IH0', 'K', 'AH0']), 'vers': ('NNS', ['V', 'ER1', 'S']), 'versatile': ('NN', ['V', 'ER1', 'S', 'AH0', 'T', 'AH0', 'L']), 'versatility': ('NN', ['V', 'ER2', 'S', 'AH0', 'T', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'verse': ('NN', ['V', 'ER1', 'S']), 'versed': ('VBN', ['V', 'ER1', 'S', 'T']), 'verser': ('NN', ['V', 'ER1', 'S', 'ER0']), 'version': ('NN', ['V', 'ER1', 'ZH', 'AH0', 'N']), 'versus': ('NN', ['V', 'ER1', 'S', 'AH0', 'S']), 'vert': ('NN', ['V', 'ER1', 'T']), 'vertebrae': ('NN', ['V', 'ER1', 'T', 'AH0', 'B', 'R', 'EY2']), 'vertebra': ('NN', ['V', 'ER1', 'T', 'AH0', 'B', 'R', 'AH0']), 'vertebral': ('JJ', ['V', 'ER1', 'T', 'AH0', 'B', 'R', 'AH0', 'L']), 'vertebrate': ('NN', ['V', 'ER1', 'T', 'AH0', 'B', 'R', 'EY2', 'T']), 'vertex': ('NN', ['V', 'ER1', 'T', 'EH2', 'K', 'S']), 'vertical': ('JJ', ['V', 'ER1', 'T', 'IH0', 'K', 'AH0', 'L']), 'vertically': ('RB', ['V', 'ER1', 'T', 'IH0', 'K', 'L', 'IY0']), 'vertigo': ('NN', ['V', 'ER1', 'T', 'IH0', 'G', 'OW2']), 'verve': 
('NN', ['V', 'ER1', 'V']), 'very': ('RB', ['V', 'EH1', 'R', 'IY0']), 'vesicle': ('NN', ['V', 'EH1', 'Z', 'IH0', 'K', 'AH0', 'L']), 'vespa': ('NN', ['V', 'EY1', 'S', 'P', 'AH0']), 'vesper': ('NN', ['V', 'EH1', 'S', 'P', 'ER0']), 'vessel': ('NN', ['V', 'EH1', 'S', 'AH0', 'L']), 'vest': ('NN', ['V', 'EH1', 'S', 'T']), 'vested': ('JJ', ['V', 'EH1', 'S', 'T', 'AH0', 'D']), 'vesting': ('VBG', ['V', 'EH1', 'S', 'T', 'IH0', 'NG']), 'vesta': ('NN', ['V', 'EH1', 'S', 'T', 'AH0']), 'vestal': ('NN', ['V', 'EH1', 'S', 'T', 'AH0', 'L']), 'vestibule': ('NN', ['V', 'EH1', 'S', 'T', 'IH0', 'B', 'Y', 'UW2', 'L']), 'vestige': ('NN', ['V', 'EH1', 'S', 'T', 'IH0', 'JH']), 'vestigial': ('NN', ['V', 'AH0', 'S', 'T', 'IH1', 'JH', 'IY0', 'AH0', 'L']), 'veteran': ('NN', ['V', 'EH1', 'T', 'ER0', 'AH0', 'N']), 'veterinarian': ('JJ', ['V', 'EH2', 'T', 'R', 'AH0', 'N', 'EH1', 'R', 'IY0', 'AH0', 'N']), 'veterinary': ('JJ', ['V', 'EH1', 'T', 'R', 'AH0', 'N', 'EH2', 'R', 'IY0']), 'vetoes': ('NNS', ['V', 'IY1', 'T', 'OW0', 'Z']), 'veto': ('NN', ['V', 'IY1', 'T', 'OW0']), 'vetoed': ('NN', ['V', 'IY1', 'T', 'OW0', 'D']), 'vetoing': ('VBG', ['V', 'IY1', 'T', 'OW0', 'IH0', 'NG']), 'vexed': ('NN', ['V', 'EH1', 'K', 'S', 'T']), 'vexing': ('VBG', ['V', 'EH1', 'K', 'S', 'IH0', 'NG']), 'vex': ('NN', ['V', 'EH1', 'K', 'S']), 'vexatious': ('JJ', ['V', 'EH0', 'K', 'S', 'EY1', 'SH', 'AH0', 'S']), 'via': ('IN', ['V', 'AY1', 'AH0']), 'viability': ('NN', ['V', 'AY0', 'AH0', 'B', 'IH1', 'L', 'AH0', 'T', 'IY0']), 'viable': ('JJ', ['V', 'AY1', 'AH0', 'B', 'AH0', 'L']), 'viaduct': ('NN', ['V', 'AY1', 'AH0', 'D', 'AH0', 'K', 'T']), 'vial': ('NN', ['V', 'AY1', 'AH0', 'L']), 'vibrancy': ('NN', ['V', 'AY1', 'B', 'R', 'AH0', 'N', 'S', 'IY0']), 'vibrant': ('NN', ['V', 'AY1', 'B', 'R', 'AH0', 'N', 'T']), 'vibrate': ('NN', ['V', 'AY1', 'B', 'R', 'EY0', 'T']), 'vibrating': ('VBG', ['V', 'AY1', 'B', 'R', 'EY0', 'T', 'IH0', 'NG']), 'vibration': ('NN', ['V', 'AY0', 'B', 'R', 'EY1', 'SH', 'AH0', 'N']), 'vicar': ('NN', ['V', 'IH1', 
'K', 'ER0']), 'vicarious': ('JJ', ['V', 'AY0', 'K', 'EH1', 'R', 'IY0', 'AH0', 'S']), 'vicariously': ('RB', ['V', 'AY0', 'K', 'EH1', 'R', 'IY0', 'AH0', 'S', 'L', 'IY0']), 'vicary': ('JJ', ['V', 'IH1', 'K', 'ER0', 'IY0']), 'vice': ('NN', ['V', 'AY1', 'S']), 'viceroy': ('NN', ['V', 'AY1', 'S', 'R', 'OY0']), 'vicinity': ('NN', ['V', 'AH0', 'S', 'IH1', 'N', 'AH0', 'T', 'IY0']), 'vicious': ('JJ', ['V', 'IH1', 'SH', 'AH0', 'S']), 'vicissitude': ('NN', ['V', 'IH2', 'S', 'IH1', 'S', 'IH0', 'T', 'UW0', 'D']), 'victim': ('NN', ['V', 'IH1', 'K', 'T', 'AH0', 'M']), 'victimized': ('VBN', ['V', 'IH1', 'K', 'T', 'AH0', 'M', 'AY0', 'Z', 'D']), 'victimizing': ('VBG', ['V', 'IH1', 'K', 'T', 'AH0', 'M', 'AY0', 'Z', 'IH0', 'NG']), 'victimize': ('NN', ['V', 'IH1', 'K', 'T', 'AH0', 'M', 'AY0', 'Z']), 'victor': ('NN', ['V', 'IH1', 'K', 'T', 'ER0']), 'victoria': ('NNS', ['V', 'IH0', 'K', 'T', 'AO1', 'R', 'IY0', 'AH0']), 'victorian': ('JJ', ['V', 'IH0', 'K', 'T', 'AO1', 'R', 'IY0', 'AH0', 'N']), 'victorine': ('NN', ['V', 'IY0', 'K', 'T', 'AO0', 'R', 'IY1', 'N', 'IY0']), 'victorious': ('JJ', ['V', 'IH0', 'K', 'T', 'AO1', 'R', 'IY0', 'AH0', 'S']), 'victories': ('NNS', ['V', 'IH1', 'K', 'T', 'ER0', 'IY0', 'Z']), 'victory': ('NN', ['V', 'IH1', 'K', 'T', 'ER0', 'IY0']), 'w': ('NN', ['D', 'AH1', 'B', 'AH0', 'L', 'Y', 'UW0']), 'waag': ('NN', ['W', 'AA1', 'G']), 'wacky': ('NN', ['W', 'AE1', 'K', 'IY0']), 'wad': ('NN', ['W', 'AA1', 'D']), 'waded': ('VBD', ['W', 'EY1', 'D', 'IH0', 'D']), 'waddle': ('NN', ['W', 'AA1', 'D', 'AH0', 'L']), 'wade': ('NN', ['W', 'EY1', 'D']), 'wading': ('VBG', ['W', 'EY1', 'D', 'IH0', 'NG']), 'wader': ('NN', ['W', 'EY1', 'D', 'ER0']), 'wafer': ('NN', ['W', 'EY1', 'F', 'ER0']), 'waffle': ('NN', ['W', 'AA1', 'F', 'AH0', 'L']), 'wafted': ('VBN', ['W', 'AA1', 'F', 'T', 'IH0', 'D']), 'wafting': ('VBG', ['W', 'AA1', 'F', 'T', 'IH0', 'NG']), 'waft': ('NN', ['W', 'AA1', 'F', 'T']), 'wagged': ('VBD', ['W', 'AE1', 'G', 'D']), 'wagging': ('VBG', ['W', 'AE1', 'G', 'IH0', 'NG']), 
'wag': ('NN', ['W', 'AE1', 'G']), 'waged': ('VBD', ['W', 'EY1', 'JH', 'D']), 'waging': ('VBG', ['W', 'EY1', 'JH', 'IH0', 'NG']), 'wage': ('NN', ['W', 'EY1', 'JH']), 'wager': ('NN', ['W', 'EY1', 'JH', 'ER0']), 'wagered': ('VBD', ['W', 'EY1', 'JH', 'ER0', 'D']), 'wagering': ('VBG', ['W', 'EY1', 'JH', 'ER0', 'IH0', 'NG']), 'wages': ('NNS', ['W', 'EY1', 'JH', 'AH0', 'Z']), 'waggling': ('VBG', ['W', 'AE1', 'G', 'AH0', 'L', 'IH0', 'NG']), 'wagon': ('NN', ['W', 'AE1', 'G', 'AH0', 'N']), 'wagoner': ('NN', ['W', 'AE1', 'G', 'AH0', 'N', 'ER0']), 'wah': ('NN', ['W', 'AA1']), 'waid': ('NN', ['W', 'EY1', 'D']), 'waif': ('NN', ['W', 'EY1', 'F']), 'wail': ('NN', ['W', 'EY1', 'L']), 'wailed': ('VBD', ['W', 'EY1', 'L', 'D']), 'wailing': ('VBG', ['W', 'EY1', 'L', 'IH0', 'NG']), 'wain': ('NN', ['W', 'EY1', 'N']), 'wainwright': ('NN', ['W', 'EY1', 'N', 'R', 'AY2', 'T']), 'waist': ('NN', ['W', 'EY1', 'S', 'T']), 'waited': ('VBD', ['W', 'EY1', 'T', 'AH0', 'D']), 'waiting': ('VBG', ['W', 'EY1', 'T', 'IH0', 'NG']), 'wait': ('NN', ['W', 'EY1', 'T']), 'waiter': ('NN', ['W', 'EY1', 'T', 'ER0']), 'waitress': ('NN', ['W', 'EY1', 'T', 'R', 'AH0', 'S']), 'waive': ('NN', ['W', 'EY1', 'V']), 'waived': ('VBN', ['W', 'EY1', 'V', 'D']), 'waiving': ('VBG', ['W', 'EY1', 'V', 'IH0', 'NG']), 'waiver': ('NN', ['W', 'EY1', 'V', 'ER0']), 'wake': ('NN', ['W', 'EY1', 'K']), 'woke': ('NN', ['W', 'OW1', 'K']), 'waking': ('VBG', ['W', 'EY1', 'K', 'IH0', 'NG']), 'waken': ('NN', ['W', 'EY1', 'K', 'AH0', 'N']), 'wald': ('NN', ['W', 'AO1', 'L', 'D']), 'wale': ('NN', ['W', 'EY1', 'L']), 'walked': ('VBD', ['W', 'AO1', 'K', 'T']), 'walking': ('VBG', ['W', 'AO1', 'K', 'IH0', 'NG']), 'walk': ('NN', ['W', 'AO1', 'K']), 'walker': ('NN', ['W', 'AO1', 'K', 'ER0']), 'wall': ('NN', ['W', 'AO1', 'L']), 'walled': ('VBD', ['W', 'AO1', 'L', 'D']), 'walling': ('VBG', ['W', 'AO1', 'L', 'IH0', 'NG']), 'wallabies': ('NNS', ['W', 'AA1', 'L', 'AH0', 'B', 'IY0', 'Z']), 'wallaby': ('NN', ['W', 'AA1', 'L', 'AH0', 'B', 'IY0']), 'waller': 
('NN', ['W', 'AO1', 'L', 'ER0']), 'wallet': ('NN', ['W', 'AO1', 'L', 'AH0', 'T']), 'wallflower': ('NN', ['W', 'AO1', 'L', 'F', 'L', 'AW2', 'ER0']), 'wallop': ('NN', ['W', 'AA1', 'L', 'AH0', 'P']), 'walloping': ('VBG', ['W', 'AO1', 'L', 'AH0', 'P', 'IH0', 'NG']), 'wallowed': ('NN', ['W', 'AA1', 'L', 'OW0', 'D']), 'wallowing': ('VBG', ['W', 'AA1', 'L', 'OW0', 'IH0', 'NG']), 'wallow': ('NN', ['W', 'AA1', 'L', 'OW0']), 'walnut': ('NN', ['W', 'AO1', 'L', 'N', 'AH2', 'T']), 'walrus': ('NN', ['W', 'AO1', 'L', 'R', 'AH0', 'S']), 'walter': ('NN', ['W', 'AO1', 'L', 'T', 'ER0']), 'waltz': ('NN', ['W', 'AO1', 'L', 'T', 'S']), 'waltzing': ('VBG', ['W', 'AO1', 'L', 'T', 'S', 'IH0', 'NG']), 'waltzer': ('NN', ['W', 'AO1', 'L', 'T', 'S', 'ER0']), 'wamble': ('JJ', ['W', 'AA1', 'M', 'B', 'AH0', 'L']), 'wampum': ('NN', ['W', 'AA1', 'M', 'P', 'AH0', 'M']), 'wan': ('NN', ['W', 'AA1', 'N']), 'wand': ('NN', ['W', 'AA1', 'N', 'D']), 'wandered': ('VBD', ['W', 'AA1', 'N', 'D', 'ER0', 'D']), 'wandering': ('VBG', ['W', 'AA1', 'N', 'D', 'ER0', 'IH0', 'NG']), 'wander': ('NN', ['W', 'AA1', 'N', 'D', 'ER0']), 'wanderer': ('NN', ['W', 'AA1', 'N', 'D', 'ER0', 'ER0']), 'waned': ('VBD', ['W', 'EY1', 'N', 'D']), 'waning': ('VBG', ['W', 'EY1', 'N', 'IH0', 'NG']), 'wane': ('NN', ['W', 'EY1', 'N']), 'wang': ('NN', ['W', 'AE1', 'NG']), 'wanger': ('NN', ['W', 'AE1', 'NG', 'ER0']), 'want': ('NN', ['W', 'AA1', 'N', 'T']), 'wanted': ('VBD', ['W', 'AA1', 'N', 'T', 'AH0', 'D']), 'wanting': ('VBG', ['W', 'AA1', 'N', 'T', 'IH0', 'NG']), 'wantage': ('NN', ['W', 'AA1', 'N', 'T', 'IH0', 'JH']), 'wanton': ('NN', ['W', 'AO1', 'N', 'T', 'AH0', 'N']), 'wantonly': ('RB', ['W', 'AO1', 'N', 'T', 'AH0', 'N', 'L', 'IY0']), 'wapiti': ('NN', ['W', 'AH0', 'P', 'IY1', 'T', 'IY0']), 'wapping': ('VBG', ['W', 'AA1', 'P', 'IH0', 'NG']), 'war': ('NN', ['W', 'AO1', 'R']), 'warring': ('VBG', ['W', 'AO1', 'R', 'IH0', 'NG']), 'warble': ('JJ', ['W', 'AO1', 'R', 'B', 'AH0', 'L']), 'warbled': ('VBD', ['W', 'AO1', 'R', 'B', 'AH0', 'L', 'D']), 
'warbling': ('VBG', ['W', 'AO1', 'R', 'B', 'AH0', 'L', 'IH0', 'NG']), 'warbler': ('NN', ['W', 'AO1', 'R', 'B', 'L', 'ER0']), 'ward': ('NN', ['W', 'AO1', 'R', 'D']), 'warded': ('VBD', ['W', 'AO1', 'R', 'D', 'IH0', 'D']), 'warding': ('VBG', ['W', 'AO1', 'R', 'D', 'IH0', 'NG']), 'warden': ('NN', ['W', 'AO1', 'R', 'D', 'AH0', 'N']), 'warder': ('NN', ['W', 'AO1', 'R', 'D', 'ER0']), 'wardrobe': ('NN', ['W', 'AO1', 'R', 'D', 'R', 'OW2', 'B']), 'ware': ('NN', ['W', 'EH1', 'R']), 'warehouses': ('NNS', ['W', 'EH1', 'R', 'HH', 'AW2', 'Z', 'IH0', 'Z']), 'warehouse': ('NN', ['W', 'EH1', 'R', 'HH', 'AW2', 'S']), 'warehoused': ('VBN', ['W', 'EH1', 'R', 'HH', 'AW2', 'Z', 'D']), 'warehousing': ('VBG', ['W', 'EH1', 'R', 'HH', 'AW2', 'Z', 'IH0', 'NG']), 'wares': ('NNS', ['W', 'EH1', 'R', 'Z']), 'warfare': ('NN', ['W', 'AO1', 'R', 'F', 'EH2', 'R']), 'warily': ('RB', ['W', 'EH1', 'R', 'AH0', 'L', 'IY0']), 'wariness': ('NN', ['W', 'EH1', 'R', 'IY0', 'N', 'IH0', 'S']), 'wark': ('NN', ['W', 'AO1', 'R', 'K']), 'warlike': ('NN', ['W', 'AO1', 'R', 'L', 'AY2', 'K']), 'warling': ('VBG', ['W', 'AO1', 'R', 'L', 'IH0', 'NG']), 'warm': ('NN', ['W', 'AO1', 'R', 'M']), 'warmed': ('VBN', ['W', 'AO1', 'R', 'M', 'D']), 'warming': ('VBG', ['W', 'AO1', 'R', 'M', 'IH0', 'NG']), 'warmer': ('NN', ['W', 'AO1', 'R', 'M', 'ER0']), 'warmly': ('RB', ['W', 'AO1', 'R', 'M', 'L', 'IY0']), 'warmonger': ('NN', ['W', 'AO1', 'R', 'M', 'AA2', 'NG', 'G', 'ER0']), 'warmth': ('NN', ['W', 'AO1', 'R', 'M', 'TH']), 'warn': ('NN', ['W', 'AO1', 'R', 'N']), 'warned': ('VBD', ['W', 'AO1', 'R', 'N', 'D']), 'warning': ('VBG', ['W', 'AO1', 'R', 'N', 'IH0', 'NG']), 'warner': ('NN', ['W', 'AO1', 'R', 'N', 'ER0']), 'warningly': ('RB', ['W', 'AO1', 'R', 'N', 'IH0', 'NG', 'L', 'IY0']), 'warped': ('VBD', ['W', 'AO1', 'R', 'P', 'T']), 'warping': ('VBG', ['W', 'AO1', 'R', 'P', 'IH0', 'NG']), 'warp': ('NN', ['W', 'AO1', 'R', 'P']), 'warpath': ('NN', ['W', 'AO1', 'R', 'P', 'AE2', 'TH']), 'warrant': ('NN', ['W', 'AO1', 'R', 'AH0', 'N', 'T']), 
'warranted': ('VBN', ['W', 'AO1', 'R', 'AH0', 'N', 'T', 'IH0', 'D']), 'warranties': ('NNS', ['W', 'AO1', 'R', 'AH0', 'N', 'T', 'IY0', 'Z']), 'warranty': ('NN', ['W', 'AO1', 'R', 'AH0', 'N', 'T', 'IY0']), 'warren': ('NNS', ['W', 'AO1', 'R', 'AH0', 'N']), 'warrior': ('NN', ['W', 'AO1', 'R', 'IY0', 'ER0']), 'warsaw': ('NN', ['W', 'AO1', 'R', 'S', 'AO2']), 'wart': ('NN', ['W', 'AO1', 'R', 'T']), 'wary': ('JJ', ['W', 'EH1', 'R', 'IY0']), 'was': ('VBD', ['W', 'AA1', 'Z']), 'washed': ('VBN', ['W', 'AA1', 'SH', 'T']), 'washing': ('VBG', ['W', 'AA1', 'SH', 'IH0', 'NG']), 'wash': ('NN', ['W', 'AA1', 'SH']), 'washable': ('JJ', ['W', 'AA1', 'SH', 'AH0', 'B', 'AH0', 'L']), 'washboard': ('NN', ['W', 'AA1', 'SH', 'B', 'AO2', 'R', 'D']), 'washer': ('NN', ['W', 'AA1', 'SH', 'ER0']), 'washingtonian': ('JJ', ['W', 'AA2', 'SH', 'IH0', 'NG', 'T', 'OW1', 'N', 'IY0', 'AH0', 'N']), 'washout': ('NN', ['W', 'AA1', 'SH', 'AW2', 'T']), 'washtub': ('NN', ['W', 'AA1', 'SH', 'T', 'AH2', 'B']), 'washy': ('NN', ['W', 'AA1', 'SH', 'IY0']), 'wasp': ('NN', ['W', 'AA1', 'S', 'P']), 'waste': ('NN', ['W', 'EY1', 'S', 'T']), 'wasted': ('VBN', ['W', 'EY1', 'S', 'T', 'AH0', 'D']), 'wasting': ('VBG', ['W', 'EY1', 'S', 'T', 'IH0', 'NG']), 'wastebasket': ('NN', ['W', 'EY1', 'S', 'T', 'B', 'AE2', 'S', 'K', 'AH0', 'T']), 'wasteful': ('NN', ['W', 'EY1', 'S', 'T', 'F', 'AH0', 'L']), 'waster': ('NN', ['W', 'EY1', 'S', 'T', 'ER0']), 'watch': ('NN', ['W', 'AA1', 'CH']), 'watched': ('VBN', ['W', 'AA1', 'CH', 'T']), 'watching': ('VBG', ['W', 'AA1', 'CH', 'IH0', 'NG']), 'watchdog': ('NN', ['W', 'AA1', 'CH', 'D', 'AO2', 'G']), 'watcher': ('NN', ['W', 'AA1', 'CH', 'ER0']), 'watches': ('NNS', ['W', 'AA1', 'CH', 'AH0', 'Z']), 'watchful': ('NN', ['W', 'AA1', 'CH', 'F', 'AH0', 'L']), 'watchmen': ('NNS', ['W', 'AA1', 'CH', 'M', 'EH0', 'N']), 'watchman': ('NN', ['W', 'AA1', 'CH', 'M', 'AH0', 'N']), 'watchword': ('NN', ['W', 'AA1', 'CH', 'W', 'ER2', 'D']), 'water': ('NN', ['W', 'AO1', 'T', 'ER0']), 'watered': ('VBD', ['W', 
'AO1', 'T', 'ER0', 'D']), 'watering': ('VBG', ['W', 'AO1', 'T', 'ER0', 'IH0', 'NG']), 'waterer': ('NN', ['W', 'AO1', 'T', 'ER0', 'ER0']), 'waterfall': ('NN', ['W', 'AO1', 'T', 'ER0', 'F', 'AO2', 'L']), 'waterfowl': ('NN', ['W', 'AO1', 'T', 'ER0', 'F', 'AW2', 'L']), 'waterishness': ('NN', ['W', 'AO1', 'T', 'ER0', 'IH0', 'SH', 'N', 'AH0', 'S']), 'watermen': ('NNS', ['W', 'AO1', 'T', 'ER0', 'M', 'AH0', 'N']), 'waterman': ('NN', ['W', 'AO1', 'T', 'ER0', 'M', 'AH0', 'N']), 'watermark': ('NN', ['W', 'AO1', 'T', 'ER0', 'M', 'AA2', 'R', 'K']), 'watermelon': ('NN', ['W', 'AO1', 'T', 'ER0', 'M', 'EH2', 'L', 'AH0', 'N']), 'waterproof': ('NN', ['W', 'AO1', 'T', 'ER0', 'P', 'R', 'UW2', 'F']), 'waterproofing': ('VBG', ['W', 'AO1', 'T', 'ER0', 'P', 'R', 'UW2', 'F', 'IH0', 'NG']), 'watershed': ('VBN', ['W', 'AO1', 'T', 'ER0', 'SH', 'EH2', 'D']), 'x': ('NN', ['EH1', 'K', 'S']), 'xebec': ('NN', ['Z', 'EH1', 'B', 'AH0', 'K']), 'xenia': ('NN', ['Z', 'IY1', 'N', 'IY0', 'AH0']), 'xylophone': ('NN', ['Z', 'AY1', 'L', 'AH0', 'F', 'OW2', 'N']), 'y': ('NN', ['W', 'AY1']), "y's": ('NN', ['W', 'AY1', 'Z']), 'ya': ('NN', ['Y', 'AA1']), 'yacht': ('NN', ['Y', 'AA1', 'T']), 'yachter': ('NN', ['Y', 'AA1', 'T', 'ER0']), 'yachting': ('VBG', ['Y', 'AA1', 'T', 'IH0', 'NG']), 'yachtsman': ('NN', ['Y', 'AA1', 'T', 'S', 'M', 'AH0', 'N']), 'yager': ('NN', ['Y', 'EY1', 'JH', 'ER0']), 'yak': ('NN', ['Y', 'AE1', 'K']), 'yam': ('NN', ['Y', 'AE1', 'M']), 'yang': ('NN', ['Y', 'AE1', 'NG']), 'yank': ('NN', ['Y', 'AE1', 'NG', 'K']), 'yanked': ('NNS', ['Y', 'AE1', 'NG', 'K', 'T']), 'yanking': ('VBG', ['Y', 'AE1', 'NG', 'K', 'IH0', 'NG']), 'yankee': ('NN', ['Y', 'AE1', 'NG', 'K', 'IY0']), 'yap': ('NN', ['Y', 'AE1', 'P']), 'yard': ('NN', ['Y', 'AA1', 'R', 'D']), 'yardstick': ('NN', ['Y', 'AA1', 'R', 'D', 'S', 'T', 'IH2', 'K']), 'yarn': ('NN', ['Y', 'AA1', 'R', 'N']), 'yarrow': ('NN', ['Y', 'EH1', 'R', 'OW0']), 'yaupon': ('NN', ['Y', 'AO1', 'P', 'AH0', 'N']), 'yaw': ('NN', ['Y', 'AO1']), 'yawned': ('VBN', ['Y', 
'AO1', 'N', 'D']), 'yawning': ('VBG', ['Y', 'AO1', 'N', 'IH0', 'NG']), 'yawn': ('NN', ['Y', 'AO1', 'N']), 'ye': ('NN', ['Y', 'IY1']), 'yen': ('NN', ['Y', 'EH1', 'N']), 'yea': ('NN', ['Y', 'EY1']), 'year': ('NN', ['Y', 'IH1', 'R']), 'yearbook': ('NN', ['Y', 'IH1', 'R', 'B', 'UH2', 'K']), 'yearling': ('VBG', ['Y', 'ER1', 'L', 'IH0', 'NG']), 'yearly': ('RB', ['Y', 'IH1', 'R', 'L', 'IY0']), 'yearned': ('VBN', ['Y', 'ER1', 'N', 'D']), 'yearning': ('VBG', ['Y', 'ER1', 'N', 'IH0', 'NG']), 'yearn': ('NN', ['Y', 'ER1', 'N']), 'yearnings': ('NNS', ['Y', 'ER1', 'N', 'IH0', 'NG', 'Z']), 'yeast': ('NN', ['Y', 'IY1', 'S', 'T']), 'yelled': ('VBN', ['Y', 'EH1', 'L', 'D']), 'yelling': ('VBG', ['Y', 'EH1', 'L', 'IH0', 'NG']), 'yell': ('NN', ['Y', 'EH1', 'L']), 'yellow': ('NN', ['Y', 'EH1', 'L', 'OW0']), 'yellowing': ('VBG', ['Y', 'EH1', 'L', 'OW0', 'IH0', 'NG']), 'yellowish': ('NN', ['Y', 'EH1', 'L', 'OW0', 'IH0', 'SH']), 'yellows': ('NNS', ['Y', 'EH1', 'L', 'OW0', 'Z']), 'yelping': ('VBG', ['Y', 'EH1', 'L', 'P', 'IH0', 'NG']), 'yelp': ('NN', ['Y', 'EH1', 'L', 'P']), 'yeoman': ('NN', ['Y', 'OW1', 'M', 'AH0', 'N']), 'yer': ('NN', ['Y', 'ER1']), 'yes': ('NNS', ['Y', 'EH1', 'S']), 'yest': ('JJS', ['Y', 'EH1', 'S', 'T']), 'yesterday': ('NN', ['Y', 'EH1', 'S', 'T', 'ER0', 'D', 'EY2']), 'yesteryear': ('NN', ['Y', 'EH1', 'S', 'T', 'ER0', 'Y', 'IH2', 'R']), 'yet': ('RB', ['Y', 'EH1', 'T']), 'yew': ('NN', ['Y', 'UW1']), 'yielded': ('VBN', ['Y', 'IY1', 'L', 'D', 'AH0', 'D']), 'yielding': ('VBG', ['Y', 'IY1', 'L', 'D', 'IH0', 'NG']), 'yield': ('NN', ['Y', 'IY1', 'L', 'D']), 'yin': ('NN', ['Y', 'IH1', 'N']), 'yodeling': ('VBG', ['Y', 'OW1', 'D', 'AH0', 'L', 'IH0', 'NG']), 'yodel': ('NN', ['Y', 'OW1', 'D', 'AH0', 'L']), 'yoga': ('NN', ['Y', 'OW1', 'G', 'AH0']), 'yogi': ('NN', ['Y', 'OW1', 'G', 'IY0']), 'yoke': ('NN', ['Y', 'OW1', 'K']), 'yoked': ('NNS', ['Y', 'OW1', 'K', 'T']), 'yokel': ('NN', ['Y', 'OW1', 'K', 'EH0', 'L']), 'yolk': ('NN', ['Y', 'OW1', 'K']), 'yon': ('NN', ['Y', 'AA1', 'N']), 
'yonder': ('NN', ['Y', 'AA1', 'N', 'D', 'ER0']), 'yonker': ('NN', ['Y', 'AA1', 'NG', 'K', 'ER0']), 'yore': ('NN', ['Y', 'AO1', 'R']), 'yorker': ('NN', ['Y', 'AO1', 'R', 'K', 'ER0']), 'yorkshire': ('NN', ['Y', 'AO1', 'R', 'K', 'SH', 'ER0']), 'you': ('PRP', ['Y', 'UW1']), 'young': ('JJ', ['Y', 'AH1', 'NG']), 'youngster': ('NN', ['Y', 'AH1', 'NG', 'S', 'T', 'ER0']), 'younker': ('NN', ['Y', 'AH1', 'NG', 'K', 'ER0']), 'your': ('PRP$', ['Y', 'AO1', 'R']), 'yours': ('NNS', ['Y', 'UH1', 'R', 'Z']), 'yourselves': ('NNS', ['Y', 'UH0', 'R', 'S', 'EH1', 'L', 'V', 'Z']), 'yourself': ('PRP', ['Y', 'ER0', 'S', 'EH1', 'L', 'F']), 'youths': ('NNS', ['Y', 'UW1', 'DH', 'Z']), 'youth': ('NN', ['Y', 'UW1', 'TH']), 'youthful': ('NN', ['Y', 'UW1', 'TH', 'F', 'AH0', 'L']), 'yow': ('NN', ['Y', 'AW1']), 'yttrium': ('NN', ['IH1', 'T', 'R', 'IY0', 'AH0', 'M']), 'yu': ('NN', ['Y', 'UW1']), 'yucca': ('NN', ['Y', 'AH1', 'K', 'AH0']), 'yuck': ('NN', ['Y', 'AH1', 'K']), 'yuen': ('NN', ['Y', 'W', 'EH1', 'N']), 'yule': ('NN', ['Y', 'UW1', 'L']), 'yuletide': ('NN', ['Y', 'UW1', 'L', 'T', 'AY2', 'D']), 'z': ('NN', ['Z', 'IY1']), 'zambo': ('NN', ['Z', 'AE1', 'M', 'B', 'OW0']), 'zander': ('NN', ['Z', 'AE1', 'N', 'D', 'ER0']), 'zany': ('NN', ['Z', 'EY1', 'N', 'IY0']), 'zea': ('NN', ['Z', 'IY1']), 'zeal': ('NN', ['Z', 'IY1', 'L']), 'zealot': ('NN', ['Z', 'EH1', 'L', 'AH0', 'T']), 'zealotry': ('NN', ['Z', 'EH1', 'L', 'AH0', 'T', 'R', 'IY0']), 'zealous': ('JJ', ['Z', 'EH1', 'L', 'AH0', 'S']), 'zebra': ('NN', ['Z', 'IY1', 'B', 'R', 'AH0']), 'zed': ('VBN', ['Z', 'EH1', 'D']), 'zehner': ('NN', ['Z', 'EH1', 'N', 'ER0']), 'zenith': ('NN', ['Z', 'IY1', 'N', 'AH0', 'TH']), 'zephyr': ('NN', ['Z', 'EH1', 'F', 'ER0']), 'zeros': ('NN', ['Z', 'IH1', 'R', 'OW0', 'Z']), 'zeroes': ('NNS', ['Z', 'IH1', 'R', 'OW0', 'Z']), 'zero': ('NN', ['Z', 'IH1', 'R', 'OW0']), 'zest': ('NN', ['Z', 'EH1', 'S', 'T']), 'zeta': ('NN', ['Z', 'EY1', 'T', 'AH0']), 'zeus': ('NN', ['Z', 'UW1', 'S']), 'zigzag': ('NN', ['Z', 'IH1', 'G', 'Z', 'AE0', 
'G']), 'zigzagged': ('VBN', ['Z', 'IH1', 'G', 'Z', 'AE0', 'G', 'D']), 'zigzagging': ('VBG', ['Z', 'IH1', 'G', 'Z', 'AE0', 'G', 'IH0', 'NG']), 'zilla': ('NN', ['Z', 'IH1', 'L', 'AH0']), 'zillah': ('NN', ['Z', 'IH1', 'L', 'AH0']), 'zinc': ('NN', ['Z', 'IH1', 'NG', 'K']), 'zingaro': ('NN', ['Z', 'IH0', 'NG', 'G', 'AA1', 'R', 'OW0']), 'zink': ('NN', ['Z', 'IH1', 'NG', 'K']), 'zinnia': ('NN', ['Z', 'IH1', 'N', 'IY0', 'AH0']), 'zion': ('NN', ['Z', 'AY1', 'AH0', 'N']), 'zircon': ('NN', ['Z', 'ER1', 'K', 'AH0', 'N']), 'zirconium': ('NN', ['Z', 'ER0', 'K', 'OW1', 'N', 'IY0', 'AH0', 'M']), 'zither': ('NN', ['Z', 'IH1', 'DH', 'ER0'])} | 561,672 | 561,672 | 0.363694 | 79,556 | 561,672 | 2.567713 | 0.124076 | 0.028275 | 0.009605 | 0.008459 | 0.358895 | 0.208751 | 0.118476 | 0.064359 | 0.030532 | 0.011147 | 0 | 0.049045 | 0.141616 | 561,672 | 1 | 561,672 | 561,672 | 0.374652 | 0 | 0 | 0 | 0 | 0 | 0.363712 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | false | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.