hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
92f4701ac7597efabb79e1409fe5e3ae82d2bd9b
| 133
|
py
|
Python
|
graphadv/attack/__init__.py
|
EdisonLeeeee/graphadv
|
bff372768b4082af95de9e576c7083ba42773666
|
[
"MIT"
] | 5
|
2020-08-01T15:54:58.000Z
|
2021-12-15T10:47:45.000Z
|
graphadv/attack/__init__.py
|
EdisonLeeeee/graphadv
|
bff372768b4082af95de9e576c7083ba42773666
|
[
"MIT"
] | 5
|
2020-11-13T19:01:52.000Z
|
2022-02-10T02:02:34.000Z
|
graphadv/attack/__init__.py
|
EdisonLeeeee/graphadv
|
bff372768b4082af95de9e576c7083ba42773666
|
[
"MIT"
] | 2
|
2020-10-12T08:31:06.000Z
|
2020-12-14T08:24:57.000Z
|
from graphadv.attack.targeted import *
from graphadv.attack.untargeted import *
from graphadv.attack.baseattacker import BaseAttacker
| 44.333333
| 53
| 0.857143
| 16
| 133
| 7.125
| 0.4375
| 0.315789
| 0.473684
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082707
| 133
| 3
| 53
| 44.333333
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
132f8a04a2b9e8ec1036fdb1ead42400fff010a7
| 78,418
|
py
|
Python
|
gluon/tests/test_router.py
|
jessicadelrio/HandyHouse
|
058e8981da850790c84f990fd2a3bbcf9aa695cc
|
[
"BSD-3-Clause"
] | 3
|
2017-07-22T18:37:23.000Z
|
2017-07-29T15:08:28.000Z
|
gluon/tests/test_router.py
|
jessicadelrio/HandyHouse
|
058e8981da850790c84f990fd2a3bbcf9aa695cc
|
[
"BSD-3-Clause"
] | 2
|
2019-08-05T21:08:11.000Z
|
2020-04-24T23:21:25.000Z
|
gluon/tests/test_router.py
|
jessicadelrio/HandyHouse
|
058e8981da850790c84f990fd2a3bbcf9aa695cc
|
[
"BSD-3-Clause"
] | 3
|
2018-12-08T23:59:17.000Z
|
2019-02-13T23:04:38.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Unit tests for rewrite.py routers option"""
from __future__ import print_function
import os
import unittest
import tempfile
import logging
from gluon.rewrite import load, filter_url, filter_err, get_effective_router, map_url_out
from gluon.html import URL
from gluon.fileutils import abspath
from gluon.settings import global_settings
from gluon.http import HTTP
from gluon.storage import Storage
from gluon._compat import to_bytes, PY2
logger = None
oldcwd = None
root = None
def norm_root(root):
return root.replace('/', os.sep)
def setUpModule():
def make_apptree():
"build a temporary applications tree"
# applications/
os.mkdir(abspath('applications'))
# applications/app/
for app in ('admin', 'examples', 'welcome'):
os.mkdir(abspath('applications', app))
# applications/app/(controllers, static)
for subdir in ('controllers', 'static'):
os.mkdir(abspath('applications', app, subdir))
# applications/admin/controllers/*.py
for ctr in ('appadmin', 'default', 'gae', 'mercurial', 'shell', 'wizard'):
open(abspath('applications', 'admin',
'controllers', '%s.py' % ctr), 'w').close()
# applications/examples/controllers/*.py
for ctr in ('ajax_examples', 'appadmin', 'default', 'global', 'spreadsheet'):
open(abspath('applications', 'examples',
'controllers', '%s.py' % ctr), 'w').close()
# applications/welcome/controllers/*.py
# (include controller that collides with another app)
for ctr in ('appadmin', 'default', 'other', 'admin'):
open(abspath('applications', 'welcome',
'controllers', '%s.py' % ctr), 'w').close()
# create an app-specific routes.py for examples app
routes = open(abspath('applications', 'examples', 'routes.py'), 'w')
routes.write("routers=dict(examples=dict(default_function='exdef'))")
routes.close()
# create language files for examples app
for lang in ('en', 'it'):
os.mkdir(abspath('applications', 'examples', 'static', lang))
open(abspath('applications', 'examples', 'static',
lang, 'file'), 'w').close()
global oldcwd
if oldcwd is None: # do this only once
oldcwd = os.getcwd()
if not os.path.isdir('gluon'):
os.chdir(os.path.realpath(
'../../')) # run from web2py base directory
import gluon.main # for initialization after chdir
global logger
logger = logging.getLogger('web2py.rewrite')
global_settings.applications_parent = tempfile.mkdtemp()
global root
root = global_settings.applications_parent
make_apptree()
def tearDownModule():
global oldcwd
if oldcwd is not None:
os.chdir(oldcwd)
oldcwd = None
class TestRouter(unittest.TestCase):
""" Tests the routers logic from gluon.rewrite """
def myassertRaisesRegex(self, *args, **kwargs):
if PY2:
return getattr(self, 'assertRaisesRegexp')(*args, **kwargs)
return getattr(self, 'assertRaisesRegex')(*args, **kwargs)
def test_router_syntax(self):
""" Test router syntax error """
level = logger.getEffectiveLevel()
logger.setLevel(logging.CRITICAL) # disable logging temporarily
self.assertRaises(SyntaxError, load, data='x::y')
self.assertRaises(
SyntaxError, load, rdict=dict(BASE=dict(badkey="value")))
self.assertRaises(SyntaxError, load, rdict=dict(
BASE=dict(), app=dict(default_application="name")))
self.myassertRaisesRegex(SyntaxError, "invalid syntax",
load, data='x::y')
self.myassertRaisesRegex(SyntaxError, "unknown key",
load, rdict=dict(BASE=dict(badkey="value")))
self.myassertRaisesRegex(SyntaxError, "BASE-only key",
load, rdict=dict(BASE=dict(), app=dict(default_application="name")))
logger.setLevel(level)
def test_router_null(self):
""" Tests the null router """
load(rdict=dict())
# app resolution
self.assertEqual(
filter_url('http://domain.com/welcome', app=True), 'welcome')
self.assertEqual(filter_url('http://domain.com/', app=True), 'init')
# incoming
self.assertEqual(filter_url('http://domain.com/favicon.ico'),
norm_root('%s/applications/init/static/favicon.ico' % root))
self.assertEqual(
filter_url('http://domain.com/abc'), '/init/default/abc')
self.assertEqual(filter_url(
'http://domain.com/index/abc'), "/init/default/index ['abc']")
self.assertEqual(filter_url(
'http://domain.com/abc/def'), "/init/default/abc ['def']")
self.assertEqual(filter_url(
'http://domain.com/index/a%20bc'), "/init/default/index ['a bc']")
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to/static" % root))
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to/st~tic')
self.myassertRaisesRegex(HTTP, "400.*invalid static file", filter_url, 'http://domain.com/welcome/static/bad/path/to/st~tic')
# outgoing
self.assertEqual(
filter_url('http://domain.com/init/default/index', out=True), '/')
self.assertEqual(filter_url('http://domain.com/init/default/index/arg1', out=True), '/index/arg1')
self.assertEqual(filter_url(
'http://domain.com/init/default/abc', out=True), '/abc')
self.assertEqual(filter_url('http://domain.com/init/static/abc',
out=True), '/init/static/abc')
self.assertEqual(filter_url(
'http://domain.com/init/appadmin/index', out=True), '/appadmin')
self.assertEqual(filter_url(
'http://domain.com/init/appadmin/abc', out=True), '/appadmin/abc')
self.assertEqual(filter_url(
'http://domain.com/init/admin/index', out=True), '/init/admin')
self.assertEqual(filter_url(
'http://domain.com/init/admin/abc', out=True), '/init/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
def test_router_specific(self):
"""
Test app-specific routes.py
Note that make_apptree above created applications/examples/routes.py with a default_function.
"""
load(rdict=dict())
self.assertEqual(
filter_url('http://domain.com/welcome'), '/welcome/default/index')
self.assertEqual(
filter_url('http://domain.com/examples'), '/examples/default/exdef')
def test_router_defapp(self):
""" Test the default-application function """
routers = dict(BASE=dict(default_application='welcome'))
load(rdict=routers)
# app resolution
self.assertEqual(
filter_url('http://domain.com/welcome', app=True), 'welcome')
self.assertEqual(filter_url('http://domain.com/', app=True), 'welcome')
# incoming
self.assertEqual(
filter_url('http://domain.com'), '/welcome/default/index')
self.assertEqual(
filter_url('http://domain.com/'), '/welcome/default/index')
self.assertEqual(filter_url(
'http://domain.com/appadmin'), '/welcome/appadmin/index')
self.assertEqual(
filter_url('http://domain.com/abc'), '/welcome/default/abc')
self.assertEqual(filter_url(
'http://domain.com/index/abc'), "/welcome/default/index ['abc']")
self.assertEqual(filter_url(
'http://domain.com/abc/def'), "/welcome/default/abc ['def']")
self.assertEqual(filter_url('http://domain.com/favicon.ico'),
norm_root('%s/applications/welcome/static/favicon.ico' % root))
self.assertEqual(filter_url('http://domain.com/static/abc'),
norm_root('%s/applications/welcome/static/abc' % root))
self.assertEqual(filter_url('http://domain.com/static/path/to/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to/static" % root))
# outgoing
self.assertEqual(filter_url(
'http://domain.com/welcome/default/index', out=True), '/')
self.assertEqual(filter_url('http://domain.com/welcome/default/index/arg1', out=True), '/index/arg1')
self.assertEqual(filter_url(
'http://domain.com/welcome/default/abc', out=True), '/abc')
self.assertEqual(filter_url('http://domain.com/welcome/default/admin',
out=True), '/default/admin')
self.assertEqual(
filter_url('http://domain.com/welcome/static/abc', out=True),
'/welcome/static/abc')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/index',
out=True), '/appadmin')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/abc',
out=True), '/appadmin/abc')
self.assertEqual(filter_url('http://domain.com/welcome/admin/index',
out=True), '/welcome/admin')
self.assertEqual(filter_url('http://domain.com/welcome/admin/abc',
out=True), '/welcome/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
def test_router_nodef(self):
""" Test no-default functions """
routers = dict(
BASE=dict(default_application='welcome'),
welcome=dict(controllers=None),
)
load(rdict=routers)
# outgoing
self.assertEqual(filter_url(
'http://domain.com/welcome/default/index', out=True), '/default')
self.assertEqual(filter_url('http://domain.com/welcome/default/index/arg1', out=True), '/default/index/arg1')
self.assertEqual(filter_url('http://domain.com/welcome/default/abc',
out=True), '/default/abc')
self.assertEqual(
filter_url('http://domain.com/welcome/static/abc', out=True),
'/welcome/static/abc')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/index',
out=True), '/appadmin')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/abc',
out=True), '/appadmin/abc')
self.assertEqual(filter_url('http://domain.com/welcome/admin/index',
out=True), '/welcome/admin')
self.assertEqual(filter_url('http://domain.com/welcome/admin/abc',
out=True), '/welcome/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
# incoming
self.assertEqual(
filter_url('http://domain.com'), '/welcome/default/index')
self.assertEqual(
filter_url('http://domain.com/'), '/welcome/default/index')
self.assertEqual(filter_url(
'http://domain.com/appadmin'), '/welcome/appadmin/index')
self.assertEqual(
filter_url('http://domain.com/abc'), '/welcome/abc/index')
self.assertEqual(
filter_url('http://domain.com/index/abc'), "/welcome/index/abc")
self.assertEqual(
filter_url('http://domain.com/abc/def'), "/welcome/abc/def")
self.assertEqual(filter_url(
'http://domain.com/abc/def/ghi'), "/welcome/abc/def ['ghi']")
routers = dict(
BASE=dict(default_application=None),
)
load(rdict=routers)
# outgoing
self.assertEqual(filter_url(
'http://domain.com/welcome/default/index', out=True), '/welcome')
self.assertEqual(filter_url('http://domain.com/welcome/default/index/arg1', out=True), '/welcome/index/arg1')
self.assertEqual(filter_url('http://domain.com/welcome/default/abc',
out=True), '/welcome/abc')
self.assertEqual(filter_url('http://domain.com/welcome/static/abc',
out=True), '/welcome/static/abc')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/index',
out=True), '/welcome/appadmin')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/abc',
out=True), '/welcome/appadmin/abc')
self.assertEqual(filter_url('http://domain.com/welcome/admin/index',
out=True), '/welcome/admin')
self.assertEqual(filter_url('http://domain.com/welcome/admin/abc',
out=True), '/welcome/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
# incoming
self.assertRaises(HTTP, filter_url, 'http://domain.com')
self.assertRaises(HTTP, filter_url, 'http://domain.com/appadmin')
self.myassertRaisesRegex(HTTP, "400.*invalid application",
filter_url, 'http://domain.com')
self.myassertRaisesRegex(HTTP, "400.*invalid application",
filter_url, 'http://domain.com/appadmin')
routers = dict(
BASE=dict(default_application='welcome', applications=None),
)
load(rdict=routers)
# outgoing
self.assertEqual(filter_url(
'http://domain.com/welcome/default/index', out=True), '/welcome')
self.assertEqual(filter_url('http://domain.com/welcome/default/index/arg1', out=True), '/welcome/index/arg1')
self.assertEqual(filter_url('http://domain.com/welcome/default/abc',
out=True), '/welcome/abc')
self.assertEqual(filter_url('http://domain.com/welcome/static/abc',
out=True), '/welcome/static/abc')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/index',
out=True), '/welcome/appadmin')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/abc',
out=True), '/welcome/appadmin/abc')
self.assertEqual(filter_url('http://domain.com/welcome/admin/index',
out=True), '/welcome/admin')
self.assertEqual(filter_url('http://domain.com/welcome/admin/abc',
out=True), '/welcome/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
# incoming
self.assertEqual(
filter_url('http://domain.com'), '/welcome/default/index')
self.assertEqual(
filter_url('http://domain.com/'), '/welcome/default/index')
self.assertRaises(HTTP, filter_url, 'http://domain.com/appadmin')
self.myassertRaisesRegex(HTTP, "400.*unknown application: 'appadmin'", filter_url, 'http://domain.com/appadmin')
routers = dict(
BASE=dict(default_application='welcome', applications=None),
welcome=dict(controllers=None),
)
load(rdict=routers)
# outgoing
self.assertEqual(filter_url('http://domain.com/welcome/default/index',
out=True), '/welcome/default')
self.assertEqual(filter_url('http://domain.com/welcome/default/index/arg1', out=True), '/welcome/default/index/arg1')
self.assertEqual(filter_url('http://domain.com/welcome/default/abc',
out=True), '/welcome/default/abc')
self.assertEqual(filter_url('http://domain.com/welcome/static/abc',
out=True), '/welcome/static/abc')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/index',
out=True), '/welcome/appadmin')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/abc',
out=True), '/welcome/appadmin/abc')
self.assertEqual(filter_url('http://domain.com/welcome/admin/index',
out=True), '/welcome/admin')
self.assertEqual(filter_url('http://domain.com/welcome/admin/abc',
out=True), '/welcome/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
# incoming
self.assertEqual(
filter_url('http://domain.com'), '/welcome/default/index')
self.assertEqual(
filter_url('http://domain.com/'), '/welcome/default/index')
self.assertRaises(HTTP, filter_url, 'http://domain.com/appadmin')
self.myassertRaisesRegex(HTTP, "400.*unknown application: 'appadmin'", filter_url, 'http://domain.com/appadmin')
routers = dict(
BASE=dict(default_application='welcome', applications=None),
welcome=dict(default_controller=None),
)
load(rdict=routers)
# outgoing
self.assertEqual(filter_url('http://domain.com/welcome/default/index',
out=True), '/welcome/default')
self.assertEqual(filter_url('http://domain.com/welcome/default/index/arg1', out=True), '/welcome/default/index/arg1')
self.assertEqual(filter_url('http://domain.com/welcome/default/abc',
out=True), '/welcome/default/abc')
self.assertEqual(filter_url('http://domain.com/welcome/static/abc',
out=True), '/welcome/static/abc')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/index',
out=True), '/welcome/appadmin')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/abc',
out=True), '/welcome/appadmin/abc')
self.assertEqual(filter_url('http://domain.com/welcome/admin/index',
out=True), '/welcome/admin')
self.assertEqual(filter_url('http://domain.com/welcome/admin/abc',
out=True), '/welcome/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
# incoming
self.assertRaises(HTTP, filter_url, 'http://domain.com')
self.assertRaises(HTTP, filter_url, 'http://domain.com/appadmin')
self.myassertRaisesRegex(HTTP, "400.*invalid controller",
filter_url, 'http://domain.com')
self.myassertRaisesRegex(HTTP, "400.*unknown application: 'appadmin'", filter_url, 'http://domain.com/appadmin')
routers = dict(
BASE=dict(default_application='welcome', applications=None),
welcome=dict(controllers=None, default_function=None),
)
load(rdict=routers)
# outgoing
self.assertEqual(filter_url('http://domain.com/welcome/default/index',
out=True), '/welcome/default/index')
self.assertEqual(filter_url('http://domain.com/welcome/default/index/arg1', out=True), '/welcome/default/index/arg1')
self.assertEqual(filter_url('http://domain.com/welcome/default/abc',
out=True), '/welcome/default/abc')
self.assertEqual(filter_url('http://domain.com/welcome/static/abc',
out=True), '/welcome/static/abc')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/index',
out=True), '/welcome/appadmin/index')
self.assertEqual(filter_url('http://domain.com/welcome/appadmin/abc',
out=True), '/welcome/appadmin/abc')
self.assertEqual(filter_url('http://domain.com/welcome/admin/index',
out=True), '/welcome/admin/index')
self.assertEqual(filter_url('http://domain.com/welcome/admin/abc',
out=True), '/welcome/admin/abc')
self.assertEqual(filter_url(
'http://domain.com/admin/default/abc', out=True), '/admin/abc')
# incoming
self.assertRaises(HTTP, filter_url, 'http://domain.com')
self.assertRaises(HTTP, filter_url, 'http://domain.com/appadmin')
self.myassertRaisesRegex(HTTP, "400.*invalid function",
filter_url, 'http://domain.com')
self.myassertRaisesRegex(HTTP, "400.*unknown application: 'appadmin'", filter_url, 'http://domain.com/appadmin')
def test_router_app(self):
""" Tests the doctest router app resolution"""
routers = dict(
BASE=dict(
domains={
"domain1.com": "app1",
"www.domain1.com": "app1",
"domain2.com": "app2",
},
),
app1=dict(),
app2=dict(),
goodapp=dict(),
)
routers['bad!app'] = dict()
load(rdict=routers)
self.assertEqual(
filter_url('http://domain.com/welcome', app=True), 'welcome')
self.assertEqual(
filter_url('http://domain.com/welcome/', app=True), 'welcome')
self.assertEqual(filter_url('http://domain.com', app=True), 'init')
self.assertEqual(filter_url('http://domain.com/', app=True), 'init')
self.assertEqual(filter_url('http://domain.com/abc', app=True), 'init')
self.assertEqual(
filter_url('http://domain1.com/abc', app=True), 'app1')
self.assertEqual(
filter_url('http://www.domain1.com/abc', app=True), 'app1')
self.assertEqual(
filter_url('http://domain2.com/abc', app=True), 'app2')
self.assertEqual(
filter_url('http://domain2.com/admin', app=True), 'admin')
routers['BASE']['exclusive_domain'] = True
load(rdict=routers)
self.assertEqual(
filter_url('http://domain2.com/admin', app=True), 'app2')
self.assertEqual(
filter_url('http://domain.com/goodapp', app=True), 'goodapp')
self.assertRaises(
HTTP, filter_url, 'http://domain.com/bad!app', app=True)
self.myassertRaisesRegex(HTTP, '400.*invalid application',
filter_url, 'http://domain.com/bad!app')
routers['BASE']['domains']['domain3.com'] = 'app3'
self.assertRaises(SyntaxError, load, rdict=routers)
self.myassertRaisesRegex(
SyntaxError, "unknown.*app3", load, rdict=routers)
def test_router_domains_fs(self):
'''
Test URLs that map domains using test filesystem layout
'''
routers = dict(
BASE=dict(
domains={
"domain1.com": "admin",
"domain2.com": "welcome",
},
),
)
load(rdict=routers)
self.assertEqual(
filter_url('http://domain1.com'), '/admin/default/index')
self.assertEqual(
filter_url('http://domain2.com'), '/welcome/default/index')
self.assertEqual(
filter_url('http://domain1.com/gae'), '/admin/gae/index')
self.assertEqual(
filter_url('http://domain2.com/other'), '/welcome/other/index')
self.assertEqual(
filter_url('http://domain1.com/gae/f1'), '/admin/gae/f1')
self.assertEqual(
filter_url('http://domain2.com/f2'), '/welcome/default/f2')
self.assertEqual(
filter_url('http://domain2.com/other/f3'), '/welcome/other/f3')
def test_router_domains(self):
'''
Test URLs that map domains
'''
routers = dict(
BASE=dict(
applications=['app1', 'app2', 'app2A',
'app3', 'app4', 'app5', 'app6'],
domains={
# two domains to the same app
"domain1.com": "app1",
"www.domain1.com": "app1",
# same domain, two ports, to two apps
"domain2.com": "app2a",
"domain2.com:8080": "app2b",
# two domains, same app, two controllers
"domain3a.com": "app3/c3a",
"domain3b.com": "app3/c3b",
# two domains, same app & controller, two functions
"domain4a.com": "app4/c4/f4a",
"domain4b.com": "app4/c4/f4b",
# http vs https
"domain6.com:80": "app6",
"domain6.com:443": "app6s",
},
),
app1=dict(default_controller='c1', default_function='f1',
controllers=['c1'], exclusive_domain=True, ),
app2a=dict(default_controller='c2a',
default_function='f2a', controllers=['c2a'], ),
app2b=dict(default_controller='c2b',
default_function='f2b', controllers=['c2b'], ),
app3=dict(controllers=['c3a', 'c3b'], ),
app4=dict(default_controller='c4', controllers=['c4']),
app5=dict(default_controller='c5',
controllers=['c5'], domain='localhost'),
app6=dict(default_controller='c6',
default_function='f6', controllers=['c6'], ),
app6s=dict(default_controller='c6s',
default_function='f6s', controllers=['c6s'], ),
)
load(rdict=routers)
self.assertEqual(filter_url('http://domain1.com/abc'), '/app1/c1/abc')
self.assertEqual(
filter_url('http://domain1.com/c1/abc'), '/app1/c1/abc')
self.assertEqual(
filter_url('http://domain1.com/abc.html'), '/app1/c1/abc')
self.assertEqual(
filter_url('http://domain1.com/abc.css'), '/app1/c1/abc.css')
self.assertEqual(filter_url(
'http://domain1.com/index/abc'), "/app1/c1/index ['abc']")
self.assertEqual(filter_url('http://domain2.com/app1'), "/app1/c1/f1")
self.assertEqual(filter_url('https://domain1.com/app1/ctr/fcn',
domain=('app1', None), out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://www.domain1.com/app1/ctr/fcn',
domain=('app1', None), out=True), "/ctr/fcn")
self.assertEqual(
filter_url('http://domain2.com/abc'), '/app2a/c2a/abc')
self.assertEqual(
filter_url('http://domain2.com:8080/abc'), '/app2b/c2b/abc')
self.assertEqual(filter_url('http://domain2.com/app2a/ctr/fcn',
domain=('app2a', None), out=True), "/ctr/fcn")
self.assertEqual(filter_url('http://domain2.com/app2a/ctr/f2a',
domain=('app2a', None), out=True), "/ctr")
self.assertEqual(filter_url('http://domain2.com/app2a/c2a/f2a',
domain=('app2a', None), out=True), "/")
self.assertEqual(filter_url('http://domain2.com/app2a/c2a/fcn',
domain=('app2a', None), out=True), "/fcn")
self.assertEqual(filter_url('http://domain2.com/app2a/ctr/fcn',
domain=('app2b', None), out=True), "/app2a/ctr/fcn")
self.assertEqual(filter_url('http://domain2.com/app2a/ctr/f2a',
domain=('app2b', None), out=True), "/app2a/ctr")
self.assertEqual(filter_url('http://domain2.com/app2a/c2a/f2a',
domain=('app2b', None), out=True), "/app2a")
self.assertEqual(filter_url('http://domain3a.com/'), '/app3/c3a/index')
self.assertEqual(
filter_url('http://domain3a.com/abc'), '/app3/c3a/abc')
self.assertEqual(
filter_url('http://domain3a.com/c3b'), '/app3/c3b/index')
self.assertEqual(
filter_url('http://domain3b.com/abc'), '/app3/c3b/abc')
self.assertEqual(filter_url('http://domain3a.com/app3/c3a/fcn',
domain=('app3', 'c3a'), out=True), "/fcn")
self.assertEqual(filter_url('http://domain3a.com/app3/c3a/fcn',
domain=('app3', 'c3b'), out=True), "/c3a/fcn")
self.assertEqual(filter_url('http://domain3a.com/app3/c3a/fcn',
domain=('app1', None), out=True), "/app3/c3a/fcn")
self.assertEqual(filter_url('http://domain4a.com/abc'), '/app4/c4/abc')
self.assertEqual(filter_url('https://domain4a.com/app4/c4/fcn',
domain=('app4', None), out=True), "/fcn")
self.assertEqual(filter_url('http://domain4a.com'), '/app4/c4/f4a')
self.assertEqual(filter_url('http://domain4b.com'), '/app4/c4/f4b')
self.assertEqual(filter_url('http://localhost/abc'), '/app5/c5/abc')
self.assertEqual(filter_url(
'http:///abc'), '/app5/c5/abc') # test null host => localhost
self.assertEqual(filter_url('https://localhost/app5/c5/fcn',
domain=('app5', None), out=True), "/fcn")
self.assertEqual(filter_url('http://domain6.com'), '/app6/c6/f6')
self.assertEqual(filter_url('https://domain6.com'), '/app6s/c6s/f6s')
self.assertEqual(filter_url('http://domain2.com/app3/c3a/f3',
domain=('app2b', None), out=True), "/app3/c3a/f3")
self.assertRaises(SyntaxError, filter_url, 'http://domain1.com/app1/c1/f1', domain=('app2b', None), out=True)
self.myassertRaisesRegex(SyntaxError, 'cross-domain conflict', filter_url,
'http://domain1.com/app1/c1/f1', domain=('app2b', None), out=True)
self.assertEqual(filter_url('http://domain1.com/app1/c1/f1', domain=(
'app2b', None), host='domain2.com', out=True), "/app1")
def test_router_domains_ed(self):
'''
Test URLs that map domains with exclusive_domain set
'''
routers = dict(
BASE=dict(
applications=['app1', 'app2', 'app2A',
'app3', 'app4', 'app5', 'app6'],
exclusive_domain=True,
domains={
# two domains to the same app
"domain1.com": "app1",
"www.domain1.com": "app1",
# same domain, two ports, to two apps
"domain2.com": "app2a",
"domain2.com:8080": "app2b",
# two domains, same app, two controllers
"domain3a.com": "app3/c3a",
"domain3b.com": "app3/c3b",
# two domains, same app & controller, two functions
"domain4a.com": "app4/c4/f4a",
"domain4b.com": "app4/c4/f4b",
# http vs https
"domain6.com:80": "app6",
"domain6.com:443": "app6s",
},
),
app1=dict(default_controller='c1', default_function='f1',
controllers=['c1'], exclusive_domain=True, ),
app2a=dict(default_controller='c2a',
default_function='f2a', controllers=['c2a'], ),
app2b=dict(default_controller='c2b',
default_function='f2b', controllers=['c2b'], ),
app3=dict(controllers=['c3a', 'c3b'], ),
app4=dict(default_controller='c4', controllers=['c4']),
app5=dict(default_controller='c5',
controllers=['c5'], domain='localhost'),
app6=dict(default_controller='c6',
default_function='f6', controllers=['c6'], ),
app6s=dict(default_controller='c6s',
default_function='f6s', controllers=['c6s'], ),
)
load(rdict=routers)
self.assertEqual(filter_url('http://domain1.com/abc'), '/app1/c1/abc')
self.assertEqual(
filter_url('http://domain1.com/c1/abc'), '/app1/c1/abc')
self.assertEqual(
filter_url('http://domain1.com/abc.html'), '/app1/c1/abc')
self.assertEqual(
filter_url('http://domain1.com/abc.css'), '/app1/c1/abc.css')
self.assertEqual(filter_url(
'http://domain1.com/index/abc'), "/app1/c1/index ['abc']")
self.assertEqual(
filter_url('http://domain2.com/app1'), "/app2a/c2a/app1")
self.assertEqual(filter_url('https://domain1.com/app1/ctr/fcn',
domain=('app1', None), out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://www.domain1.com/app1/ctr/fcn',
domain=('app1', None), out=True), "/ctr/fcn")
self.assertEqual(
filter_url('http://domain2.com/abc'), '/app2a/c2a/abc')
self.assertEqual(
filter_url('http://domain2.com:8080/abc'), '/app2b/c2b/abc')
self.assertEqual(filter_url('http://domain2.com/app2a/ctr/fcn',
domain=('app2a', None), out=True), "/ctr/fcn")
self.assertEqual(filter_url('http://domain2.com/app2a/ctr/f2a',
domain=('app2a', None), out=True), "/ctr")
self.assertEqual(filter_url('http://domain2.com/app2a/c2a/f2a',
domain=('app2a', None), out=True), "/")
self.assertEqual(filter_url('http://domain2.com/app2a/c2a/fcn',
domain=('app2a', None), out=True), "/fcn")
self.assertRaises(SyntaxError, filter_url, 'http://domain2.com/app2a/ctr/fcn', domain=('app2b', None), out=True)
self.assertRaises(SyntaxError, filter_url, 'http://domain2.com/app2a/ctr/f2a', domain=('app2b', None), out=True)
self.assertRaises(SyntaxError, filter_url, 'http://domain2.com/app2a/c2a/f2a', domain=('app2b', None), out=True)
self.assertEqual(filter_url('http://domain3a.com/'), '/app3/c3a/index')
self.assertEqual(
filter_url('http://domain3a.com/abc'), '/app3/c3a/abc')
self.assertEqual(
filter_url('http://domain3a.com/c3b'), '/app3/c3b/index')
self.assertEqual(
filter_url('http://domain3b.com/abc'), '/app3/c3b/abc')
self.assertEqual(filter_url('http://domain3a.com/app3/c3a/fcn',
domain=('app3', 'c3a'), out=True), "/fcn")
self.assertEqual(filter_url('http://domain3a.com/app3/c3a/fcn',
domain=('app3', 'c3b'), out=True), "/c3a/fcn")
self.assertRaises(SyntaxError, filter_url, 'http://domain3a.com/app3/c3a/fcn', domain=('app1', None), out=True)
self.assertEqual(filter_url('http://domain4a.com/abc'), '/app4/c4/abc')
self.assertEqual(filter_url('https://domain4a.com/app4/c4/fcn',
domain=('app4', None), out=True), "/fcn")
self.assertEqual(filter_url('http://domain4a.com'), '/app4/c4/f4a')
self.assertEqual(filter_url('http://domain4b.com'), '/app4/c4/f4b')
self.assertEqual(filter_url('http://localhost/abc'), '/app5/c5/abc')
self.assertEqual(filter_url(
'http:///abc'), '/app5/c5/abc') # test null host => localhost
self.assertEqual(filter_url('https://localhost/app5/c5/fcn',
domain=('app5', None), out=True), "/fcn")
self.assertEqual(filter_url('http://domain6.com'), '/app6/c6/f6')
self.assertEqual(filter_url('https://domain6.com'), '/app6s/c6s/f6s')
self.assertRaises(SyntaxError, filter_url, 'http://domain2.com/app3/c3a/f3', domain=('app2b', None), out=True)
self.assertRaises(SyntaxError, filter_url, 'http://domain1.com/app1/c1/f1', domain=('app2b', None), out=True)
self.myassertRaisesRegex(SyntaxError, 'cross-domain conflict', filter_url,
'http://domain1.com/app1/c1/f1', domain=('app2b', None), out=True)
self.assertEqual(filter_url('http://domain1.com/app1/c1/f1', domain=(
'app2b', None), host='domain2.com', out=True), "/app1")
def test_router_raise(self):
'''
Test URLs that raise exceptions
'''
# test non-exception variants
router_raise = dict(
init=dict(
controllers=[],
),
welcome=dict(
map_hyphen=False,
),
)
load(rdict=router_raise)
self.assertEqual(
filter_url('http://domain.com/ctl'), "/init/ctl/index")
self.assertEqual(
filter_url('http://domain.com/default/fcn'), "/init/default/fcn")
self.assertEqual(filter_url(
'http://domain.com/default/fcn.ext'), "/init/default/fcn.ext")
self.assertEqual(filter_url('http://domain.com/default/fcn/arg'),
"/init/default/fcn ['arg']")
# now raise-HTTP variants
self.assertRaises(HTTP, filter_url, 'http://domain.com/bad!ctl')
self.assertRaises(HTTP, filter_url, 'http://domain.com/ctl/bad!fcn')
self.assertRaises(
HTTP, filter_url, 'http://domain.com/ctl/fcn.bad!ext')
self.assertRaises(
HTTP, filter_url, 'http://domain.com/ctl/fcn/bad!arg')
self.myassertRaisesRegex(HTTP, '400.*invalid controller', filter_url, 'http://domain.com/init/bad!ctl')
self.myassertRaisesRegex(HTTP, '400.*invalid function', filter_url,
'http://domain.com/init/ctlr/bad!fcn')
self.myassertRaisesRegex(HTTP, '400.*invalid extension', filter_url,
'http://domain.com/init/ctlr/fcn.bad!ext')
self.myassertRaisesRegex(HTTP, '400.*invalid arg', filter_url,
'http://domain.com/appc/init/fcn/bad!arg')
self.assertEqual(filter_url('http://domain.com/welcome/default/fcn_1'),
"/welcome/default/fcn_1")
self.assertRaises(
HTTP, filter_url, 'http://domain.com/welcome/default/fcn-1')
self.myassertRaisesRegex(HTTP, '400.*invalid function', filter_url,
'http://domain.com/welcome/default/fcn-1')
def test_router_out(self):
'''
Test basic outgoing routing
'''
router_out = dict(
BASE=dict(),
init=dict(controllers=['default', 'ctr'], ),
app=dict(),
)
load(rdict=router_out)
self.assertEqual(filter_url(
'https://domain.com/app/ctr/fcn', out=True), "/app/ctr/fcn")
self.assertEqual(filter_url(
'https://domain.com/init/ctr/fcn', out=True), "/ctr/fcn")
self.assertEqual(filter_url(
'https://domain.com/init/ctr/fcn', out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/init/static/file',
out=True), "/init/static/file")
self.assertEqual(filter_url('https://domain.com/init/static/index',
out=True), "/init/static/index")
self.assertEqual(filter_url(
'https://domain.com/init/default/index', out=True), "/")
self.assertEqual(
filter_url('https://domain.com/init/ctr/index', out=True), "/ctr")
self.assertEqual(filter_url('http://domain.com/init/default/fcn?query',
out=True), "/fcn?query")
self.assertEqual(filter_url('http://domain.com/init/default/fcn#anchor', out=True), "/fcn#anchor")
self.assertEqual(
filter_url(
'http://domain.com/init/default/fcn?query#anchor', out=True),
"/fcn?query#anchor")
router_out['BASE']['map_static'] = True
load(rdict=router_out)
self.assertEqual(filter_url(
'https://domain.com/init/static/file', out=True), "/static/file")
self.assertEqual(filter_url('https://domain.com/init/static/index',
out=True), "/static/index")
router_out['init']['map_static'] = None
load(rdict=router_out)
self.assertEqual(filter_url('https://domain.com/init/static/file',
out=True), "/init/static/file")
self.assertEqual(filter_url('https://domain.com/init/static/index',
out=True), "/init/static/index")
def test_router_functions(self):
'''
Test function-omission with functions=[something]
'''
router_functions = dict(
BASE=dict(
applications=['init', 'app', 'app2'],
default_application='app',
),
init=dict(
controllers=['default'],
),
app=dict(
controllers=['default', 'ctr'],
functions=dict(
default=['index', 'user', 'help'],
ctr=['ctrf1', 'ctrf2', 'ctrf3'],
),
default_function=dict(
default='index',
ctr='ctrf1',
),
),
app2=dict(
controllers=['default', 'ctr'],
functions=['index', 'user', 'help'],
),
)
load(rdict=router_functions)
# outbound
self.assertEqual(str(
URL(a='init', c='default', f='f', args=['arg1'])), "/init/f/arg1")
self.assertEqual(str(URL(a='init', c='default', f='index',
args=['arg1'])), "/init/index/arg1")
self.assertEqual(
str(URL(a='app', c='default', f='index', args=['arg1'])), "/arg1")
self.assertEqual(str(
URL(a='app', c='default', f='user', args=['arg1'])), "/user/arg1")
self.assertEqual(str(URL(
a='app', c='default', f='user', args=['index'])), "/user/index")
self.assertEqual(str(URL(
a='app', c='default', f='index', args=['index'])), "/index/index")
self.assertEqual(str(URL(
a='app', c='default', f='index', args=['init'])), "/index/init")
self.assertEqual(str(
URL(a='app', c='default', f='index', args=['ctr'])), "/index/ctr")
self.assertEqual(str(
URL(a='app', c='ctr', f='index', args=['arg'])), "/ctr/index/arg")
self.assertEqual(
str(URL(a='app', c='ctr', f='ctrf1', args=['arg'])), "/ctr/arg")
self.assertEqual(str(URL(
a='app', c='ctr', f='ctrf1', args=['ctrf2'])), "/ctr/ctrf1/ctrf2")
self.assertEqual(str(URL(
a='app2', c='default', f='index', args=['arg1'])), "/app2/arg1")
self.assertEqual(str(URL(a='app2', c='default', f='user',
args=['arg1'])), "/app2/user/arg1")
self.assertEqual(str(URL(a='app2', c='default', f='user',
args=['index'])), "/app2/user/index")
self.assertEqual(str(URL(a='app2', c='default', f='index',
args=['index'])), "/app2/index/index")
self.assertEqual(str(URL(a='app2', c='default', f='index',
args=['init'])), "/app2/index/init")
self.assertEqual(str(URL(a='app2', c='default', f='index',
args=['ctr'])), "/app2/index/ctr")
# outbound - with extensions
self.assertEqual(
str(URL(a='app', c='default', f='index.json')), "/index.json")
self.assertEqual(
str(URL(a='app', c='default', f='index.json', args=['arg1'])), "/index.json/arg1")
self.assertEqual(str(
URL(a='app', c='default', f='user.json')), "/user.json")
self.assertEqual(str(
URL(a='app', c='default', f='user.json', args=['arg1'])), "/user.json/arg1")
self.assertEqual(str(URL(
a='app', c='default', f='user.json', args=['index'])), "/user.json/index")
self.assertEqual(str(
URL(a='app', c='default', f='index.json', args=['ctr'])), "/index.json/ctr")
self.assertEqual(
str(URL(a='app', c='ctr', f='ctrf1.json', args=['arg'])), "/ctr/ctrf1.json/arg")
self.assertEqual(str(URL(
a='app2', c='default', f='index.json', args=['arg1'])), "/app2/index.json/arg1")
self.assertEqual(str(URL(
a='app2', c='ctr', f='index.json', args=['arg1'])), "/app2/ctr/index.json/arg1")
# inbound
self.assertEqual(
filter_url('http://d.com/arg'), "/app/default/index ['arg']")
self.assertEqual(filter_url('http://d.com/user'), "/app/default/user")
self.assertEqual(
filter_url('http://d.com/user/arg'), "/app/default/user ['arg']")
self.assertEqual(filter_url('http://d.com/ctr'), "/app/ctr/ctrf1")
self.assertEqual(
filter_url('http://d.com/ctr/arg'), "/app/ctr/ctrf1 ['arg']")
self.assertEqual(filter_url(
'http://d.com/app2/arg'), "/app2/default/index ['arg']")
self.assertEqual(
filter_url('http://d.com/app2/user'), "/app2/default/user")
self.assertEqual(filter_url(
'http://d.com/app2/user/arg'), "/app2/default/user ['arg']")
self.assertEqual(
filter_url('http://d.com/app2/ctr'), "/app2/ctr/index")
self.assertEqual(filter_url(
'http://d.com/app2/ctr/index/arg'), "/app2/ctr/index ['arg']")
self.assertEqual(
filter_url('http://d.com/app2/ctr/arg'), "/app2/ctr/arg")
# inbound - with extensions
self.assertEqual(
filter_url('http://d.com/index.json'), "/app/default/index.json")
self.assertEqual(filter_url('http://d.com/user.json'), "/app/default/user.json")
self.assertEqual(
filter_url('http://d.com/user.json/arg'), "/app/default/user.json ['arg']")
self.assertEqual(
filter_url('http://d.com/user.json/index'), "/app/default/user.json ['index']")
self.assertEqual(
filter_url('http://d.com/index.json/ctr'), "/app/default/index.json ['ctr']")
self.assertEqual(
filter_url('http://d.com/ctr/ctrf1.json/arg'), "/app/ctr/ctrf1.json ['arg']")
self.assertEqual(filter_url(
'http://d.com/app2/index.json/arg'), "/app2/default/index.json ['arg']")
self.assertEqual(
filter_url('http://d.com/app2/user.json'), "/app2/default/user.json")
self.assertEqual(filter_url(
'http://d.com/app2/user.json/arg'), "/app2/default/user.json ['arg']")
self.assertEqual(filter_url(
'http://d.com/app2/ctr/index.json/arg'), "/app2/ctr/index.json ['arg']")
self.assertEqual(
filter_url('http://d.com/app2/ctr/arg'), "/app2/ctr/arg")
def test_router_functions2(self):
'''
Test more functions=[something]
'''
router_functions = dict(
BASE=dict(
default_application='init',
applications='INIT',
),
init=dict(
#default_controller = 'default',
controllers=['default', 'ctr'],
#default_function = 'index',
functions=['index', 'user', 'register', 'basicRegister',
'download', 'call', 'data', 'error']
),
)
load(rdict=router_functions)
# outbound
self.assertEqual(str(
URL(a='init', c='default', f='index', args=['arg1'])), "/arg1")
self.assertEqual(str(URL(
a='init', c='default', f='user', args=['arg1'])), "/user/arg1")
self.assertEqual(str(URL(
a='init', c='default', f='user', args=['index'])), "/user/index")
self.assertEqual(str(URL(a='init', c='default', f='index',
args=['index'])), "/index/index")
self.assertEqual(str(
URL(a='init', c='default', f='index', args=['init'])), "/init")
self.assertEqual(str(URL(
a='init', c='default', f='index', args=['ctr'])), "/index/ctr")
self.assertEqual(str(URL(
a='init', c='ctr', f='index', args=['arg'])), "/ctr/index/arg")
self.assertEqual(str(URL(
a='init', c='ctr', f='ctrf1', args=['arg'])), "/ctr/ctrf1/arg")
self.assertEqual(str(URL(a='init', c='ctr', f='ctrf1',
args=['ctrf2'])), "/ctr/ctrf1/ctrf2")
self.assertEqual(
str(URL(a='init', c='default', f='register')), "/register")
# inbound
self.assertEqual(
filter_url('http://d.com/arg'), "/init/default/index ['arg']")
self.assertEqual(filter_url('http://d.com/user'), "/init/default/user")
self.assertEqual(
filter_url('http://d.com/user/arg'), "/init/default/user ['arg']")
self.assertEqual(filter_url('http://d.com/ctr'), "/init/ctr/index")
self.assertEqual(filter_url(
'http://d.com/ctr/ctrf1/arg'), "/init/ctr/ctrf1 ['arg']")
def test_router_hyphen(self):
'''
Test hyphen conversion
'''
router_hyphen = dict(
BASE=dict(
applications=['init', 'app1', 'app2'],
),
init=dict(
controllers=['default'],
),
app1=dict(
controllers=['default'],
map_hyphen=True,
),
app2=dict(
controllers=['default'],
map_hyphen=False,
),
)
load(rdict=router_hyphen)
self.assertEqual(filter_url(
'http://domain.com/init/default/fcn_1', out=True), "/fcn_1")
self.assertEqual(
filter_url('http://domain.com/static/filename-with_underscore'),
norm_root("%s/applications/init/static/filename-with_underscore" % root))
self.assertEqual(
filter_url('http://domain.com/init/static/filename-with_underscore', out=True),
"/init/static/filename-with_underscore")
self.assertEqual(filter_url('http://domain.com/app2/fcn_1'),
"/app2/default/fcn_1")
self.assertEqual(
filter_url('http://domain.com/app2/ctr/fcn_1',
domain=('app2', None), out=True),
"/ctr/fcn_1")
self.assertEqual(
filter_url('http://domain.com/app2/static/filename-with_underscore', domain=('app2', None), out=True),
"/app2/static/filename-with_underscore")
self.assertEqual(
filter_url(
'http://domain.com/app2/static/filename-with_underscore'),
norm_root("%s/applications/app2/static/filename-with_underscore" % root))
from gluon.globals import current
current.response.static_version = None
self.assertEqual(str(URL(a='init', c='default', f='a_b')), "/a_b")
self.assertEqual(str(URL(a='app1', c='default', f='a_b')), "/app1/a-b")
self.assertEqual(str(URL(a='app2', c='default', f='a_b')), "/app2/a_b")
self.assertEqual(
str(URL(a='app1', c='static', f='a/b_c')), "/app1/static/a/b_c")
self.assertEqual(
str(URL(a='app1', c='static/a', f='b_c')), "/app1/static/a/b_c")
self.assertEqual(
str(URL(a='app2', c='static', f='a/b_c')), "/app2/static/a/b_c")
self.assertEqual(
str(URL(a='app2', c='static/a', f='b_c')), "/app2/static/a/b_c")
def test_router_lang(self):
'''
Test language specifications
'''
router_lang = dict(
BASE=dict(default_application='admin'),
welcome=dict(),
admin=dict(
controllers=['default', 'ctr'],
languages=['en', 'it', 'it-it'], default_language='en',
),
examples=dict(
languages=['en', 'it', 'it-it'], default_language='en',
),
)
load(rdict=router_lang)
self.assertEqual(filter_url('http://domain.com/index/abc'),
"/admin/default/index ['abc'] (en)")
self.assertEqual(filter_url('http://domain.com/en/abc/def'),
"/admin/default/abc ['def'] (en)")
self.assertEqual(filter_url('http://domain.com/it/abc/def'),
"/admin/default/abc ['def'] (it)")
self.assertEqual(filter_url('http://domain.com/it-it/abc/def'),
"/admin/default/abc ['def'] (it-it)")
self.assertEqual(filter_url('http://domain.com/index/a%20bc'),
"/admin/default/index ['a bc'] (en)")
self.assertEqual(filter_url('http://domain.com/static/file'),
norm_root("%s/applications/admin/static/file" % root))
self.assertEqual(filter_url('http://domain.com/en/static/file'),
norm_root("%s/applications/admin/static/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/en/static/file'),
norm_root("%s/applications/examples/static/en/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/static/file'),
norm_root("%s/applications/examples/static/en/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/it/static/file'),
norm_root("%s/applications/examples/static/it/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/it-it/static/file'),
norm_root("%s/applications/examples/static/file" % root))
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='en', out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it', out=True), "/it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it-it', out=True), "/it-it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='en', out=True), "/admin/en/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it', out=True), "/admin/it/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it-it', out=True), "/admin/it-it/static/file")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='it', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='es', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
language='en', out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
language='it', out=True), "/it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
language='it-it', out=True), "/it-it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
language='en', out=True), "/admin/en/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
language='it', out=True), "/admin/it/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
language='it-it', out=True), "/admin/it-it/static/file")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
language='it', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
language='es', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it', language='en', out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='en', language='it', out=True), "/it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it', language='it-it', out=True), "/it-it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it', language='en', out=True), "/admin/en/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it', language='it', out=True), "/admin/it/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it', language='it-it', out=True), "/admin/it-it/static/file")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='it', language='it', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='it', language='es', out=True), "/welcome/ctr/fcn")
router_lang['admin']['map_static'] = True
load(rdict=router_lang)
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='en', out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it', out=True), "/it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it-it', out=True), "/it-it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='en', out=True), "/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it', out=True), "/it/static/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it-it', out=True), "/it-it/static/file")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='it', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='es', out=True), "/welcome/ctr/fcn")
router_lang['admin']['map_static'] = False
router_lang['examples']['map_static'] = False
load(rdict=router_lang)
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='en', out=True), "/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it', out=True), "/it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/ctr/fcn',
lang='it-it', out=True), "/it-it/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='en', out=True), "/admin/static/en/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it', out=True), "/admin/static/it/file")
self.assertEqual(filter_url('https://domain.com/admin/static/file',
lang='it-it', out=True), "/admin/static/it-it/file")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='it', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('https://domain.com/welcome/ctr/fcn',
lang='es', out=True), "/welcome/ctr/fcn")
self.assertEqual(filter_url('http://domain.com/static/file'),
norm_root("%s/applications/admin/static/file" % root))
self.assertEqual(filter_url('http://domain.com/en/static/file'),
norm_root("%s/applications/admin/static/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/en/static/file'),
norm_root("%s/applications/examples/static/en/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/static/file'),
norm_root("%s/applications/examples/static/en/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/it/static/file'),
norm_root("%s/applications/examples/static/it/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/it-it/static/file'),
norm_root("%s/applications/examples/static/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/static/en/file').replace('/', os.sep),
norm_root("%s/applications/examples/static/en/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/static/it/file').replace('/', os.sep),
norm_root("%s/applications/examples/static/it/file" % root))
self.assertEqual(filter_url('http://domain.com/examples/static/it-it/file').replace('/', os.sep),
norm_root("%s/applications/examples/static/it-it/file" % root))
def test_router_get_effective(self):
'''
Test get_effective_router
'''
router_get_effective = dict(
BASE=dict(
default_application='a1',
applications=['a1', 'a2'],
),
a1=dict(
controllers=['c1a', 'c1b', 'default'],
),
a2=dict(
default_controller='c2',
controllers=[],
),
a3=dict(
default_controller='c2',
controllers=['c1'],
),
a4=dict(
default_function='f1',
functions=['f2'],
),
)
load(rdict=router_get_effective)
self.assertEqual(
get_effective_router('BASE').applications, set(['a1', 'a2']))
self.assertEqual(
get_effective_router('BASE').default_application, 'a1')
self.assertEqual(get_effective_router('BASE').domains, {})
self.assertEqual(get_effective_router('a1').applications, None)
self.assertEqual(get_effective_router('a1').default_application, None)
self.assertEqual(get_effective_router('a1').domains, None)
self.assertEqual(
get_effective_router('a1').default_controller, "default")
self.assertEqual(get_effective_router('a2').default_application, None)
self.assertEqual(get_effective_router('a2').default_controller, "c2")
self.assertEqual(get_effective_router(
'a1').controllers, set(['c1a', 'c1b', 'default', 'static']))
self.assertEqual(get_effective_router('a2').controllers, set())
self.assertEqual(get_effective_router(
'a3').controllers, set(['c1', 'c2', 'static']))
self.assertEqual(get_effective_router(
'a4').functions, dict(default=set(['f1', 'f2'])))
self.assertEqual(get_effective_router('xx'), None)
def test_router_error(self):
'''
Test rewrite of HTTP errors
'''
router_err = dict()
load(rdict=router_err)
self.assertEqual(filter_err(200), 200)
self.assertEqual(filter_err(399), 399)
self.assertEqual(filter_err(400), 400)
def test_router_static_path(self):
'''
Test validation of static paths
Stock pattern: file_match = r'([-+=@$%\w]+[./]?)+$'
'''
load(rdict=dict())
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to/static" % root))
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to/st~tic')
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to--/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to--/static" % root))
self.assertEqual(filter_url('http://domain.com/welcome/static/path/==to--/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/==to--/static" % root))
self.assertEqual(filter_url('http://domain.com/welcome/static/path/-+=@$%/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/-+=@$%%/static" % root))
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to/.static')
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to/s..tatic')
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to//static')
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to/#static')
router_static = dict(
BASE=dict(
file_match=r'([-+=@$%#\w]+[./]?)+$', # legal static path
),
)
load(rdict=router_static)
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to/#static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to/#static" % root))
router_static = dict(
BASE=dict(
file_match=r'[-+=@$%#.\w]+$', # legal static path element
),
)
load(rdict=router_static)
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to/static" % root))
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to/st~tic')
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to--/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to--/static" % root))
self.assertEqual(filter_url('http://domain.com/welcome/static/path/==to--/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/==to--/static" % root))
self.assertEqual(filter_url('http://domain.com/welcome/static/path/-+=@$%/static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/-+=@$%%/static" % root))
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/to//static')
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to/#static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to/#static" % root))
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/./static')
self.assertRaises(HTTP, filter_url, 'http://domain.com/welcome/static/bad/path/../static')
self.assertEqual(filter_url('http://domain.com/welcome/static/path/.../static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/.../static" % root))
self.assertEqual(filter_url('http://domain.com/welcome/static/path/to/.static').replace('/', os.sep),
norm_root("%s/applications/welcome/static/path/to/.static" % root))
def test_router_args(self):
'''
Test URL args parsing/generation
'''
load(rdict=dict())
self.assertEqual(filter_url('http://domain.com/init/default/f/arg1'),
"/init/default/f ['arg1']")
self.assertEqual(filter_url('http://domain.com/init/default/f/arg1/'),
"/init/default/f ['arg1']")
self.assertEqual(filter_url('http://domain.com/init/default/f/arg1//'),
"/init/default/f ['arg1', '']")
self.assertEqual(filter_url('http://domain.com/init/default/f//arg1'),
"/init/default/f ['', 'arg1']")
self.assertEqual(
filter_url('http://domain.com/init/default/f/arg1/arg2'),
"/init/default/f ['arg1', 'arg2']")
self.assertEqual(
filter_url('http://domain.com/init/default/f/arg1//arg2'),
"/init/default/f ['arg1', '', 'arg2']")
self.assertEqual(
filter_url('http://domain.com/init/default/f/arg1//arg3/'),
"/init/default/f ['arg1', '', 'arg3']")
self.assertEqual(
filter_url('http://domain.com/init/default/f/arg1//arg3//'),
"/init/default/f ['arg1', '', 'arg3', '']")
self.assertEqual(
filter_url('http://domain.com/init/default/f', out=True), "/f")
self.assertEqual(map_url_out(None, None, 'init', 'default',
'f', None, None, None, None, None), "/f")
self.assertEqual(map_url_out(None, None, 'init', 'default',
'f', [], None, None, None, None), "/f")
self.assertEqual(map_url_out(None, None, 'init', 'default',
'f', ['arg1'], None, None, None, None), "/f")
self.assertEqual(map_url_out(None, None, 'init', 'default',
'f', ['arg1', ''], None, None, None, None), "/f")
self.assertEqual(
str(URL(a='init', c='default', f='f', args=None)), "/f")
self.assertEqual(
str(URL(a='init', c='default', f='f', args=['arg1'])), "/f/arg1")
self.assertEqual(str(URL(
a='init', c='default', f='f', args=['arg1', ''])), "/f/arg1//")
self.assertEqual(str(URL(a='init', c='default', f='f',
args=['arg1', '', 'arg3'])), "/f/arg1//arg3")
self.assertEqual(str(
URL(a='init', c='default', f='f', args=['ar g'])), "/f/ar%20g")
self.assertEqual(str(
URL(a='init', c='default', f='f', args=['årg'])), "/f/%C3%A5rg")
self.assertEqual(URL(a='init', c='default', f='fünc'), "/fünc")
self.assertEqual(
to_bytes(URL(a='init', c='default', f='fünc')), b"/f\xc3\xbcnc")
def test_routes_anchor(self):
'''
Test URL with anchor
'''
self.assertEqual(
str(URL(a='a', c='c', f='f', anchor='anchor')), "/a/c/f#anchor")
load(rdict=dict())
self.assertEqual(
str(URL(a='a', c='c', f='f', anchor='anchor')), "/a/c/f#anchor")
args = ['a1', 'a2']
self.assertEqual(
str(URL(a='a', c='c', f='f', args=args, anchor='anchor')),
"/a/c/f/a1/a2#anchor")
vars = dict(v1=1, v2=2)
self.assertEqual(
str(URL(a='a', c='c', f='f', vars=vars, anchor='anchor')),
"/a/c/f?v1=1&v2=2#anchor")
self.assertEqual(
str(URL(
a='a', c='c', f='f', args=args, vars=vars, anchor='anchor')),
"/a/c/f/a1/a2?v1=1&v2=2#anchor")
self.assertEqual(str(URL(a='init', c='default', f='index')),
"/")
self.assertEqual(str(URL(a='init', c='default', f='f')),
"/f")
self.assertEqual(
str(URL(a='init', c='default', f='index', anchor='anchor')),
"/#anchor")
self.assertEqual(
str(URL(a='init', c='default', f='f', anchor='anchor')),
"/f#anchor")
def test_router_prefix(self):
'''
Test path_prefix
'''
router_path_prefix = dict(
BASE=dict(
default_application='a1',
applications=['a1', 'a2'],
path_prefix='/path/to/apps',
),
a1=dict(
controllers=['c1a', 'c1b', 'default'],
),
a2=dict(
default_controller='c2',
controllers=[],
),
)
load(rdict=router_path_prefix)
self.assertEqual(str(URL(a='a1', c='c1a', f='f')),
"/path/to/apps/c1a/f")
self.assertEqual(str(URL(a='a2', c='c', f='f')),
"/path/to/apps/a2/c/f")
self.assertEqual(str(URL(a='a2', c='c2', f='f')),
"/path/to/apps/a2/c2/f")
self.assertEqual(
filter_url('http://domain.com/a1/'), "/a1/default/index")
self.assertEqual(filter_url(
'http://domain.com/path/to/apps/a1/'), "/a1/default/index")
self.assertEqual(filter_url(
'http://domain.com/path/to/a1/'), "/a1/default/path ['to', 'a1']")
def test_router_absolute(self):
'''
Test absolute URL
'''
load(rdict=dict())
r = Storage()
r.env = Storage()
r.env.http_host = 'domain.com'
r.env.wsgi_url_scheme = 'httpx' # distinguish incoming scheme
self.assertEqual(str(URL(r=r, a='a', c='c', f='f')), "/a/c/f")
self.assertEqual(str(URL(r=r, a='a', c='c', f='f', host=True)),
"httpx://domain.com/a/c/f")
self.assertEqual(str(URL(r=r, a='a', c='c', f='f', host='host.com')),
"httpx://host.com/a/c/f")
self.assertEqual(str(URL(r=r, a='a', c='c', f='f', scheme=True)),
"httpx://domain.com/a/c/f")
self.assertEqual(str(URL(r=r, a='a', c='c', f='f', scheme=False)),
"/a/c/f")
self.assertEqual(str(URL(r=r, a='a', c='c', f='f', scheme='https')),
"https://domain.com/a/c/f")
self.assertEqual(str(URL(r=r, a='a', c='c', f='f', scheme='wss')),
"wss://domain.com/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', scheme=True, host=True)),
"httpx://domain.com/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', scheme='https', host=True)),
"https://domain.com/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', scheme=False, host=True)),
"httpx://domain.com/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', scheme=True, host='host.com')),
"httpx://host.com/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', scheme=False, host='host.com')),
"httpx://host.com/a/c/f")
self.assertEqual(str(URL(r=r, a='a', c='c', f='f', port=1234)),
"httpx://domain.com:1234/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', scheme=True, port=1234)),
"httpx://domain.com:1234/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', host='host.com', port=1234)),
"httpx://host.com:1234/a/c/f")
self.assertEqual(
str(URL(r=r, a='a', c='c', f='f', scheme='wss',
host='host.com', port=1234)),
"wss://host.com:1234/a/c/f")
def test_request_uri(self):
'''
Test REQUEST_URI in env
'''
load(rdict=dict())
self.assertEqual(
filter_url('http://domain.com/abc', env=True).request_uri,
'/init/default/abc')
self.assertEqual(
filter_url('http://domain.com/abc?def', env=True).request_uri,
'/init/default/abc?def')
self.assertEqual(
filter_url('http://domain.com/index/abc', env=True).request_uri,
"/init/default/index/abc")
self.assertEqual(
filter_url('http://domain.com/abc/def', env=True).request_uri,
"/init/default/abc/def")
self.assertEqual(
filter_url('http://domain.com/index/a%20bc', env=True).request_uri,
"/init/default/index/a%20bc")
def test_request_collide(self):
'''
Test controller-app name collision: admin vs welcome/admin
'''
router_collide = dict(
BASE=dict(
domains={
'ex.domain.com': 'examples',
'ad.domain.com': 'admin',
'welcome.com': 'welcome',
'www.welcome.com': 'welcome',
},
exclusive_domain=True,
),
)
load(rdict=router_collide)
# basic inbound
self.assertEqual(
filter_url('http://ex.domain.com'), '/examples/default/exdef')
self.assertEqual(
filter_url('http://ad.domain.com'), '/admin/default/index')
self.assertEqual(
filter_url('http://welcome.com'), '/welcome/default/index')
self.assertEqual(
filter_url('http://www.welcome.com'), '/welcome/default/index')
# basic outbound
self.assertEqual(filter_url('http://ex.domain.com/examples/default/exdef', domain='examples', out=True), "/")
self.assertEqual(filter_url('http://ad.domain.com/admin/default/index',
domain='admin', out=True), "/")
self.assertEqual(filter_url('http://welcome.com/welcome/default/index',
domain='welcome', out=True), "/")
self.assertEqual(filter_url('http://www.welcome.com/welcome/default/index', domain='welcome', out=True), "/")
# inbound
self.assertEqual(
filter_url('http://welcome.com/admin'), '/welcome/admin/index')
self.assertEqual(
filter_url('http://welcome.com/f1'), '/welcome/default/f1')
self.assertEqual(
filter_url('http://ad.domain.com/shell'), '/admin/shell/index')
self.assertEqual(
filter_url('http://ad.domain.com/f1'), '/admin/default/f1')
# outbound
self.assertEqual(filter_url('http://welcome.com/welcome/other/index',
domain='welcome', out=True), "/other")
self.assertEqual(filter_url('http://welcome.com/welcome/admin/index',
domain='welcome', out=True), "/admin")
self.assertEqual(filter_url('http://ad.domain.com/admin/shell/index',
domain='admin', out=True), "/shell")
self.assertEqual(filter_url('http://ad.domain.com/admin/default/f1',
domain='admin', out=True), "/f1")
router_collide['BASE']['exclusive_domain'] = False
load(rdict=router_collide)
self.assertEqual(filter_url('http://welcome.com/welcome/admin/index',
domain='welcome', out=True), "/welcome/admin")
| 50.300192
| 133
| 0.555676
| 8,991
| 78,418
| 4.768658
| 0.039373
| 0.156385
| 0.171429
| 0.194239
| 0.858962
| 0.830997
| 0.812945
| 0.79151
| 0.76049
| 0.722052
| 0
| 0.014636
| 0.2655
| 78,418
| 1,558
| 134
| 50.332478
| 0.729748
| 0.030758
| 0
| 0.583832
| 0
| 0
| 0.31979
| 0.042168
| 0
| 0
| 0
| 0
| 0.378743
| 1
| 0.021707
| false
| 0
| 0.010479
| 0.000749
| 0.03518
| 0.000749
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
135bd8aa131ecaaf92681aea0188e54a8f1a498f
| 23,272
|
py
|
Python
|
randlov1998/tasks_gotorewards.py
|
chrisdembia/agent-bicycle
|
1ecc3fcad8504385e9e85ccbc539464cb4e6c4e6
|
[
"MIT"
] | 10
|
2015-07-07T05:14:15.000Z
|
2020-06-20T00:57:38.000Z
|
randlov1998/tasks_gotorewards.py
|
chrisdembia/agent-bicycle
|
1ecc3fcad8504385e9e85ccbc539464cb4e6c4e6
|
[
"MIT"
] | null | null | null |
randlov1998/tasks_gotorewards.py
|
chrisdembia/agent-bicycle
|
1ecc3fcad8504385e9e85ccbc539464cb4e6c4e6
|
[
"MIT"
] | 6
|
2015-07-07T05:14:16.000Z
|
2021-04-25T02:07:47.000Z
|
import numpy as np
import pybrain.rl.environments
from pybrain.utilities import one_to_n
from environment import Environment
# The agent's actions are T and d.
# TODO where do we set up the generalization?
# TODO must pass omegadd to the learner.
# TODO the tiling might be achieved by implementing Task.getObservation.
# TODO states and actions are converted to int's within sarsa.py, ...what does
# this mean?
# TODO might need to use NFQ instead of Q or Sarsa.
# TODO NFQ might used a fix value of alpha as 0.5.
# TODO set epsilon for epsilon-greedy learning using learner.explorer.epsilon.
# TODO pybrain has limited examples of doing RL using continuous states and
# value-based learners (generalizing). Then we can use ActionValueNetwork, but
# it's not clear to me yet how this 'discretizes'/generalizes the state space.
class BalanceTask(pybrain.rl.environments.EpisodicTask):
    """The rider is to simply balance the bicycle while moving with the
    prescribed speed.

    This class is heavily guided by
    pybrain.rl.environments.cartpole.balancetask.BalanceTask.

    Episodes end when the bicycle tilts past `max_tilt` (reward -1) or when
    `max_time` elapses; every other step yields 0 reward.
    """
    # See Randlov's code. Paper and thesis say 12 degrees, but his code uses
    # pi/15. These are actually equivalent.
    #max_tilt = 12.0 * np.pi / 180.0
    max_tilt = np.pi / 15.0
    # 9 discrete actions: 3 torque levels x 3 displacement levels
    # (see performAction for the decoding).
    nactions = 9
    def __init__(self, butt_disturbance_amplitude=0.02, only_steer=False,
            max_time=1000.0):
        """
        Parameters
        ----------
        butt_disturbance_amplitude : float; optional
            In meters. Amplitude of the random rider-displacement noise
            added to `d` in performAction.
        only_steer : bool; optional
            If True, the action controls only the steering torque T and the
            displacement d is fixed at 0 (no disturbance applied either).
        max_time : float; optional
            Episode time limit, in the environment's time units
            (compared against env.time_step * step count in isFinished).
        """
        super(BalanceTask, self).__init__(Environment())
        # Keep track of time in case we want to end episodes based on number of
        # time steps.
        self._butt_disturbance_amplitude = butt_disturbance_amplitude
        self.only_steer = only_steer
        self.max_time = max_time
        self.t = 0
        # TODO Sensor limits to normalize the sensor readings.
        # TODO Actor limits.
        #T_limits = (-2, 2) # Newtons.
        #d_limits = (-0.02, 0.02) # meters.
        ## None for sensor limits; does not normalize sensor values.
        ## outdim should be set to the length of the sensors vector.
        #self.setScaling([None] * self.env.outdim, [T_limits, d_limits])
        # Histogram of how many times each of the 9 actions was chosen.
        self.action_history = np.zeros(self.nactions)
    @property
    def indim(self):
        # One action input: the integer action index.
        return 1
    @property
    def outdim(self):
        # Five observed state variables (first 5 sensors; see getObservation).
        return 5
    def reset(self):
        super(BalanceTask, self).reset()
        # Restart the per-episode step counter used by isFinished.
        self.t = 0
    def performAction(self, action):
        """Incoming action is an int between 0 and 8. The action we provide to
        the environment consists of a torque T in {-2 N, 0, 2 N}, and a
        displacement d in {-.02 m, 0, 0.02 m}.
        """
        self.t += 1
        self.action_history += one_to_n(action[0], self.nactions)
        # Map the action integer to a torque and displacement.
        assert round(action[0]) == action[0]
        if self.only_steer:
            # Steering-only mode: map action 0..8 linearly onto T in [-2, 2];
            # no displacement and no butt disturbance in this mode.
            T = 2 * (action[0] / 4.0 - 1.0)
            d = 0.
        else:
            # -1 for action in {0, 1, 2}, 0 for action in {3, 4, 5}, 1 for
            # action in {6, 7, 8}
            torque_selector = np.floor(action[0] / 3.0) - 1.0
            T = 2 * torque_selector
            # Random number in [-1, 1]:
            p = 2.0 * np.random.rand() - 1.0
            # -1 for action in {0, 3, 6}, 0 for action in {1, 4, 7}, 1 for
            # action in {2, 5, 8}
            disp_selector = action[0] % 3 - 1.0
            d = 0.02 * disp_selector + self._butt_disturbance_amplitude * p
        super(BalanceTask, self).performAction([T, d])
    def getObservation(self):
        # The environment exposes 10 sensors; only the first 5 (theta,
        # thetad, omega, omegad, omegadd) form the observation here.
        (theta, thetad, omega, omegad, omegadd,
                xf, yf, xb, yb, psi) = self.env.getSensors()
        # TODO not calling superclass to do normalization, etc.
        return self.env.getSensors()[0:5]
    def isFinished(self):
        # Criterion for ending an episode.
        # "When the agent can balance for 1000 seconds, the task is considered
        # learned."
        if np.abs(self.env.getTilt()) > self.max_tilt:
            return True
        elapsed_time = self.env.time_step * self.t
        if elapsed_time > self.max_time:
            print 'hit max time.', self.t, elapsed_time
            return True
        return False
    def getReward(self):
        # -1 reward for falling over; no reward otherwise.
        if np.abs(self.env.getTilt()) > self.max_tilt:
            return -1.0
        return 0.0
class GotoTask(BalanceTask):
    """ The rider is to balance the bicycle while moving toward a
    prescribed goal.

    The goal is a disk centered at (x_goal, y_goal) with radius r_goal;
    calc_dist_to_goal returns 0 anywhere inside that disk.
    """
    # Goal position and radius.
    x_goal = 20.
    y_goal = 8.
    r_goal = 10.
    @property
    def outdim(self):
        # 8 observed variables: the 5 balance states plus xf, yf, and psi
        # (see the sensor indices used in getObservation).
        return 8
    def getObservation(self):
        # let the learner know about the front tire position and
        # the heading.
        (theta, thetad, omega, omegad, omegadd,
            xf, yf, xb, yb, psi) = self.env.getSensors()
        # TODO not calling superclass to do normalization, etc.
        return [ self.env.getSensors()[i] for i in [0, 1, 2, 3, 4, 5, 6, 9] ]
    def isFinished(self):
        # Criterion for ending an episode.
        # When the agent reaches the goal, the task is considered learned.
        # When the agent falls down, the episode is over.
        dist_to_goal = self.calc_dist_to_goal()
        # NOTE(review): heading and heading2 are computed but never used in
        # this method.
        heading = self.calc_angle_to_goal()
        heading2 = self.env.getPsi()
        if np.abs(self.env.getTilt()) > self.max_tilt:
            print 'distance to goal ', dist_to_goal
            return True
        if dist_to_goal < 1e-3:
            print 'reached goal'
            return True
        return False
    def getReward(self):
        # -1 reward for falling over
        # 0.01 reward for close to goal
        # return reward inversely proportional to heading error otherwise
        r_factor = 0.0001
        if np.abs(self.env.getTilt()) > self.max_tilt:
            return -1.0
        else:
            temp = self.calc_dist_to_goal()
            heading = self.calc_angle_to_goal()
            if (temp < 1e-3):
                return 0.01
            else:
                return (0.95 - heading**2) * r_factor
    def calc_dist_to_goal(self):
        # ported from Randlov's C code. See bike.c for the source
        # code.
        # Returns the distance from the front wheel to the edge of the goal
        # disk (0 if the wheel is inside the disk).
        # unpack variables
        x_goal = self.x_goal
        y_goal = self.y_goal
        r_goal = self.r_goal
        xf = self.env.getXF()
        yf = self.env.getYF()
        sqrd_dist_to_goal = ( x_goal - xf )**2 + ( y_goal -yf )**2
        # Clamp at 0 so points inside the goal radius report zero distance.
        temp = np.max([0, sqrd_dist_to_goal - r_goal**2])
        # We probably don't need to actually compute a sqrt here if it
        # helps simulation speed.
        temp = np.sqrt(temp)
        return temp
    def calc_angle_to_goal(self):
        # ported from Randlov's C code. See bike.c for the source
        # code.
        # the following explanation of the returned angle is
        # verbatim from Randlov's C source:
        # These angles are neither in degrees nor radians, but
        # something strange invented in order to save CPU-time. The
        # measure is arranged same way as radians, but with a
        # slightly different negative factor
        #
        # Say the goal is to the east,
        # If the agent rides to the east then  temp = 0
        # " " north " " = -1
        # " " west  " " = -2 or 2
        # " " south " " = 1
        #
        # // end quote //
        # TODO: see the psi calculation in the environment, which is not
        # currently being used.
        # unpack variables
        x_goal = self.x_goal
        y_goal = self.y_goal
        xf = self.env.getXF()
        xb = self.env.getXB()
        yf = self.env.getYF()
        yb = self.env.getYB()
        # implement Randlov's angle computation
        # Dot product of the wheel-base vector with the wheel-to-goal vector,
        # normalized by the distance to the goal.
        temp = (xf - xb) * (x_goal - xf) + (yf - yb) * (y_goal - yf)
        scalar = temp / (1 * np.sqrt( (x_goal - xf)**2 + (y_goal - yf)**2))
        # Cross-product term: sign tells on which side of the frame the
        # goal lies.
        tvaer = (-yf + yb) * (x_goal - xf) + (xf - xb) * (y_goal-yf)
        if tvaer <= 0 :
            temp = scalar - 1
        else:
            temp = np.abs(scalar - 1)
        return temp
class LinearFATileCoding3456BalanceTask(BalanceTask):
"""An attempt to exactly implement Randlov's function approximation. He
discretized (tiled) the input space into 3456 tiles.
"""
# From Randlov, 1998:
theta_bounds = np.array(
[-0.5 * np.pi, -1.0, -0.2, 0, 0.2, 1.0, 0.5 * np.pi])
thetad_bounds = np.array(
[-np.inf, -2.0, 0, 2.0, np.inf])
omega_bounds = np.array(
[-BalanceTask.max_tilt, -0.15, -0.06, 0, 0.06, 0.15,
BalanceTask.max_tilt])
omegad_bounds = np.array(
[-np.inf, -0.5, -0.25, 0, 0.25, 0.5, np.inf])
omegadd_bounds = np.array(
[-np.inf, -2.0, 0, 2.0, np.inf])
# http://stackoverflow.com/questions/3257619/numpy-interconversion-between-multidimensional-and-linear-indexing
nbins_across_dims = [
len(theta_bounds) - 1,
len(thetad_bounds) - 1,
len(omega_bounds) - 1,
len(omegad_bounds) - 1,
len(omegadd_bounds) - 1]
# This array, when dotted with the 5-dim state vector, gives a 'linear'
# index between 0 and 3455.
magic_array = np.cumprod([1] + nbins_across_dims)[:-1]
def __init__(self, *args, **kwargs):
super(LinearFATileCoding3456BalanceTask, self).__init__(*args, **kwargs)
# Count the number of times that each state is visited.
self.bin_count = np.zeros(self.outdim)
@property
def outdim(self):
# Used when constructing LinearFALearner's.
return 3456
def getBin(self, theta, thetad, omega, omegad, omegadd):
bin_indices = [
np.digitize([theta], self.theta_bounds)[0] - 1,
np.digitize([thetad], self.thetad_bounds)[0] - 1,
np.digitize([omega], self.omega_bounds)[0] - 1,
np.digitize([omegad], self.omegad_bounds)[0] - 1,
np.digitize([omegadd], self.omegadd_bounds)[0] - 1,
]
linear_index = np.dot(self.magic_array, bin_indices)
if linear_index > self.outdim:
# DEBUGGING PRINTS
print self.isFinished()
print self.env.getTilt()
print np.abs(self.env.getTilt())
print self.max_tilt
print np.abs(self.env.getTilt()) > self.max_tilt
print self.env.getSensors()[0:5]
print self.magic_array
print bin_index_for_each_dim
print linear_index
return linear_index
def getBinIndices(self, linear_index):
"""Given a linear index (integer between 0 and outdim), returns the bin
indices for each of the state dimensions.
"""
return linear_index / self.magic_array % self.nbins_across_dims
def getObservation(self):
(theta, thetad, omega, omegad, omegadd,
xf, yf, xb, yb, psi) = self.env.getSensors()
# TODO not calling superclass to do normalization, etc.
state = one_to_n(self.getBin(theta, thetad, omega, omegad, omegadd),
self.outdim)
self.bin_count += state
return state
class LSPIBalanceTask(BalanceTask):
    """Lagoudakis, 2002; simplified for just balancing. Also, we're still using
    all 9 possible actions.
    """
    @property
    def outdim(self):
        # Used when constructing LinearFALearner's.
        return 14
    def getPhi(self, theta, thetad, omega, omegad, omegadd):
        """Build the 14-element polynomial feature vector for the state."""
        # Grouped for readability: bias + lean terms, steering terms, and
        # lean/steering cross terms. Order matches outdim's 14 features.
        lean_terms = [1, omega, omegad, omega**2, omegad**2, omega * omegad]
        steer_terms = [theta, thetad, theta**2, thetad**2, theta * thetad]
        cross_terms = [omega * theta, omega * theta**2, omega**2 * theta]
        return np.array(lean_terms + steer_terms + cross_terms)
    def getObservation(self):
        # Only the first five sensors (the balance states) feed the features.
        sensors = self.env.getSensors()
        return self.getPhi(*sensors[:5])
class LinearFATileCoding3456GoToTask(BalanceTask):
    """An attempt to exactly implement Randlov's function approximation. He
    discretized (tiled) the input space into 3456 tiles.

    Extends the balance tiling with 20 extra one-hot features for the
    heading psi (18-degree bins), giving outdim = 3456 + 20.
    """
    # Goal position and radius.
    x_goal = 20.
    y_goal = 20.
    r_goal = 10.
    # From Randlov, 1998: bin edges for each state dimension.
    theta_bounds = np.array(
            [-0.5 * np.pi, -1.0, -0.2, 0, 0.2, 1.0, 0.5 * np.pi])
    thetad_bounds = np.array(
            [-np.inf, -2.0, 0, 2.0, np.inf])
    omega_bounds = np.array(
            [-BalanceTask.max_tilt, -0.15, -0.06, 0, 0.06, 0.15,
                BalanceTask.max_tilt])
    omegad_bounds = np.array(
            [-np.inf, -0.5, -0.25, 0, 0.25, 0.5, np.inf])
    omegadd_bounds = np.array(
            [-np.inf, -2.0, 0, 2.0, np.inf])
    # 20 heading bins of 18 degrees each, expressed in radians.
    psi_bounds = (np.pi/180) * np.array( range(0,360,18) )
    # http://stackoverflow.com/questions/3257619/numpy-interconversion-between-multidimensional-and-linear-indexing
    nbins_across_dims = [
            len(theta_bounds) - 1,
            len(thetad_bounds) - 1,
            len(omega_bounds) - 1,
            len(omegad_bounds) - 1,
            len(omegadd_bounds) - 1]
    # This array, when dotted with the 5-dim state vector, gives a 'linear'
    # index between 0 and 3455.
    magic_array = np.cumprod([1] + nbins_across_dims)[:-1]
    @property
    def outdim(self):
        # Used when constructing LinearFALearner's.
        # 3456 balance tiles + 20 heading (psi) bins.
        return 3456 + 20
    def getBin(self, theta, thetad, omega, omegad, omegadd):
        # Flatten the per-dimension bins into one linear index in [0, 3455].
        bin_indices = [
                np.digitize([theta], self.theta_bounds)[0] - 1,
                np.digitize([thetad], self.thetad_bounds)[0] - 1,
                np.digitize([omega], self.omega_bounds)[0] - 1,
                np.digitize([omegad], self.omegad_bounds)[0] - 1,
                np.digitize([omegadd], self.omegadd_bounds)[0] - 1,
                ]
        linear_index = np.dot(self.magic_array, bin_indices)
        # NOTE(review): valid indices are 0..outdim-21, so this guard
        # arguably should be '>= self.outdim-20' -- confirm intent.
        if linear_index > self.outdim-20:
            # DEBUGGING PRINTS
            print "DEBUG"
            print self.isFinished()
            print self.env.getTilt()
            print np.abs(self.env.getTilt())
            print self.max_tilt
            print np.abs(self.env.getTilt()) > self.max_tilt
            print self.env.getSensors()
            print self.magic_array
            print self.getBinIndices(linear_index)
            print linear_index
        return linear_index
    def getBinIndices(self, linear_index):
        """Given a linear index (integer between 0 and outdim), returns the bin
        indices for each of the state dimensions.
        """
        # NOTE: relies on Python 2 integer division of the linear index.
        return linear_index / self.magic_array % self.nbins_across_dims
    def getObservation(self):
        (theta, thetad, omega, omegad, omegadd,
                xf, yf, xb, yb, psi) = self.env.getSensors()
        # TODO not calling superclass to do normalization, etc.
        # First 3456 features: one-hot balance tile; last 20: one-hot psi bin.
        top_half = one_to_n(self.getBin(theta, thetad, omega, omegad, omegadd),
                self.outdim - 20)
        bot_half = one_to_n(np.digitize([psi], self.psi_bounds)[0] - 1, 20)
        return np.concatenate((top_half,bot_half))
    def isFinished(self):
        # Criterion for ending an episode.
        # When the agent reaches the goal, the task is considered learned.
        # When the agent falls down, the episode is over.
        if np.abs(self.env.getTilt()) > self.max_tilt:
            return True
        dist_to_goal = self.calc_dist_to_goal()
        # calc_dist_to_goal returns exactly 0 inside the goal disk.
        if dist_to_goal == 0:
            print 'reached goal'
            return True
        #elapsed_time = self.env.time_step * self.t
        #if elapsed_time > self.max_time:
        #    print 'hit max time.', self.t, elapsed_time
        #    return True
        return False
    def getReward(self):
        # -1 reward for falling over
        # 0.01 reward for close to goal
        # return reward inversely proportional to heading error otherwise
        r_factor = 0.0001
        if np.abs(self.env.getTilt()) > self.max_tilt:
            return -1.0
        else:
            temp = self.calc_dist_to_goal()
            heading = self.calc_angle_to_goal()
            if (temp < 1e-3):
                return 0.01
            else:
                return (0.95 - heading**2) * r_factor
    def calc_dist_to_goal(self):
        # ported from Randlov's C code. See bike.c for the source
        # code.
        # Returns the distance from the front wheel to the edge of the goal
        # disk (0 if the wheel is inside the disk).
        # unpack variables
        x_goal = self.x_goal
        y_goal = self.y_goal
        r_goal = self.r_goal
        xf = self.env.getXF()
        yf = self.env.getYF()
        sqrd_dist_to_goal = ( x_goal - xf )**2 + ( y_goal -yf )**2
        # Clamp at 0 so points inside the goal radius report zero distance.
        temp = np.max([0, sqrd_dist_to_goal - r_goal**2])
        # We probably don't need to actually compute a sqrt here if it
        # helps simulation speed.
        temp = np.sqrt(temp)
        return temp
    def calc_angle_to_goal(self):
        # ported from Randlov's C code. See bike.c for the source
        # code.
        # the following explanation of the returned angle is
        # verbatim from Randlov's C source:
        # These angles are neither in degrees nor radians, but
        # something strange invented in order to save CPU-time. The
        # measure is arranged same way as radians, but with a
        # slightly different negative factor
        #
        # Say the goal is to the east,
        # If the agent rides to the east then  temp = 0
        # " " north " " = -1
        # " " west  " " = -2 or 2
        # " " south " " = 1
        #
        # // end quote //
        # TODO: see the psi calculation in the environment, which is not
        # currently being used.
        # unpack variables
        x_goal = self.x_goal
        y_goal = self.y_goal
        xf = self.env.getXF()
        xb = self.env.getXB()
        yf = self.env.getYF()
        yb = self.env.getYB()
        # implement Randlov's angle computation
        # Dot product of the wheel-base vector with the wheel-to-goal vector,
        # normalized by the distance to the goal.
        temp = (xf - xb) * (x_goal - xf) + (yf - yb) * (y_goal - yf)
        scalar = temp / (1 * np.sqrt( (x_goal - xf)**2 + (y_goal - yf)**2))
        # Cross-product term: sign tells on which side of the frame the
        # goal lies.
        tvaer = (-yf + yb) * (x_goal - xf) + (xf - xb) * (y_goal-yf)
        if tvaer <= 0 :
            temp = scalar - 1
        else:
            temp = np.abs(scalar - 1)
        return temp
class Proportional3456ControlBalanceTask(LinearFATileCoding3456BalanceTask):
    """Balance task whose per-step penalty is proportional to the first
    sensor reading, with a fixed -1 penalty for falling over.
    """
    def getReward(self):
        # Falling past the tilt limit earns the fixed -1 penalty.
        tilt = self.env.getTilt()
        if np.abs(tilt) > self.max_tilt:
            return -1.0
        # Otherwise penalize the magnitude of the first sensor value
        # (theta, per the sensor unpacking used elsewhere in this file).
        first_sensor = self.env.getSensors()[0]
        return -np.abs(first_sensor)
class LinearFATileCoding3456GoToTaskReward1(LinearFATileCoding3456GoToTask):
    """GoTo task with a shaped reward: tiered tilt penalty, a bonus/penalty
    for tilt improvement/worsening, and a heading penalty (weight 3000)."""
    # NOTE(review): 'e' is never used in this class.
    e =2.71828
    def getReward(self):
        # NOTE(review): r_factor is assigned but never used here.
        r_factor = 0.0001
        x = np.abs(self.env.getTilt())
        # Tilt magnitude from the previous step; assumes the environment
        # maintains TempTilt -- not visible in this file, confirm.
        PrevTilt = np.abs(self.env.TempTilt)
        if x>LinearFATileCoding3456BalanceTask.max_tilt:
            # Fell over: large fixed penalty.
            R = -15.0
        else:
            # Tiered base reward by thirds of the tilt range.
            if x<1.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 5.0
            elif 1.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 1.0
            elif 2.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=LinearFATileCoding3456BalanceTask.max_tilt:
                R = -5.0
            # Bonus proportional to tilt improvement; doubled when
            # recovering from a large previous tilt.
            if PrevTilt>x:
                R += 3000.0*(PrevTilt-x)
            if PrevTilt>2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R *= 2
            # Same formula adds a (negative) amount when tilt worsened.
            if PrevTilt<x:
                R += 3000.0*(PrevTilt-x)
        temp = self.calc_dist_to_goal()
        heading = self.calc_angle_to_goal()
        if (temp < 1e-3):
            # At the goal: amplify whatever reward was accumulated.
            R *= 10.0
        else:
            R -= 3000*np.abs(heading)
        return R
class LinearFATileCoding3456GoToTaskReward2(LinearFATileCoding3456GoToTask):
    """Same shaped reward as Reward1, except the heading penalty weight is
    200 instead of 3000."""
    # NOTE(review): 'e' is never used in this class.
    e =2.71828
    def getReward(self):
        # NOTE(review): r_factor is assigned but never used here.
        r_factor = 0.0001
        x = np.abs(self.env.getTilt())
        # Tilt magnitude from the previous step; assumes the environment
        # maintains TempTilt -- not visible in this file, confirm.
        PrevTilt = np.abs(self.env.TempTilt)
        if x>LinearFATileCoding3456BalanceTask.max_tilt:
            # Fell over: large fixed penalty.
            R = -15.0
        else:
            # Tiered base reward by thirds of the tilt range.
            if x<1.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 5.0
            elif 1.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 1.0
            elif 2.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=LinearFATileCoding3456BalanceTask.max_tilt:
                R = -5.0
            # Bonus proportional to tilt improvement; doubled when
            # recovering from a large previous tilt.
            if PrevTilt>x:
                R += 3000.0*(PrevTilt-x)
            if PrevTilt>2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R *= 2
            # Same formula adds a (negative) amount when tilt worsened.
            if PrevTilt<x:
                R += 3000.0*(PrevTilt-x)
        temp = self.calc_dist_to_goal()
        heading = self.calc_angle_to_goal()
        if (temp < 1e-3):
            # At the goal: amplify whatever reward was accumulated.
            R *= 10.0
        else:
            R -= 200*np.abs(heading)
        return R
class LinearFATileCoding3456GoToTaskReward3(LinearFATileCoding3456GoToTask):
    """Variant of the shaped reward: the heading penalty is folded into the
    tilt-improvement bonus instead of subtracted at the end."""
    # NOTE(review): 'e' is never used in this class.
    e =2.71828
    def getReward(self):
        temp = self.calc_dist_to_goal()
        heading = self.calc_angle_to_goal()
        x = np.abs(self.env.getTilt())
        # Tilt magnitude from the previous step; assumes the environment
        # maintains TempTilt -- not visible in this file, confirm.
        PrevTilt = np.abs(self.env.TempTilt)
        if x>LinearFATileCoding3456BalanceTask.max_tilt:
            # Fell over: large fixed penalty.
            R = -15.0
        else:
            # Tiered base reward by thirds of the tilt range.
            if x<1.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 5.0
            elif 1.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 1.0
            elif 2.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=LinearFATileCoding3456BalanceTask.max_tilt:
                R = -5.0
            # Improvement bonus minus the heading error (weight 3000).
            if PrevTilt>x:
                R += 3000.0*((PrevTilt-x)-np.abs(heading))
            if PrevTilt>2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R *= 2
            # Worsening tilt adds a (negative) amount.
            if PrevTilt<x:
                R += 3000.0*(PrevTilt-x)
        if (temp < 1e-3):
            # At the goal: amplify whatever reward was accumulated.
            R *= 100.0
        #else:
        #    R -= 200*np.abs(heading)
        return R
class LinearFATileCoding3456GoToTaskReward4(LinearFATileCoding3456GoToTask):
    """Same as Reward3 but the heading term inside the improvement bonus is
    scaled down by a factor of 10."""
    # NOTE(review): 'e' is never used in this class.
    e =2.71828
    def getReward(self):
        temp = self.calc_dist_to_goal()
        heading = self.calc_angle_to_goal()
        x = np.abs(self.env.getTilt())
        # Tilt magnitude from the previous step; assumes the environment
        # maintains TempTilt -- not visible in this file, confirm.
        PrevTilt = np.abs(self.env.TempTilt)
        if x>LinearFATileCoding3456BalanceTask.max_tilt:
            # Fell over: large fixed penalty.
            R = -15.0
        else:
            # Tiered base reward by thirds of the tilt range.
            if x<1.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 5.0
            elif 1.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R = 1.0
            elif 2.0/3*LinearFATileCoding3456BalanceTask.max_tilt<=x and x<=LinearFATileCoding3456BalanceTask.max_tilt:
                R = -5.0
            # Improvement bonus minus a tenth of the heading error.
            if PrevTilt>x:
                R += 3000.0*((PrevTilt-x)-1.0/10*np.abs(heading))
            if PrevTilt>2.0/3*LinearFATileCoding3456BalanceTask.max_tilt:
                R *= 2
            # Worsening tilt adds a (negative) amount.
            if PrevTilt<x:
                R += 3000.0*(PrevTilt-x)
        if (temp < 1e-3):
            # At the goal: amplify whatever reward was accumulated.
            R *= 100.0
        return R
| 35.803077
| 125
| 0.577432
| 3,042
| 23,272
| 4.312952
| 0.139053
| 0.025076
| 0.085366
| 0.018293
| 0.74253
| 0.721494
| 0.717149
| 0.717149
| 0.714253
| 0.706402
| 0
| 0.0507
| 0.31858
| 23,272
| 650
| 126
| 35.803077
| 0.776643
| 0.221769
| 0
| 0.769821
| 0
| 0
| 0.003531
| 0
| 0
| 0
| 0
| 0.009231
| 0.002558
| 0
| null | null | 0
| 0.01023
| null | null | 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1369a9dbf807103d4d86b4f01e3a6de5863ab13f
| 117,121
|
py
|
Python
|
spacy/lang/ru/tag_map.py
|
Brixjohn/spaCy
|
c9a89bba507de9a78eb4d9c37f0d66033d9e2fd7
|
[
"MIT"
] | 88
|
2018-05-06T17:28:23.000Z
|
2022-03-06T20:19:16.000Z
|
spacy/lang/ru/tag_map.py
|
Brixjohn/spaCy
|
c9a89bba507de9a78eb4d9c37f0d66033d9e2fd7
|
[
"MIT"
] | 12
|
2018-07-19T15:11:57.000Z
|
2021-08-05T11:58:29.000Z
|
spacy/lang/ru/tag_map.py
|
Brixjohn/spaCy
|
c9a89bba507de9a78eb4d9c37f0d66033d9e2fd7
|
[
"MIT"
] | 10
|
2018-07-28T22:43:04.000Z
|
2020-11-22T22:58:21.000Z
|
# coding: utf8
from __future__ import unicode_literals
from ...symbols import (
POS, PUNCT, SYM, ADJ, NUM, DET, ADV, ADP, X, VERB, NOUN, PROPN, PART, INTJ, SPACE, PRON, SCONJ, AUX, CONJ, CCONJ
)
TAG_MAP = {
'ADJ__Animacy=Anim|Case=Acc|Degree=Pos|Gender=Masc|Number=Sing': {POS: ADJ, 'Animacy': 'Anim', 'Case': 'Acc', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Animacy=Anim|Case=Acc|Degree=Pos|Number=Plur': {POS: ADJ, 'Animacy': 'Anim', 'Case': 'Acc', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Animacy=Anim|Case=Acc|Degree=Sup|Gender=Masc|Number=Sing': {POS: ADJ, 'Animacy': 'Anim', 'Case': 'Acc', 'Degree': 'Sup', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Animacy=Anim|Case=Nom|Degree=Pos|Number=Plur': {POS: ADJ, 'Animacy': 'Anim', 'Case': 'Nom', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Animacy=Inan|Case=Acc|Degree=Pos|Gender=Masc|Number=Sing': {POS: ADJ, 'Animacy': 'Inan', 'Case': 'Acc', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Animacy=Inan|Case=Acc|Degree=Pos|Gender=Neut|Number=Sing': {POS: ADJ, 'Animacy': 'Inan', 'Case': 'Acc', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Animacy=Inan|Case=Acc|Degree=Pos|Number=Plur': {POS: ADJ, 'Animacy': 'Inan', 'Case': 'Acc', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Animacy=Inan|Case=Acc|Degree=Sup|Gender=Masc|Number=Sing': {POS: ADJ, 'Animacy': 'Inan', 'Case': 'Acc', 'Degree': 'Sup', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Animacy=Inan|Case=Acc|Degree=Sup|Number=Plur': {POS: ADJ, 'Animacy': 'Inan', 'Case': 'Acc', 'Degree': 'Sup', 'Number': 'Plur'},
'ADJ__Animacy=Inan|Case=Acc|Gender=Fem|Number=Sing': {POS: ADJ, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Animacy=Inan|Case=Nom|Degree=Pos|Gender=Fem|Number=Sing': {POS: ADJ, 'Animacy': 'Inan', 'Case': 'Nom', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Acc|Degree=Pos|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Acc', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Acc|Degree=Pos|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Acc', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Acc|Degree=Sup|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Acc', 'Degree': 'Sup', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Acc|Degree=Sup|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Acc', 'Degree': 'Sup', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Dat|Degree=Pos|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Dat', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Dat|Degree=Pos|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Dat', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Dat|Degree=Pos|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Dat', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Dat|Degree=Pos|Number=Plur': {POS: ADJ, 'Case': 'Dat', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Case=Dat|Degree=Sup|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Dat', 'Degree': 'Sup', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Dat|Degree=Sup|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Dat', 'Degree': 'Sup', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Dat|Degree=Sup|Number=Plur': {POS: ADJ, 'Case': 'Dat', 'Degree': 'Sup', 'Number': 'Plur'},
'ADJ__Case=Gen|Degree=Pos|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Gen|Degree=Pos|Gender=Fem|Number=Sing|Variant=Short': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing', 'Variant': 'Short'},
'ADJ__Case=Gen|Degree=Pos|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Gen|Degree=Pos|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Gen|Degree=Pos|Number=Plur': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Case=Gen|Degree=Sup|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Sup', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Gen|Degree=Sup|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Sup', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Gen|Degree=Sup|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Sup', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Gen|Degree=Sup|Number=Plur': {POS: ADJ, 'Case': 'Gen', 'Degree': 'Sup', 'Number': 'Plur'},
'ADJ__Case=Ins|Degree=Pos|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Ins|Degree=Pos|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Ins|Degree=Pos|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Ins|Degree=Pos|Number=Plur': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Case=Ins|Degree=Sup|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Sup', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Ins|Degree=Sup|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Sup', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Ins|Degree=Sup|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Sup', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Ins|Degree=Sup|Number=Plur': {POS: ADJ, 'Case': 'Ins', 'Degree': 'Sup', 'Number': 'Plur'},
'ADJ__Case=Loc|Degree=Pos|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Loc|Degree=Pos|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Loc|Degree=Pos|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Loc|Degree=Pos|Number=Plur': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Case=Loc|Degree=Sup|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Sup', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Loc|Degree=Sup|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Sup', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Loc|Degree=Sup|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Sup', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Loc|Degree=Sup|Number=Plur': {POS: ADJ, 'Case': 'Loc', 'Degree': 'Sup', 'Number': 'Plur'},
'ADJ__Case=Nom|Degree=Pos|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Nom|Degree=Pos|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Nom|Degree=Pos|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Nom|Degree=Pos|Number=Plur': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Pos', 'Number': 'Plur'},
'ADJ__Case=Nom|Degree=Sup|Gender=Fem|Number=Sing': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Sup', 'Gender': 'Fem', 'Number': 'Sing'},
'ADJ__Case=Nom|Degree=Sup|Gender=Masc|Number=Sing': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Sup', 'Gender': 'Masc', 'Number': 'Sing'},
'ADJ__Case=Nom|Degree=Sup|Gender=Neut|Number=Sing': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Sup', 'Gender': 'Neut', 'Number': 'Sing'},
'ADJ__Case=Nom|Degree=Sup|Number=Plur': {POS: ADJ, 'Case': 'Nom', 'Degree': 'Sup', 'Number': 'Plur'},
'ADJ__Degree=Cmp': {POS: ADJ, 'Degree': 'Cmp'},
'ADJ__Degree=Pos': {POS: ADJ, 'Degree': 'Pos'},
'ADJ__Degree=Pos|Gender=Fem|Number=Sing|Variant=Short': {POS: ADJ, 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing', 'Variant': 'Short'},
'ADJ__Degree=Pos|Gender=Masc|Number=Sing|Variant=Short': {POS: ADJ, 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing', 'Variant': 'Short'},
'ADJ__Degree=Pos|Gender=Neut|Number=Sing|Variant=Short': {POS: ADJ, 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing', 'Variant': 'Short'},
'ADJ__Degree=Pos|Number=Plur|Variant=Short': {POS: ADJ, 'Degree': 'Pos', 'Number': 'Plur', 'Variant': 'Short'},
'ADJ__Foreign=Yes': {POS: ADJ, 'Foreign': 'Yes'},
'ADJ___': {POS: ADJ},
'ADP___': {POS: ADP},
'ADV__Degree=Cmp': {POS: ADV, 'Degree': 'Cmp'},
'ADV__Degree=Pos': {POS: ADV, 'Degree': 'Pos'},
'ADV__Polarity=Neg': {POS: ADV, 'Polarity': 'Neg'},
'AUX__Aspect=Imp|Case=Loc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'AUX__Aspect=Imp|Case=Nom|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'AUX__Aspect=Imp|Case=Nom|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'AUX__Aspect=Imp|Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Gender': 'Fem', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Gender': 'Masc', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Gender=Neut|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Gender': 'Neut', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Imp|Number=Plur|Person=2|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Imp', 'Number': 'Plur', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Imp|Number=Sing|Person=2|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Imp', 'Number': 'Sing', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '1', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '2', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Ind|Number=Plur|Tense=Past|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '1', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Ind|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '2', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'AUX__Aspect=Imp|Tense=Pres|VerbForm=Conv|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'Tense': 'Pres', 'VerbForm': 'Conv', 'Voice': 'Act'},
'AUX__Aspect=Imp|VerbForm=Inf|Voice=Act': {POS: AUX, 'Aspect': 'Imp', 'VerbForm': 'Inf', 'Voice': 'Act'},
'CCONJ___': {POS: CCONJ},
'DET__Animacy=Inan|Case=Acc|Gender=Masc|Number=Sing': {POS: DET, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing'},
'DET__Animacy=Inan|Case=Acc|Gender=Neut|Number=Sing': {POS: DET, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing'},
'DET__Animacy=Inan|Case=Gen|Gender=Fem|Number=Sing': {POS: DET, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing'},
'DET__Animacy=Inan|Case=Gen|Number=Plur': {POS: DET, 'Animacy': 'Inan', 'Case': 'Gen', 'Number': 'Plur'},
'DET__Case=Acc|Degree=Pos|Number=Plur': {POS: DET, 'Case': 'Acc', 'Degree': 'Pos', 'Number': 'Plur'},
'DET__Case=Acc|Gender=Fem|Number=Sing': {POS: DET, 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing'},
'DET__Case=Acc|Gender=Masc|Number=Sing': {POS: DET, 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing'},
'DET__Case=Acc|Gender=Neut|Number=Sing': {POS: DET, 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing'},
'DET__Case=Acc|Number=Plur': {POS: DET, 'Case': 'Acc', 'Number': 'Plur'},
'DET__Case=Dat|Gender=Fem|Number=Sing': {POS: DET, 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing'},
'DET__Case=Dat|Gender=Masc|Number=Plur': {POS: DET, 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Plur'},
'DET__Case=Dat|Gender=Masc|Number=Sing': {POS: DET, 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing'},
'DET__Case=Dat|Gender=Neut|Number=Sing': {POS: DET, 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing'},
'DET__Case=Dat|Number=Plur': {POS: DET, 'Case': 'Dat', 'Number': 'Plur'},
'DET__Case=Gen|Gender=Fem|Number=Sing': {POS: DET, 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing'},
'DET__Case=Gen|Gender=Masc|Number=Sing': {POS: DET, 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing'},
'DET__Case=Gen|Gender=Neut|Number=Sing': {POS: DET, 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing'},
'DET__Case=Gen|Number=Plur': {POS: DET, 'Case': 'Gen', 'Number': 'Plur'},
'DET__Case=Ins|Gender=Fem|Number=Sing': {POS: DET, 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing'},
'DET__Case=Ins|Gender=Masc|Number=Sing': {POS: DET, 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing'},
'DET__Case=Ins|Gender=Neut|Number=Sing': {POS: DET, 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing'},
'DET__Case=Ins|Number=Plur': {POS: DET, 'Case': 'Ins', 'Number': 'Plur'},
'DET__Case=Loc|Gender=Fem|Number=Sing': {POS: DET, 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing'},
'DET__Case=Loc|Gender=Masc|Number=Sing': {POS: DET, 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing'},
'DET__Case=Loc|Gender=Neut|Number=Sing': {POS: DET, 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing'},
'DET__Case=Loc|Number=Plur': {POS: DET, 'Case': 'Loc', 'Number': 'Plur'},
'DET__Case=Nom|Gender=Fem|Number=Sing': {POS: DET, 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing'},
'DET__Case=Nom|Gender=Masc|Number=Plur': {POS: DET, 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Plur'},
'DET__Case=Nom|Gender=Masc|Number=Sing': {POS: DET, 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing'},
'DET__Case=Nom|Gender=Neut|Number=Sing': {POS: DET, 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing'},
'DET__Case=Nom|Number=Plur': {POS: DET, 'Case': 'Nom', 'Number': 'Plur'},
'DET__Gender=Masc|Number=Sing': {POS: DET, 'Gender': 'Masc', 'Number': 'Sing'},
'INTJ___': {POS: INTJ},
'NOUN__Animacy=Anim|Case=Acc|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Acc|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Acc|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Acc|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Acc|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Acc|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Acc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Acc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Dat|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Dat|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Dat|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Dat|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Dat|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Dat|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Dat|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Dat', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Gen|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Gen|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Gen|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Gen|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Gen|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Gen|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Gen|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Gen', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Ins|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Ins|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Ins|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Ins|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Ins|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Ins|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Ins|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Ins', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Loc|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Loc|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Loc|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Loc|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Loc|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Loc|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Loc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Loc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Nom|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Nom|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Nom|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Nom|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Nom|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Nom|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Anim|Case=Nom|Number=Plur': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Nom', 'Number': 'Plur'},
'NOUN__Animacy=Anim|Case=Voc|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Anim', 'Case': 'Voc', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Acc|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Acc|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Acc|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Acc|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Acc|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Acc|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Acc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Acc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Dat|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Dat|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Dat|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Dat|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Dat|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Dat|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Dat|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Dat', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Gen|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Gen|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Gen|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Gen|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Gen|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Gen|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Gen|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Gen', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Ins|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Ins|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Ins|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Ins|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Ins|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Ins|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Ins|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Ins', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Loc|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Loc|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Loc|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Loc|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Loc|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Loc|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Loc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Loc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Nom|Gender=Fem|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Nom|Gender=Fem|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Nom|Gender=Masc|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Nom|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Nom|Gender=Neut|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Nom|Gender=Neut|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Case=Nom|Number=Plur': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Nom', 'Number': 'Plur'},
'NOUN__Animacy=Inan|Case=Par|Gender=Masc|Number=Sing': {POS: NOUN, 'Animacy': 'Inan', 'Case': 'Par', 'Gender': 'Masc', 'Number': 'Sing'},
'NOUN__Animacy=Inan|Gender=Fem': {POS: NOUN, 'Animacy': 'Inan', 'Gender': 'Fem'},
'NOUN__Animacy=Inan|Gender=Masc': {POS: NOUN, 'Animacy': 'Inan', 'Gender': 'Masc'},
'NOUN__Animacy=Inan|Gender=Neut': {POS: NOUN, 'Animacy': 'Inan', 'Gender': 'Neut'},
'NOUN__Case=Gen|Degree=Pos|Gender=Fem|Number=Sing': {POS: NOUN, 'Case': 'Gen', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'NOUN__Foreign=Yes': {POS: NOUN, 'Foreign': 'Yes'},
'NOUN___': {POS: NOUN},
'NUM__Animacy=Anim|Case=Acc': {POS: NUM, 'Animacy': 'Anim', 'Case': 'Acc'},
'NUM__Animacy=Anim|Case=Acc|Gender=Fem': {POS: NUM, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Fem'},
'NUM__Animacy=Anim|Case=Acc|Gender=Masc': {POS: NUM, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Masc'},
'NUM__Animacy=Inan|Case=Acc': {POS: NUM, 'Animacy': 'Inan', 'Case': 'Acc'},
'NUM__Animacy=Inan|Case=Acc|Gender=Fem': {POS: NUM, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Fem'},
'NUM__Animacy=Inan|Case=Acc|Gender=Masc': {POS: NUM, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Masc'},
'NUM__Case=Acc': {POS: NUM, 'Case': 'Acc'},
'NUM__Case=Acc|Gender=Fem': {POS: NUM, 'Case': 'Acc', 'Gender': 'Fem'},
'NUM__Case=Acc|Gender=Masc': {POS: NUM, 'Case': 'Acc', 'Gender': 'Masc'},
'NUM__Case=Acc|Gender=Neut': {POS: NUM, 'Case': 'Acc', 'Gender': 'Neut'},
'NUM__Case=Dat': {POS: NUM, 'Case': 'Dat'},
'NUM__Case=Dat|Gender=Fem': {POS: NUM, 'Case': 'Dat', 'Gender': 'Fem'},
'NUM__Case=Dat|Gender=Masc': {POS: NUM, 'Case': 'Dat', 'Gender': 'Masc'},
'NUM__Case=Dat|Gender=Neut': {POS: NUM, 'Case': 'Dat', 'Gender': 'Neut'},
'NUM__Case=Gen': {POS: NUM, 'Case': 'Gen'},
'NUM__Case=Gen|Gender=Fem': {POS: NUM, 'Case': 'Gen', 'Gender': 'Fem'},
'NUM__Case=Gen|Gender=Masc': {POS: NUM, 'Case': 'Gen', 'Gender': 'Masc'},
'NUM__Case=Gen|Gender=Neut': {POS: NUM, 'Case': 'Gen', 'Gender': 'Neut'},
'NUM__Case=Ins': {POS: NUM, 'Case': 'Ins'},
'NUM__Case=Ins|Gender=Fem': {POS: NUM, 'Case': 'Ins', 'Gender': 'Fem'},
'NUM__Case=Ins|Gender=Masc': {POS: NUM, 'Case': 'Ins', 'Gender': 'Masc'},
'NUM__Case=Ins|Gender=Neut': {POS: NUM, 'Case': 'Ins', 'Gender': 'Neut'},
'NUM__Case=Loc': {POS: NUM, 'Case': 'Loc'},
'NUM__Case=Loc|Gender=Fem': {POS: NUM, 'Case': 'Loc', 'Gender': 'Fem'},
'NUM__Case=Loc|Gender=Masc': {POS: NUM, 'Case': 'Loc', 'Gender': 'Masc'},
'NUM__Case=Loc|Gender=Neut': {POS: NUM, 'Case': 'Loc', 'Gender': 'Neut'},
'NUM__Case=Nom': {POS: NUM, 'Case': 'Nom'},
'NUM__Case=Nom|Gender=Fem': {POS: NUM, 'Case': 'Nom', 'Gender': 'Fem'},
'NUM__Case=Nom|Gender=Masc': {POS: NUM, 'Case': 'Nom', 'Gender': 'Masc'},
'NUM__Case=Nom|Gender=Neut': {POS: NUM, 'Case': 'Nom', 'Gender': 'Neut'},
'NUM___': {POS: NUM},
'PART__Mood=Cnd': {POS: PART, 'Mood': 'Cnd'},
'PART__Polarity=Neg': {POS: PART, 'Polarity': 'Neg'},
'PART___': {POS: PART},
'PRON__Animacy=Anim|Case=Acc|Gender=Masc|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Plur'},
'PRON__Animacy=Anim|Case=Acc|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Acc', 'Number': 'Plur'},
'PRON__Animacy=Anim|Case=Dat|Gender=Masc|Number=Sing': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing'},
'PRON__Animacy=Anim|Case=Dat|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Dat', 'Number': 'Plur'},
'PRON__Animacy=Anim|Case=Gen|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Gen', 'Number': 'Plur'},
'PRON__Animacy=Anim|Case=Ins|Gender=Masc|Number=Sing': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing'},
'PRON__Animacy=Anim|Case=Ins|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Ins', 'Number': 'Plur'},
'PRON__Animacy=Anim|Case=Loc|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Loc', 'Number': 'Plur'},
'PRON__Animacy=Anim|Case=Nom|Gender=Masc|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Plur'},
'PRON__Animacy=Anim|Case=Nom|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Case': 'Nom', 'Number': 'Plur'},
'PRON__Animacy=Anim|Gender=Masc|Number=Plur': {POS: PRON, 'Animacy': 'Anim', 'Gender': 'Masc', 'Number': 'Plur'},
'PRON__Animacy=Inan|Case=Acc|Gender=Masc|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Acc|Gender=Neut|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Dat|Gender=Neut|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Gen|Gender=Masc|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Gen|Gender=Neut|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Ins|Gender=Fem|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Ins|Gender=Neut|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Loc|Gender=Neut|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing'},
'PRON__Animacy=Inan|Case=Nom|Gender=Neut|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing'},
'PRON__Animacy=Inan|Gender=Neut|Number=Sing': {POS: PRON, 'Animacy': 'Inan', 'Gender': 'Neut', 'Number': 'Sing'},
'PRON__Case=Acc': {POS: PRON, 'Case': 'Acc'},
'PRON__Case=Acc|Gender=Fem|Number=Sing|Person=3': {POS: PRON, 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Acc|Gender=Masc|Number=Sing|Person=3': {POS: PRON, 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Acc|Gender=Neut|Number=Sing|Person=3': {POS: PRON, 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Acc|Number=Plur|Person=1': {POS: PRON, 'Case': 'Acc', 'Number': 'Plur', 'Person': '1'},
'PRON__Case=Acc|Number=Plur|Person=2': {POS: PRON, 'Case': 'Acc', 'Number': 'Plur', 'Person': '2'},
'PRON__Case=Acc|Number=Plur|Person=3': {POS: PRON, 'Case': 'Acc', 'Number': 'Plur', 'Person': '3'},
'PRON__Case=Acc|Number=Sing|Person=1': {POS: PRON, 'Case': 'Acc', 'Number': 'Sing', 'Person': '1'},
'PRON__Case=Acc|Number=Sing|Person=2': {POS: PRON, 'Case': 'Acc', 'Number': 'Sing', 'Person': '2'},
'PRON__Case=Dat': {POS: PRON, 'Case': 'Dat'},
'PRON__Case=Dat|Gender=Fem|Number=Sing|Person=3': {POS: PRON, 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Dat|Gender=Masc|Number=Sing|Person=3': {POS: PRON, 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Dat|Gender=Neut|Number=Sing|Person=3': {POS: PRON, 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Dat|Number=Plur|Person=1': {POS: PRON, 'Case': 'Dat', 'Number': 'Plur', 'Person': '1'},
'PRON__Case=Dat|Number=Plur|Person=2': {POS: PRON, 'Case': 'Dat', 'Number': 'Plur', 'Person': '2'},
'PRON__Case=Dat|Number=Plur|Person=3': {POS: PRON, 'Case': 'Dat', 'Number': 'Plur', 'Person': '3'},
'PRON__Case=Dat|Number=Sing|Person=1': {POS: PRON, 'Case': 'Dat', 'Number': 'Sing', 'Person': '1'},
'PRON__Case=Dat|Number=Sing|Person=2': {POS: PRON, 'Case': 'Dat', 'Number': 'Sing', 'Person': '2'},
'PRON__Case=Gen': {POS: PRON, 'Case': 'Gen'},
'PRON__Case=Gen|Gender=Fem|Number=Sing|Person=3': {POS: PRON, 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Gen|Gender=Masc|Number=Sing|Person=3': {POS: PRON, 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Gen|Gender=Neut|Number=Sing|Person=3': {POS: PRON, 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Gen|Number=Plur|Person=1': {POS: PRON, 'Case': 'Gen', 'Number': 'Plur', 'Person': '1'},
'PRON__Case=Gen|Number=Plur|Person=2': {POS: PRON, 'Case': 'Gen', 'Number': 'Plur', 'Person': '2'},
'PRON__Case=Gen|Number=Plur|Person=3': {POS: PRON, 'Case': 'Gen', 'Number': 'Plur', 'Person': '3'},
'PRON__Case=Gen|Number=Sing|Person=1': {POS: PRON, 'Case': 'Gen', 'Number': 'Sing', 'Person': '1'},
'PRON__Case=Gen|Number=Sing|Person=2': {POS: PRON, 'Case': 'Gen', 'Number': 'Sing', 'Person': '2'},
'PRON__Case=Ins': {POS: PRON, 'Case': 'Ins'},
'PRON__Case=Ins|Gender=Fem|Number=Sing|Person=3': {POS: PRON, 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Ins|Gender=Masc|Number=Sing|Person=3': {POS: PRON, 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Ins|Gender=Neut|Number=Sing|Person=3': {POS: PRON, 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Ins|Number=Plur|Person=1': {POS: PRON, 'Case': 'Ins', 'Number': 'Plur', 'Person': '1'},
'PRON__Case=Ins|Number=Plur|Person=2': {POS: PRON, 'Case': 'Ins', 'Number': 'Plur', 'Person': '2'},
'PRON__Case=Ins|Number=Plur|Person=3': {POS: PRON, 'Case': 'Ins', 'Number': 'Plur', 'Person': '3'},
'PRON__Case=Ins|Number=Sing|Person=1': {POS: PRON, 'Case': 'Ins', 'Number': 'Sing', 'Person': '1'},
'PRON__Case=Ins|Number=Sing|Person=2': {POS: PRON, 'Case': 'Ins', 'Number': 'Sing', 'Person': '2'},
'PRON__Case=Loc': {POS: PRON, 'Case': 'Loc'},
'PRON__Case=Loc|Gender=Fem|Number=Sing|Person=3': {POS: PRON, 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Loc|Gender=Masc|Number=Sing|Person=3': {POS: PRON, 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Loc|Gender=Neut|Number=Sing|Person=3': {POS: PRON, 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Loc|Number=Plur|Person=1': {POS: PRON, 'Case': 'Loc', 'Number': 'Plur', 'Person': '1'},
'PRON__Case=Loc|Number=Plur|Person=2': {POS: PRON, 'Case': 'Loc', 'Number': 'Plur', 'Person': '2'},
'PRON__Case=Loc|Number=Plur|Person=3': {POS: PRON, 'Case': 'Loc', 'Number': 'Plur', 'Person': '3'},
'PRON__Case=Loc|Number=Sing|Person=1': {POS: PRON, 'Case': 'Loc', 'Number': 'Sing', 'Person': '1'},
'PRON__Case=Loc|Number=Sing|Person=2': {POS: PRON, 'Case': 'Loc', 'Number': 'Sing', 'Person': '2'},
'PRON__Case=Nom': {POS: PRON, 'Case': 'Nom'},
'PRON__Case=Nom|Gender=Fem|Number=Sing|Person=3': {POS: PRON, 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Nom|Gender=Masc|Number=Sing|Person=3': {POS: PRON, 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Nom|Gender=Neut|Number=Sing|Person=3': {POS: PRON, 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Person': '3'},
'PRON__Case=Nom|Number=Plur|Person=1': {POS: PRON, 'Case': 'Nom', 'Number': 'Plur', 'Person': '1'},
'PRON__Case=Nom|Number=Plur|Person=2': {POS: PRON, 'Case': 'Nom', 'Number': 'Plur', 'Person': '2'},
'PRON__Case=Nom|Number=Plur|Person=3': {POS: PRON, 'Case': 'Nom', 'Number': 'Plur', 'Person': '3'},
'PRON__Case=Nom|Number=Sing|Person=1': {POS: PRON, 'Case': 'Nom', 'Number': 'Sing', 'Person': '1'},
'PRON__Case=Nom|Number=Sing|Person=2': {POS: PRON, 'Case': 'Nom', 'Number': 'Sing', 'Person': '2'},
'PRON__Number=Sing|Person=1': {POS: PRON, 'Number': 'Sing', 'Person': '1'},
'PRON___': {POS: PRON},
'PROPN__Animacy=Anim|Case=Acc|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Acc|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Acc|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Acc|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Acc|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Dat|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Dat|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Dat|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Dat|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Dat|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Gen|Foreign=Yes|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Gen', 'Foreign': 'Yes', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Gen|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Gen|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Gen|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Gen|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Ins|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Ins|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Ins|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Ins|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Loc|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Loc|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Loc|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Nom|Foreign=Yes|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Nom', 'Foreign': 'Yes', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Nom|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Nom|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Nom|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Nom|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Case=Nom|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Anim|Case=Voc|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Case': 'Voc', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Anim|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Anim', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Acc|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Acc|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Acc|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Acc|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Acc|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Acc|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Acc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Acc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Dat|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Dat|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Dat|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Dat|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Dat|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Dat|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Dat|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Dat', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Gen|Foreign=Yes|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Foreign': 'Yes', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Gen|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Gen|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Gen|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Gen|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Gen|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Gen|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Gen|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Gen', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Ins|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Ins|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Ins|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Ins|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Ins|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Ins|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Ins|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Ins', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Loc|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Loc|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Loc|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Loc|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Loc|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Loc|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Loc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Loc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Nom|Foreign=Yes|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Foreign': 'Yes', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Nom|Foreign=Yes|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Foreign': 'Yes', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Nom|Foreign=Yes|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Foreign': 'Yes', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Nom|Gender=Fem|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Nom|Gender=Fem|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Nom|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Nom|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Nom|Gender=Neut|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Nom|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Case=Nom|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Nom', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Case=Par|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Case': 'Par', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Gender=Fem': {POS: PROPN, 'Animacy': 'Inan', 'Gender': 'Fem'},
'PROPN__Animacy=Inan|Gender=Masc': {POS: PROPN, 'Animacy': 'Inan', 'Gender': 'Masc'},
'PROPN__Animacy=Inan|Gender=Masc|Number=Plur': {POS: PROPN, 'Animacy': 'Inan', 'Gender': 'Masc', 'Number': 'Plur'},
'PROPN__Animacy=Inan|Gender=Masc|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Animacy=Inan|Gender=Neut|Number=Sing': {POS: PROPN, 'Animacy': 'Inan', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Case=Acc|Degree=Pos|Gender=Fem|Number=Sing': {POS: PROPN, 'Case': 'Acc', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Case=Dat|Degree=Pos|Gender=Masc|Number=Sing': {POS: PROPN, 'Case': 'Dat', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Case=Ins|Degree=Pos|Gender=Fem|Number=Sing': {POS: PROPN, 'Case': 'Ins', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Case=Ins|Degree=Pos|Number=Plur': {POS: PROPN, 'Case': 'Ins', 'Degree': 'Pos', 'Number': 'Plur'},
'PROPN__Case=Nom|Degree=Pos|Gender=Fem|Number=Sing': {POS: PROPN, 'Case': 'Nom', 'Degree': 'Pos', 'Gender': 'Fem', 'Number': 'Sing'},
'PROPN__Case=Nom|Degree=Pos|Gender=Masc|Number=Sing': {POS: PROPN, 'Case': 'Nom', 'Degree': 'Pos', 'Gender': 'Masc', 'Number': 'Sing'},
'PROPN__Case=Nom|Degree=Pos|Gender=Neut|Number=Sing': {POS: PROPN, 'Case': 'Nom', 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing'},
'PROPN__Case=Nom|Degree=Pos|Number=Plur': {POS: PROPN, 'Case': 'Nom', 'Degree': 'Pos', 'Number': 'Plur'},
'PROPN__Degree=Pos|Gender=Neut|Number=Sing|Variant=Short': {POS: PROPN, 'Degree': 'Pos', 'Gender': 'Neut', 'Number': 'Sing', 'Variant': 'Short'},
'PROPN__Degree=Pos|Number=Plur|Variant=Short': {POS: PROPN, 'Degree': 'Pos', 'Number': 'Plur', 'Variant': 'Short'},
'PROPN__Foreign=Yes': {POS: PROPN, 'Foreign': 'Yes'},
'PROPN__Number=Sing': {POS: PROPN, 'Number': 'Sing'},
'PROPN___': {POS: PROPN},
'PUNCT___': {POS: PUNCT},
'SCONJ__Mood=Cnd': {POS: SCONJ, 'Mood': 'Cnd'},
'SCONJ___': {POS: SCONJ},
'SYM___': {POS: SYM},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Anim|Aspect=Imp|Case=Acc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Anim|Aspect=Perf|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Anim|Aspect=Perf|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Anim|Aspect=Perf|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Anim|Aspect=Perf|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Perf', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Anim|Aspect=Perf|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Perf', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Anim|Aspect=Perf|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Anim', 'Aspect': 'Perf', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Inan|Aspect=Imp|Case=Acc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Imp', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Inan|Aspect=Perf|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Inan|Aspect=Perf|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Inan|Aspect=Perf|Case=Acc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Animacy=Inan|Aspect=Perf|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Perf', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Animacy=Inan|Aspect=Perf|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Perf', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Animacy=Inan|Aspect=Perf|Case=Acc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Animacy': 'Inan', 'Aspect': 'Perf', 'Case': 'Acc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Acc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Acc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Acc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Acc|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Acc|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Acc|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Acc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Acc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Acc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Acc|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Acc|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Acc|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Dat|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Dat|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Dat|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Dat|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Dat|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Dat|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Dat|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Dat|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Dat|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Dat|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Dat|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Dat|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Dat|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Dat|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Dat|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Dat|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Dat|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Dat|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Dat|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Dat|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Gen|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Gen|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Gen|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Gen|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Gen|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Gen|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Gen|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Gen|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Gen|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Ins|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Ins|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Ins|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Ins|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Ins|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Ins|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Ins|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Ins|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Ins|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Ins|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Ins|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Ins|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Loc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Loc|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Loc|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Loc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Loc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Loc|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Loc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Loc|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Loc|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Loc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Loc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Loc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Loc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Loc|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Nom|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Nom|Gender=Fem|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Nom|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Nom|Gender=Masc|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Nom|Gender=Neut|Number=Sing|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Nom|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Case=Nom|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Imp|Case=Nom|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Case=Nom|Number=Plur|Tense=Pres|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Pres', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Fem', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Fem', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Fem', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Fem|Number=Sing|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Fem|Number=Sing|Tense=Pres|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Pres', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Masc', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Masc', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Masc', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Masc|Number=Sing|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Masc|Number=Sing|Tense=Pres|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Pres', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Neut|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Neut', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Gender=Neut|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Neut', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Gender=Neut|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Neut', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Neut|Number=Sing|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Gender=Neut|Number=Sing|Tense=Pres|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Pres', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Mood=Imp|Number=Plur|Person=2|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Imp', 'Number': 'Plur', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Imp|Number=Plur|Person=2|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Imp', 'Number': 'Plur', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Imp|Number=Sing|Person=2|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Imp', 'Number': 'Sing', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Imp|Number=Sing|Person=2|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Imp', 'Number': 'Sing', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '1', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '1', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '2', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '2', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Plur|Tense=Past|VerbForm=Fin|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '1', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '1', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '2', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Ind|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '2', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Imp|Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '3', 'Tense': 'Pres', 'VerbForm': 'Fin', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Number=Plur|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Number': 'Plur', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Number=Plur|Tense=Pres|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Number': 'Plur', 'Tense': 'Pres', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Imp|Tense=Past|VerbForm=Conv|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Tense': 'Past', 'VerbForm': 'Conv', 'Voice': 'Act'},
'VERB__Aspect=Imp|Tense=Pres|VerbForm=Conv|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'Tense': 'Pres', 'VerbForm': 'Conv', 'Voice': 'Act'},
'VERB__Aspect=Imp|Tense=Pres|VerbForm=Conv|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'Tense': 'Pres', 'VerbForm': 'Conv', 'Voice': 'Mid'},
'VERB__Aspect=Imp|Tense=Pres|VerbForm=Conv|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'Tense': 'Pres', 'VerbForm': 'Conv', 'Voice': 'Pass'},
'VERB__Aspect=Imp|VerbForm=Inf|Voice=Act': {POS: VERB, 'Aspect': 'Imp', 'VerbForm': 'Inf', 'Voice': 'Act'},
'VERB__Aspect=Imp|VerbForm=Inf|Voice=Mid': {POS: VERB, 'Aspect': 'Imp', 'VerbForm': 'Inf', 'Voice': 'Mid'},
'VERB__Aspect=Imp|VerbForm=Inf|Voice=Pass': {POS: VERB, 'Aspect': 'Imp', 'VerbForm': 'Inf', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Acc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Acc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Acc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Acc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Acc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Acc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Acc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Dat|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Dat|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Dat|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Dat|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Dat|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Dat|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Dat|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Dat|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Dat|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Dat|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Dat|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Dat|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Dat', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Gen|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Gen|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Gen|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Gen|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Gen|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Gen|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Gen|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Gen|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Gen|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Gen|Number=Plur|Tense=Fut|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Fut', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Gen|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Gen|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Gen|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Gen', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Ins|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Ins|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Ins|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Ins|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Ins|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Ins|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Ins|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Ins|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Ins|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Ins|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Ins|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Ins|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Ins', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Loc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Loc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Loc|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Loc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Loc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Loc|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Loc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Loc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Loc|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Loc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Loc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Loc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Loc', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Nom|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Nom|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Nom|Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Nom|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Nom|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Nom|Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Nom|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Nom|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Nom|Gender=Neut|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Case=Nom|Number=Plur|Tense=Past|VerbForm=Part|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Act'},
'VERB__Aspect=Perf|Case=Nom|Number=Plur|Tense=Past|VerbForm=Part|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Case=Nom|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Case': 'Nom', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Fem', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Fem', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Gender=Fem|Number=Sing|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Fem', 'Number': 'Sing', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Masc', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Masc', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Gender=Masc|Number=Sing|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Masc', 'Number': 'Sing', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Gender=Neut|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Neut', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Gender=Neut|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Neut', 'Mood': 'Ind', 'Number': 'Sing', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Gender=Neut|Number=Sing|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Gender': 'Neut', 'Number': 'Sing', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Mood=Imp|Number=Plur|Person=1|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Imp', 'Number': 'Plur', 'Person': '1', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Imp|Number=Plur|Person=2|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Imp', 'Number': 'Plur', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Imp|Number=Plur|Person=2|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Imp', 'Number': 'Plur', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Imp|Number=Sing|Person=2|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Imp', 'Number': 'Sing', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Imp|Number=Sing|Person=2|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Imp', 'Number': 'Sing', 'Person': '2', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '1', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '1', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Person=2|Tense=Fut|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '2', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Person=2|Tense=Fut|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '2', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '3', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '3', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Person': '3', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Tense=Past|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Ind|Number=Plur|Tense=Past|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Plur', 'Tense': 'Past', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Ind|Number=Sing|Person=1|Tense=Fut|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '1', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Ind|Number=Sing|Person=1|Tense=Fut|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '1', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Ind|Number=Sing|Person=2|Tense=Fut|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '2', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Ind|Number=Sing|Person=2|Tense=Fut|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '2', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '3', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Act'},
'VERB__Aspect=Perf|Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Mood': 'Ind', 'Number': 'Sing', 'Person': '3', 'Tense': 'Fut', 'VerbForm': 'Fin', 'Voice': 'Mid'},
'VERB__Aspect=Perf|Number=Plur|Tense=Past|Variant=Short|VerbForm=Part|Voice=Pass': {POS: VERB, 'Aspect': 'Perf', 'Number': 'Plur', 'Tense': 'Past', 'Variant': 'Short', 'VerbForm': 'Part', 'Voice': 'Pass'},
'VERB__Aspect=Perf|Tense=Past|VerbForm=Conv|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'Tense': 'Past', 'VerbForm': 'Conv', 'Voice': 'Act'},
'VERB__Aspect=Perf|Tense=Past|VerbForm=Conv|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'Tense': 'Past', 'VerbForm': 'Conv', 'Voice': 'Mid'},
'VERB__Aspect=Perf|VerbForm=Inf|Voice=Act': {POS: VERB, 'Aspect': 'Perf', 'VerbForm': 'Inf', 'Voice': 'Act'},
'VERB__Aspect=Perf|VerbForm=Inf|Voice=Mid': {POS: VERB, 'Aspect': 'Perf', 'VerbForm': 'Inf', 'Voice': 'Mid'},
'VERB__Voice=Act': {POS: VERB, 'Voice': 'Act'},
'VERB___': {POS: VERB},
'X__Foreign=Yes': {POS: X, 'Foreign': 'Yes'},
'X___': {POS: X},
}
| 160.001366
| 261
| 0.624397
| 16,006
| 117,121
| 4.477446
| 0.004998
| 0.113024
| 0.111015
| 0.084391
| 0.987595
| 0.972246
| 0.967
| 0.898404
| 0.874529
| 0.63782
| 0
| 0.001779
| 0.102458
| 117,121
| 731
| 262
| 160.220246
| 0.679969
| 0.000102
| 0
| 0
| 0
| 0.414835
| 0.633463
| 0.357417
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0.002747
| 0
| 0.002747
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
13aa2544e109ac5c75ea4af8976cf0443bfe346f
| 2,001
|
py
|
Python
|
test/test_include_exclude.py
|
vrajat/dbcat
|
62e00e1f901dfdcbbd48ce67e55ce10f25a27414
|
[
"MIT"
] | 8
|
2021-01-08T11:15:53.000Z
|
2021-11-19T13:07:40.000Z
|
test/test_include_exclude.py
|
vrajat/dbcat
|
62e00e1f901dfdcbbd48ce67e55ce10f25a27414
|
[
"MIT"
] | 48
|
2020-12-23T05:09:13.000Z
|
2021-12-19T06:24:53.000Z
|
test/test_include_exclude.py
|
vrajat/dbcat
|
62e00e1f901dfdcbbd48ce67e55ce10f25a27414
|
[
"MIT"
] | 2
|
2020-12-11T05:11:02.000Z
|
2021-07-17T15:41:20.000Z
|
from dbcat.api import scan_sources
def test_pull_include_table(setup_catalog_and_data):
    """A single include regex pulls only the matching table from the source."""
    cat = setup_catalog_and_data
    scan_sources(cat, ["sqlite_db"], include_table_regex=["full.*"])
    with cat.managed_session:
        src = cat.get_source("sqlite_db")
        assert src is not None
        schemas = src.schemata
        assert len(schemas) == 1
        pulled = schemas[0].tables
        assert len(pulled) == 1
        assert pulled[0].name == "full_pii"
def test_pull_include_table_list(setup_catalog_and_data):
    """Multiple include regexes pull every table matching any pattern."""
    cat = setup_catalog_and_data
    scan_sources(cat, ["sqlite_db"], include_table_regex=["full.*", "partial.*"])
    with cat.managed_session:
        src = cat.get_source("sqlite_db")
        assert src is not None
        schemas = src.schemata
        assert len(schemas) == 1
        pulled = schemas[0].tables
        assert len(pulled) == 2
        assert pulled[0].name == "full_pii"
        assert pulled[1].name == "partial_pii"
def test_pull_exclude_table(setup_catalog_and_data):
    """Exclude regexes drop every matching table, leaving the rest."""
    cat = setup_catalog_and_data
    scan_sources(cat, ["pg"], exclude_table_regex=["full.*", "partial.*"])
    with cat.managed_session:
        src = cat.get_source("pg")
        assert src is not None
        schemas = src.schemata
        assert len(schemas) == 1
        remaining = schemas[0].tables
        assert len(remaining) == 1
        assert remaining[0].name == "no_pii"
def test_pull_exclude_table_list(setup_catalog_and_data):
    """A single exclude regex removes only the tables it matches."""
    cat = setup_catalog_and_data
    scan_sources(cat, ["pg"], exclude_table_regex=["full.*"])
    with cat.managed_session:
        src = cat.get_source("pg")
        assert src is not None
        schemas = src.schemata
        assert len(schemas) == 1
        remaining = schemas[0].tables
        assert len(remaining) == 2
        assert remaining[0].name == "no_pii"
        assert remaining[1].name == "partial_pii"
| 27.791667
| 85
| 0.656672
| 254
| 2,001
| 4.901575
| 0.161417
| 0.077108
| 0.096386
| 0.122088
| 0.976707
| 0.947791
| 0.904418
| 0.856225
| 0.856225
| 0.856225
| 0
| 0.011757
| 0.234883
| 2,001
| 71
| 86
| 28.183099
| 0.801437
| 0
| 0
| 0.808511
| 0
| 0
| 0.067966
| 0
| 0
| 0
| 0
| 0
| 0.382979
| 1
| 0.085106
| false
| 0
| 0.021277
| 0
| 0.106383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b96c639afc717cea9960c0953c14bc8fa4ca2950
| 210
|
py
|
Python
|
backend/product/admin.py
|
Qlas/ztziwb
|
24d9ec804e93d2137c94d5af1fd6e5a67fa0eb03
|
[
"MIT"
] | null | null | null |
backend/product/admin.py
|
Qlas/ztziwb
|
24d9ec804e93d2137c94d5af1fd6e5a67fa0eb03
|
[
"MIT"
] | null | null | null |
backend/product/admin.py
|
Qlas/ztziwb
|
24d9ec804e93d2137c94d5af1fd6e5a67fa0eb03
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Cart, CartProduct, Category, Product
# Expose the product app's models in the Django admin (same registration
# order as before: Category, Product, CartProduct, Cart).
for _model in (Category, Product, CartProduct, Cart):
    admin.site.register(_model)
| 23.333333
| 56
| 0.819048
| 28
| 210
| 6.142857
| 0.428571
| 0.209302
| 0.395349
| 0.27907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080952
| 210
| 8
| 57
| 26.25
| 0.891192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
b99e5fcf43b816a2c4c6a137c3fed0ebbfca10a5
| 11,899
|
py
|
Python
|
pieces.py
|
AhanM/ChessAI
|
010b743d6625ade1a192eac4c4fcf013a559251f
|
[
"MIT"
] | 1
|
2021-05-05T03:37:58.000Z
|
2021-05-05T03:37:58.000Z
|
pieces.py
|
AhanM/ChessAI
|
010b743d6625ade1a192eac4c4fcf013a559251f
|
[
"MIT"
] | null | null | null |
pieces.py
|
AhanM/ChessAI
|
010b743d6625ade1a192eac4c4fcf013a559251f
|
[
"MIT"
] | null | null | null |
import util
from action import Action
class Piece(object):
    """Base class for chess pieces.

    Holds the piece's color, board position ``(x, y)`` and index within its
    side's piece list. Subclasses override ``toString`` (one-letter symbol)
    and ``validMoves`` (list of destination squares).
    """

    def __init__(self, color, pos, index):
        # The original signature used Python 2 tuple-parameter unpacking
        # (``def __init__(self, color, (x, y), index)``), which is a syntax
        # error on Python 3 (PEP 3113). Callers still pass the tuple
        # positionally, so the external interface is unchanged.
        super(Piece, self).__init__()
        self.color = color
        self.x, self.y = pos
        self.pos = (self.x, self.y)
        self.index = index
        # Previous square; None until the piece first moves. Pawns read this
        # on enemy pawns to detect en passant.
        self.lastpos = None

    def validMoves(self, config):
        """Return valid destination squares; the abstract base has none."""
        return []

    def toString(self):
        """One-letter symbol; 'X' marks the abstract base piece."""
        return "X"
class Pawn(Piece):
    """Pawn: advances one (or two from the start rank), captures diagonally,
    and supports en passant against a pawn that just double-stepped."""

    def __init__(self, color, pos, index):
        # Python 3-compatible signature; callers still pass the (x, y) tuple.
        Piece.__init__(self, color, pos, index)
        self.points = 1

    def toString(self):
        return "P"

    def validMoves(self, config):
        """Return the pawn's candidate destination squares.

        Fixes over the original:
        - ``valid_moves.remove`` inside ``for ... in valid_moves`` skipped
          elements; the filter now builds a new list;
        - en passant guarded against ``lastpos is None`` (its initial value),
          which previously raised TypeError on an unmoved enemy pawn.
        """
        x, y = self.pos
        self.player_pieces_pos = config.getPlayerPositions(self.color)
        self.enemy_pieces_pos = config.getEnemyPositions(self.color)
        self.enemy_pieces = config.getEnemyPieces(self.color)
        occupied = self.player_pieces_pos + self.enemy_pieces_pos

        # White advances in +y from rank 1; black in -y from rank 6.
        fwd, start_rank = (1, 1) if self.color == "White" else (-1, 6)

        valid_moves = []
        # Single advance onto an empty square.
        if util.isNotOutofBounds(x, y + fwd) and (x, y + fwd) not in occupied:
            valid_moves.append((x, y + fwd))
        # Double advance from the start rank; both squares must be empty.
        if (y == start_rank and (x, y + fwd) not in occupied
                and (x, y + 2 * fwd) not in occupied):
            valid_moves.append((x, y + 2 * fwd))
        # Normal diagonal captures.
        for dx in (1, -1):
            if (util.isNotOutofBounds(x + dx, y + fwd)
                    and (x + dx, y + fwd) in self.enemy_pieces_pos):
                valid_moves.append((x + dx, y + fwd))
        # En passant: an adjacent enemy pawn whose last move was a double step.
        for dx in (1, -1):
            if (util.isNotOutofBounds(x + dx, y)
                    and (x + dx, y) in self.enemy_pieces_pos):
                idx = util.getIndexof(self.enemy_pieces_pos, (x + dx, y))
                enemy = self.enemy_pieces[idx]
                if (enemy.toString() == "P" and enemy.lastpos is not None
                        and abs(enemy.lastpos[1] - enemy.pos[1]) == 2):
                    valid_moves.append((x + dx, y + fwd))

        # Collision filter (build a new list instead of mutating in place).
        kept = []
        for (newx, newy) in valid_moves:
            action = Action(self, (newx, newy), config)
            # Reaching the last rank promotes the pawn.
            if newy == 7 or newy == 0:
                action.promotion = True
            if action.isValid():
                kept.append((newx, newy))
        return kept
class Knight(Piece):
    """Knight: jumps in an L shape -- all eight (+-1,+-2)/(+-2,+-1) offsets."""

    def __init__(self, color, pos, index):
        # Python 3-compatible signature; callers still pass the (x, y) tuple.
        Piece.__init__(self, color, pos, index)
        self.points = 3

    def toString(self):
        return "N"

    def validMoves(self, config):
        """Return the knight's candidate destination squares.

        The original sign-flipping loop produced only four of the eight
        offsets its own comment listed (with duplicates); the offsets are now
        enumerated explicitly. The collision filter also no longer removes
        items from the list it is iterating.
        """
        self.player_pieces_pos = config.getPlayerPositions(self.color)
        self.enemy_pieces_pos = config.getEnemyPositions(self.color)
        x, y = self.pos
        offsets = ((1, 2), (1, -2), (-1, 2), (-1, -2),
                   (2, 1), (2, -1), (-2, 1), (-2, -1))
        candidates = [(x + dx, y + dy) for dx, dy in offsets
                      if util.isNotOutofBounds(x + dx, y + dy)]
        return [move for move in candidates
                if Action(self, move, config).isValid()]
class Bishop(Piece):
    """Bishop: slides along the four diagonals until blocked."""

    def __init__(self, color, pos, index):
        # Python 3-compatible signature; callers still pass the (x, y) tuple.
        Piece.__init__(self, color, pos, index)
        self.points = 3

    def toString(self):
        return "B"

    def validMoves(self, config):
        """Return the bishop's candidate destination squares.

        Fixes over the original:
        - rays start at offset 1; offset 0 is the bishop's own square, which
          is in player_pieces_pos and aborted every ray immediately;
        - the (x-i, y-i) ray tested (x+i, y+i) for enemy blockers
          (copy-paste bug);
        - the collision filter builds a new list instead of removing from
          the list being iterated.
        """
        self.player_pieces_pos = config.getPlayerPositions(self.color)
        self.enemy_pieces_pos = config.getEnemyPositions(self.color)
        x, y = self.pos
        valid_moves = []
        for dx, dy in ((1, 1), (-1, -1), (-1, 1), (1, -1)):
            for i in range(1, 8):
                sq = (x + i * dx, y + i * dy)
                if not util.isNotOutofBounds(sq[0], sq[1]):
                    break
                if sq in self.player_pieces_pos:
                    break  # own piece blocks the ray; cannot land on it
                valid_moves.append(sq)
                if sq in self.enemy_pieces_pos:
                    break  # capture square ends the ray
        return [move for move in valid_moves
                if Action(self, move, config).isValid()]
class Rook(Piece):
    """Rook: slides along its rank and file until blocked."""

    def __init__(self, color, pos, index):
        # Python 3-compatible signature; callers still pass the (x, y) tuple.
        Piece.__init__(self, color, pos, index)
        self.points = 5

    def toString(self):
        return "R"

    def validMoves(self, config):
        """Return the rook's candidate destination squares.

        Fixes over the original:
        - the negative-direction loops used ``range(..., 0, -1)`` and never
          reached row/column 0;
        - the positive-direction loops broke on an enemy square *before*
          appending it, making captures impossible in those directions.
        All four rays now share one symmetric rule: stop before an own
        piece, stop on (and include) an enemy piece.
        """
        self.player_pieces_pos = config.getPlayerPositions(self.color)
        self.enemy_pieces_pos = config.getEnemyPositions(self.color)
        x, y = self.pos
        valid_moves = []
        for dx, dy in ((-1, 0), (1, 0), (0, -1), (0, 1)):
            for i in range(1, 8):
                sq = (x + i * dx, y + i * dy)
                if not util.isNotOutofBounds(sq[0], sq[1]):
                    break
                if sq in self.player_pieces_pos:
                    break
                valid_moves.append(sq)
                if sq in self.enemy_pieces_pos:
                    break
        # NOTE(review): the original kept the Action collision filter
        # commented out for the rook; preserved here -- confirm whether it
        # should be enabled to match the other pieces.
        return valid_moves
class Queen(Piece):
    """Queen: slides along all eight rook + bishop directions until blocked."""

    def __init__(self, color, pos, index):
        # Python 3-compatible signature; callers still pass the (x, y) tuple.
        Piece.__init__(self, color, pos, index)
        self.points = 9

    def toString(self):
        return "Q"

    def validMoves(self, config):
        """Return the queen's candidate destination squares.

        Inherits and fixes the same defects as Rook and Bishop:
        - negative straight rays never reached row/column 0
          (``range(..., 0, -1)``);
        - positive straight rays broke on an enemy square before appending
          it, preventing captures in those directions;
        - diagonal rays started at offset 0 (the queen's own square) and the
          (x-i, y-i) diagonal tested (x+i, y+i) for enemy blockers.
        All eight rays now use one symmetric rule: stop before an own piece,
        stop on (and include) an enemy piece.
        """
        self.player_pieces_pos = config.getPlayerPositions(self.color)
        self.enemy_pieces_pos = config.getEnemyPositions(self.color)
        x, y = self.pos
        directions = ((-1, 0), (1, 0), (0, -1), (0, 1),
                      (1, 1), (-1, -1), (-1, 1), (1, -1))
        valid_moves = []
        for dx, dy in directions:
            for i in range(1, 8):
                sq = (x + i * dx, y + i * dy)
                if not util.isNotOutofBounds(sq[0], sq[1]):
                    break
                if sq in self.player_pieces_pos:
                    break
                valid_moves.append(sq)
                if sq in self.enemy_pieces_pos:
                    break
        # NOTE(review): the original kept the Action collision filter
        # commented out for the queen; preserved here -- confirm whether it
        # should be enabled to match the other pieces.
        return valid_moves
class King(Piece):
    """The king piece: may step one square in any direction.

    Carries the highest point value so evaluation functions treat its
    loss as decisive.
    """

    def __init__(self, color, pos, index):
        Piece.__init__(self, color, pos, index)
        self.points = 1000

    def toString(self):
        return "K"

    def validMoves(self, config):
        """Return the list of in-bounds, non-colliding one-square moves."""
        px, py = self.pos
        # The eight neighbouring squares, in the same order the original
        # implementation produced them.
        offsets = [(1, 0), (0, 1), (1, 1), (-1, 0), (0, -1), (-1, -1), (1, -1), (-1, 1)]
        candidates = [(px + dx, py + dy) for dx, dy in offsets]
        moves = []
        for target in candidates:
            if not util.isNotOutofBounds(target[0], target[1]):
                continue
            # Action performs the collision/legality check against the board config.
            if Action(self, target, config).isValid():
                moves.append(target)
        # Castling ?
        return moves
| 28.881068
| 141
| 0.675603
| 1,939
| 11,899
| 4.012378
| 0.067045
| 0.080977
| 0.096401
| 0.099486
| 0.88072
| 0.878406
| 0.871594
| 0.866581
| 0.86581
| 0.861954
| 0
| 0.015412
| 0.192957
| 11,899
| 412
| 142
| 28.881068
| 0.794752
| 0.208841
| 0
| 0.754941
| 0
| 0
| 0.001757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.007905
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b9eae3d0143d7c1cf195b1be1e764a3dd10886ff
| 134
|
py
|
Python
|
riot_transmute/v5/__init__.py
|
mrtolkien/riotwatcher_dto
|
777455c45159f177d3a7ba3956043ff26f625c30
|
[
"MIT"
] | 8
|
2020-10-01T18:01:15.000Z
|
2022-02-22T22:51:30.000Z
|
riot_transmute/v5/__init__.py
|
mrtolkien/riotwatcher_dto
|
777455c45159f177d3a7ba3956043ff26f625c30
|
[
"MIT"
] | 2
|
2021-12-15T23:02:02.000Z
|
2022-01-25T06:07:53.000Z
|
riot_transmute/v5/__init__.py
|
mrtolkien/riotwatcher_dto
|
777455c45159f177d3a7ba3956043ff26f625c30
|
[
"MIT"
] | 4
|
2020-07-30T23:24:06.000Z
|
2022-02-21T19:26:01.000Z
|
from riot_transmute.v5.match_to_game import match_to_game
from riot_transmute.v5.match_timeline_to_game import match_timeline_to_game
| 44.666667
| 75
| 0.910448
| 24
| 134
| 4.583333
| 0.375
| 0.218182
| 0.309091
| 0.345455
| 0.436364
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.059701
| 134
| 2
| 76
| 67
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dbeeba5f6b16adef7822aaf8cf2114cc4c2b3976
| 230,381
|
py
|
Python
|
sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/v2016_10_01/aio/operations/_key_vault_client_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2019-08-23T21:14:00.000Z
|
2021-09-07T18:32:34.000Z
|
sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/v2016_10_01/aio/operations/_key_vault_client_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 4
|
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/v2016_10_01/aio/operations/_key_vault_client_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 1
|
2019-04-05T18:17:43.000Z
|
2019-04-05T18:17:43.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from ... import models as _models
T = TypeVar('T')
# Signature of the optional `cls` callback every operation accepts via kwargs:
# it is invoked as cls(pipeline_response, deserialized_body, headers_dict)
# (the headers dict is passed as {} by these operations) and its return value
# replaces the default deserialized result.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class KeyVaultClientOperationsMixin:
    async def create_key(
        self,
        vault_base_url: str,
        key_name: str,
        parameters: "_models.KeyCreateParameters",
        **kwargs
    ) -> "_models.KeyBundle":
        """Creates a new key, stores it, then returns key parameters and attributes to the client.

        The create key operation can be used to create any key type in Azure Key Vault. If the named
        key already exists, Azure Key Vault creates a new version of the key. It requires the
        keys/create permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: The name for the new key. The system will generate the version name for the
         new key.
        :type key_name: str
        :param parameters: The parameters to create a key.
        :type parameters: ~azure.keyvault.v2016_10_01.models.KeyCreateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: KeyBundle, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyBundle"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_key.metadata['url']  # type: ignore
        path_format_arguments = {
            # skip_quote=True: the vault base URL is a full endpoint and must not be percent-encoded.
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            # New key names are restricted to alphanumerics and dashes.
            'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'KeyCreateParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; any other status is surfaced as an
        # HttpResponseError carrying the best-effort-deserialized KeyVaultError body.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('KeyBundle', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_key.metadata = {'url': '/keys/{key-name}/create'}  # type: ignore
    async def import_key(
        self,
        vault_base_url: str,
        key_name: str,
        parameters: "_models.KeyImportParameters",
        **kwargs
    ) -> "_models.KeyBundle":
        """Imports an externally created key, stores it, and returns key parameters and attributes to the client.

        The import key operation may be used to import any key type into an Azure Key Vault. If the
        named key already exists, Azure Key Vault creates a new version of the key. This operation
        requires the keys/import permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: Name for the imported key.
        :type key_name: str
        :param parameters: The parameters to import a key.
        :type parameters: ~azure.keyvault.v2016_10_01.models.KeyImportParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: KeyBundle, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyBundle"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.import_key.metadata['url']  # type: ignore
        path_format_arguments = {
            # skip_quote=True: the vault base URL is a full endpoint and must not be percent-encoded.
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            # Imported key names are restricted to alphanumerics and dashes.
            'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'KeyImportParameters')
        body_content_kwargs['content'] = body_content
        # Import uses PUT (idempotent upsert of the named key), unlike create's POST.
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; any other status is surfaced as an
        # HttpResponseError carrying the best-effort-deserialized KeyVaultError body.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('KeyBundle', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    import_key.metadata = {'url': '/keys/{key-name}'}  # type: ignore
    async def delete_key(
        self,
        vault_base_url: str,
        key_name: str,
        **kwargs
    ) -> "_models.DeletedKeyBundle":
        """Deletes a key of any type from storage in Azure Key Vault.

        The delete key operation cannot be used to remove individual versions of a key. This operation
        removes the cryptographic material associated with the key, which means the key is not usable
        for Sign/Verify, Wrap/Unwrap or Encrypt/Decrypt operations. This operation requires the
        keys/delete permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: The name of the key to delete.
        :type key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeletedKeyBundle, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.DeletedKeyBundle
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedKeyBundle"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        # Construct URL
        url = self.delete_key.metadata['url']  # type: ignore
        path_format_arguments = {
            # skip_quote=True: the vault base URL is a full endpoint and must not be percent-encoded.
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            # No name pattern here: the key already exists, so no format validation is applied.
            'key-name': self._serialize.url("key_name", key_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; any other status is surfaced as an
        # HttpResponseError carrying the best-effort-deserialized KeyVaultError body.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('DeletedKeyBundle', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    delete_key.metadata = {'url': '/keys/{key-name}'}  # type: ignore
    async def update_key(
        self,
        vault_base_url: str,
        key_name: str,
        key_version: str,
        parameters: "_models.KeyUpdateParameters",
        **kwargs
    ) -> "_models.KeyBundle":
        """The update key operation changes specified attributes of a stored key and can be applied to any key type and key version stored in Azure Key Vault.

        In order to perform this operation, the key must already exist in the Key Vault. Note: The
        cryptographic material of a key itself cannot be changed. This operation requires the
        keys/update permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: The name of key to update.
        :type key_name: str
        :param key_version: The version of the key to update.
        :type key_version: str
        :param parameters: The parameters of the key to update.
        :type parameters: ~azure.keyvault.v2016_10_01.models.KeyUpdateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: KeyBundle, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyBundle"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update_key.metadata['url']  # type: ignore
        path_format_arguments = {
            # skip_quote=True: the vault base URL is a full endpoint and must not be percent-encoded.
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'key-name': self._serialize.url("key_name", key_name, 'str'),
            'key-version': self._serialize.url("key_version", key_version, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'KeyUpdateParameters')
        body_content_kwargs['content'] = body_content
        # PATCH: only the attributes present in the body are changed.
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; any other status is surfaced as an
        # HttpResponseError carrying the best-effort-deserialized KeyVaultError body.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('KeyBundle', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update_key.metadata = {'url': '/keys/{key-name}/{key-version}'}  # type: ignore
    async def get_key(
        self,
        vault_base_url: str,
        key_name: str,
        key_version: str,
        **kwargs
    ) -> "_models.KeyBundle":
        """Gets the public part of a stored key.

        The get key operation is applicable to all key types. If the requested key is symmetric, then
        no key material is released in the response. This operation requires the keys/get permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: The name of the key to get.
        :type key_name: str
        :param key_version: Adding the version parameter retrieves a specific version of a key.
        :type key_version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: KeyBundle, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyBundle"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        # Construct URL
        url = self.get_key.metadata['url']  # type: ignore
        path_format_arguments = {
            # skip_quote=True: the vault base URL is a full endpoint and must not be percent-encoded.
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'key-name': self._serialize.url("key_name", key_name, 'str'),
            'key-version': self._serialize.url("key_version", key_version, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; any other status is surfaced as an
        # HttpResponseError carrying the best-effort-deserialized KeyVaultError body.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('KeyBundle', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_key.metadata = {'url': '/keys/{key-name}/{key-version}'}  # type: ignore
    def get_key_versions(
        self,
        vault_base_url: str,
        key_name: str,
        maxresults: Optional[int] = None,
        **kwargs
    ) -> AsyncIterable["_models.KeyListResult"]:
        """Retrieves a list of individual key versions with the same key name.

        The full key identifier, attributes, and tags are provided in the response. This operation
        requires the keys/list permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: The name of the key.
        :type key_name: str
        :param maxresults: Maximum number of results to return in a page. If not specified the service
         will return up to 25 results.
        :type maxresults: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either KeyListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.KeyListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyListResult"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the request for one page: the metadata URL for the first page,
            # or the service-supplied continuation link for subsequent pages.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.get_key_versions.metadata['url']  # type: ignore
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                    'key-name': self._serialize.url("key_name", key_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # The continuation link already carries its own query string, so
                # no query parameters are added here.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                    'key-name': self._serialize.url("key_name", key_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token or None, items).
            deserialized = self._deserialize('KeyListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; any non-200 status raises before items are yielded.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    get_key_versions.metadata = {'url': '/keys/{key-name}/versions'}  # type: ignore
    def get_keys(
        self,
        vault_base_url: str,
        maxresults: Optional[int] = None,
        **kwargs
    ) -> AsyncIterable["_models.KeyListResult"]:
        """List keys in the specified vault.

        Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the
        public part of a stored key. The LIST operation is applicable to all key types, however only
        the base key identifier, attributes, and tags are provided in the response. Individual versions
        of a key are not listed in the response. This operation requires the keys/list permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If not specified the service
         will return up to 25 results.
        :type maxresults: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either KeyListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.KeyListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyListResult"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the request for one page: the metadata URL for the first page,
            # or the service-supplied continuation link for subsequent pages.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.get_keys.metadata['url']  # type: ignore
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # The continuation link already carries its own query string, so
                # no query parameters are added here.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token or None, items).
            deserialized = self._deserialize('KeyListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; any non-200 status raises before items are yielded.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    get_keys.metadata = {'url': '/keys'}  # type: ignore
    async def backup_key(
        self,
        vault_base_url: str,
        key_name: str,
        **kwargs
    ) -> "_models.BackupKeyResult":
        """Requests that a backup of the specified key be downloaded to the client.

        The Key Backup operation exports a key from Azure Key Vault in a protected form. Note that this
        operation does NOT return key material in a form that can be used outside the Azure Key Vault
        system, the returned key material is either protected to a Azure Key Vault HSM or to Azure Key
        Vault itself. The intent of this operation is to allow a client to GENERATE a key in one Azure
        Key Vault instance, BACKUP the key, and then RESTORE it into another Azure Key Vault instance.
        The BACKUP operation may be used to export, in protected form, any key type from Azure Key
        Vault. Individual versions of a key cannot be backed up. BACKUP / RESTORE can be performed
        within geographical boundaries only; meaning that a BACKUP from one geographical area cannot be
        restored to another geographical area. For example, a backup from the US geographical area
        cannot be restored in an EU geographical area. This operation requires the key/backup
        permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: The name of the key.
        :type key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: BackupKeyResult, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.BackupKeyResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BackupKeyResult"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        # Construct URL
        url = self.backup_key.metadata['url']  # type: ignore
        path_format_arguments = {
            # skip_quote=True: the vault base URL is a full endpoint and must not be percent-encoded.
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'key-name': self._serialize.url("key_name", key_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # POST with no body: the backup blob is produced entirely server-side.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; any other status is surfaced as an
        # HttpResponseError carrying the best-effort-deserialized KeyVaultError body.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('BackupKeyResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    backup_key.metadata = {'url': '/keys/{key-name}/backup'}  # type: ignore
    async def restore_key(
        self,
        vault_base_url: str,
        parameters: "_models.KeyRestoreParameters",
        **kwargs
    ) -> "_models.KeyBundle":
        """Restores a backed up key to a vault.

        Imports a previously backed up key into Azure Key Vault, restoring the key, its key identifier,
        attributes and access control policies. The RESTORE operation may be used to import a
        previously backed up key. Individual versions of a key cannot be restored. The key is restored
        in its entirety with the same key name as it had when it was backed up. If the key name is not
        available in the target Key Vault, the RESTORE operation will be rejected. While the key name
        is retained during restore, the final key identifier will change if the key is restored to a
        different vault. Restore will restore all versions and preserve version identifiers. The
        RESTORE operation is subject to security constraints: The target Key Vault must be owned by the
        same Microsoft Azure Subscription as the source Key Vault The user must have RESTORE permission
        in the target Key Vault. This operation requires the keys/restore permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param parameters: The parameters to restore the key.
        :type parameters: ~azure.keyvault.v2016_10_01.models.KeyRestoreParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: KeyBundle, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyBundle"]
        # Default mapping from HTTP status to typed exception; callers may extend
        # or override it via an 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.restore_key.metadata['url']  # type: ignore
        path_format_arguments = {
            # skip_quote=True: the vault base URL is a full endpoint and must not be percent-encoded.
            # The key name comes from the backup blob, not the URL, so only the vault is templated.
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'KeyRestoreParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; any other status is surfaced as an
        # HttpResponseError carrying the best-effort-deserialized KeyVaultError body.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('KeyBundle', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    restore_key.metadata = {'url': '/keys/restore'}  # type: ignore
async def encrypt(
    self,
    vault_base_url: str,
    key_name: str,
    key_version: str,
    parameters: "_models.KeyOperationsParameters",
    **kwargs
) -> "_models.KeyOperationResult":
    """Encrypts an arbitrary sequence of bytes using an encryption key that is stored in a key vault.

    Only a single block of data is supported; its maximum size depends on the target key and
    algorithm. ENCRYPT is strictly necessary only for symmetric keys, but is also offered for
    asymmetric keys as a convenience for callers holding a key reference without the public key
    material. Requires the keys/encrypt permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param parameters: The parameters for the encryption operation.
    :type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: KeyOperationResult, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyOperationResult"]
    # Common auth/not-found/conflict codes map to typed exceptions; callers may extend the map.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template from the operation metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.encrypt.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'KeyOperationsParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('KeyOperationResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
encrypt.metadata = {'url': '/keys/{key-name}/{key-version}/encrypt'}  # type: ignore
async def decrypt(
    self,
    vault_base_url: str,
    key_name: str,
    key_version: str,
    parameters: "_models.KeyOperationsParameters",
    **kwargs
) -> "_models.KeyOperationResult":
    """Decrypts a single block of encrypted data.

    The reverse of the ENCRYPT operation: decrypts one well-formed ciphertext block with the
    target key and the specified algorithm. Applies to both asymmetric and symmetric keys since
    the private portion of the key is used. Requires the keys/decrypt permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param parameters: The parameters for the decryption operation.
    :type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: KeyOperationResult, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyOperationResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.decrypt.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'KeyOperationsParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('KeyOperationResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
decrypt.metadata = {'url': '/keys/{key-name}/{key-version}/decrypt'}  # type: ignore
async def sign(
    self,
    vault_base_url: str,
    key_name: str,
    key_version: str,
    parameters: "_models.KeySignParameters",
    **kwargs
) -> "_models.KeyOperationResult":
    """Creates a signature from a digest using the specified key.

    SIGN applies to asymmetric and symmetric keys stored in Azure Key Vault since it uses the
    private portion of the key. Requires the keys/sign permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param parameters: The parameters for the signing operation.
    :type parameters: ~azure.keyvault.v2016_10_01.models.KeySignParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: KeyOperationResult, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyOperationResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.sign.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'KeySignParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('KeyOperationResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
sign.metadata = {'url': '/keys/{key-name}/{key-version}/sign'}  # type: ignore
async def verify(
    self,
    vault_base_url: str,
    key_name: str,
    key_version: str,
    parameters: "_models.KeyVerifyParameters",
    **kwargs
) -> "_models.KeyVerifyResult":
    """Verifies a signature using a specified key.

    VERIFY applies to symmetric keys stored in Azure Key Vault. It is not strictly necessary
    for asymmetric keys (verification can be done with the public portion of the key) but is
    offered for callers holding only a key reference. Requires the keys/verify permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param parameters: The parameters for verify operations.
    :type parameters: ~azure.keyvault.v2016_10_01.models.KeyVerifyParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: KeyVerifyResult, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.KeyVerifyResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyVerifyResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.verify.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'KeyVerifyParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('KeyVerifyResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
verify.metadata = {'url': '/keys/{key-name}/{key-version}/verify'}  # type: ignore
async def wrap_key(
    self,
    vault_base_url: str,
    key_name: str,
    key_version: str,
    parameters: "_models.KeyOperationsParameters",
    **kwargs
) -> "_models.KeyOperationResult":
    """Wraps a symmetric key using a specified key.

    WRAP encrypts a symmetric key with a key-encryption key previously stored in the vault. It
    is strictly necessary only for symmetric keys, but is supported for asymmetric keys as a
    convenience for callers without access to the public key material. Requires the
    keys/wrapKey permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param parameters: The parameters for wrap operation.
    :type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: KeyOperationResult, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyOperationResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.wrap_key.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'KeyOperationsParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('KeyOperationResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
wrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/wrapkey'}  # type: ignore
async def unwrap_key(
    self,
    vault_base_url: str,
    key_name: str,
    key_version: str,
    parameters: "_models.KeyOperationsParameters",
    **kwargs
) -> "_models.KeyOperationResult":
    """Unwraps a symmetric key using the specified key that was initially used for wrapping that key.

    The reverse of the WRAP operation: decrypts a symmetric key using the target key-encryption
    key. Applies to asymmetric and symmetric keys since the private portion of the key is used.
    Requires the keys/unwrapKey permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param parameters: The parameters for the key operation.
    :type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: KeyOperationResult, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyOperationResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.unwrap_key.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'KeyOperationsParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('KeyOperationResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
unwrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/unwrapkey'}  # type: ignore
def get_deleted_keys(
    self,
    vault_base_url: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.DeletedKeyListResult"]:
    """Lists the deleted keys in the specified vault.

    Retrieves the keys in the vault as JSON Web Key structures containing the public part of
    each deleted key, including deletion-specific information. Applicable only to soft-delete
    enabled vaults; invoking it on any other vault returns an error. Requires the keys/list
    permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If not specified the
     service will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DeletedKeyListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedKeyListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedKeyListResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Shared pieces for both the first page and continuation pages.
        header_parameters = {
            'Accept': self._serialize.header("accept", accept, 'str'),
        }  # type: Dict[str, Any]
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        }
        if next_link:
            # Continuation: the service's next link already carries the query string.
            url = self._client.format_url(next_link, **path_format_arguments)
            return self._client.get(url, {}, header_parameters)
        # First page: expand the URL template and build the query.
        url = self._client.format_url(self.get_deleted_keys.metadata['url'], **path_format_arguments)  # type: ignore
        query_parameters = {}  # type: Dict[str, Any]
        if maxresults is not None:
            query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        return self._client.get(url, query_parameters, header_parameters)

    async def extract_data(pipeline_response):
        deserialized = self._deserialize('DeletedKeyListResult', pipeline_response)
        elements = deserialized.value
        if cls:
            elements = cls(elements)
        return deserialized.next_link or None, AsyncList(elements)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code != 200:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)
get_deleted_keys.metadata = {'url': '/deletedkeys'}  # type: ignore
async def get_deleted_key(
    self,
    vault_base_url: str,
    key_name: str,
    **kwargs
) -> "_models.DeletedKeyBundle":
    """Gets the public part of a deleted key.

    Applicable only to soft-delete enabled vaults; invoking it on any other vault returns an
    error. Requires the keys/get permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DeletedKeyBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.DeletedKeyBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedKeyBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.get_deleted_key.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('DeletedKeyBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'}  # type: ignore
async def purge_deleted_key(
    self,
    vault_base_url: str,
    key_name: str,
    **kwargs
) -> None:
    """Permanently deletes the specified key.

    Applicable only to soft-delete enabled vaults; invoking it on any other vault returns an
    error. Requires the keys/purge permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.purge_deleted_key.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Purge returns 204 No Content on success; there is no body to deserialize.
    if response.status_code != 204:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    if cls:
        return cls(pipeline_response, None, {})
purge_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'}  # type: ignore
async def recover_deleted_key(
    self,
    vault_base_url: str,
    key_name: str,
    **kwargs
) -> "_models.KeyBundle":
    """Recovers the deleted key to its latest version.

    Applicable to deleted keys in soft-delete enabled vaults: the deleted key is restored to
    its latest version under /keys. Attempting to recover a non-deleted key returns an error.
    This is the inverse of the delete operation on soft-delete enabled vaults. Requires the
    keys/recover permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the deleted key.
    :type key_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: KeyBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.recover_deleted_key.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # POST with no body: recovery is triggered purely by the URL.
    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('KeyBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
recover_deleted_key.metadata = {'url': '/deletedkeys/{key-name}/recover'}  # type: ignore
async def set_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    parameters: "_models.SecretSetParameters",
    **kwargs
) -> "_models.SecretBundle":
    """Sets a secret in a specified key vault.

    Adds a secret to the vault; if the named secret already exists, a new version of that
    secret is created. Requires the secrets/set permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param parameters: The parameters for setting the secret.
    :type parameters: ~azure.keyvault.v2016_10_01.models.SecretSetParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SecretBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Secret names are validated client-side against the service's naming pattern.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
    }
    url = self._client.format_url(self.set_secret.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'SecretSetParameters')

    request = self._client.put(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SecretBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
set_secret.metadata = {'url': '/secrets/{secret-name}'}  # type: ignore
async def delete_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    **kwargs
) -> "_models.DeletedSecretBundle":
    """Deletes a secret from a specified key vault.

    The DELETE operation applies to any secret stored in Azure Key Vault. DELETE cannot be applied
    to an individual version of a secret. This operation requires the secrets/delete permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DeletedSecretBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.DeletedSecretBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedSecretBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template declared in the operation metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
    }
    url = self._client.format_url(self.delete_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the DELETE call.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('DeletedSecretBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
delete_secret.metadata = {'url': '/secrets/{secret-name}'}  # type: ignore
async def update_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    secret_version: str,
    parameters: "_models.SecretUpdateParameters",
    **kwargs
) -> "_models.SecretBundle":
    """Updates the attributes associated with a specified secret in a given key vault.

    The UPDATE operation changes specified attributes of an existing stored secret. Attributes that
    are not specified in the request are left unchanged. The value of a secret itself cannot be
    changed. This operation requires the secrets/set permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param secret_version: The version of the secret.
    :type secret_version: str
    :param parameters: The parameters for update secret operation.
    :type parameters: ~azure.keyvault.v2016_10_01.models.SecretUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SecretBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the URL template with both the name and version path segments.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
        'secret-version': self._serialize.url("secret_version", secret_version, 'str'),
    }
    url = self._client.format_url(self.update_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the PATCH call.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the update payload and send the request.
    body_content = self._serialize.body(parameters, 'SecretUpdateParameters')
    request = self._client.patch(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SecretBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'}  # type: ignore
async def get_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    secret_version: str,
    **kwargs
) -> "_models.SecretBundle":
    """Get a specified secret from a given key vault.

    The GET operation is applicable to any secret stored in Azure Key Vault. This operation
    requires the secrets/get permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param secret_version: The version of the secret.
    :type secret_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SecretBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template with the name and version path segments.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
        'secret-version': self._serialize.url("secret_version", secret_version, 'str'),
    }
    url = self._client.format_url(self.get_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the GET call.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SecretBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'}  # type: ignore
def get_secrets(
    self,
    vault_base_url: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.SecretListResult"]:
    """List secrets in a specified key vault.

    The Get Secrets operation is applicable to the entire vault. However, only the base secret
    identifier and its attributes are provided in the response. Individual secret versions are not
    listed in the response. This operation requires the secrets/list permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If not specified, the service
     will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SecretListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SecretListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"
    # Builds the first-page request, or a follow-up request from the
    # service-provided continuation link.
    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.get_secrets.metadata['url']  # type: ignore
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # next_link already carries the query string; only the vault base
            # URL placeholder still needs to be substituted.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request
    # Deserializes one response page into (continuation token, item iterator).
    async def extract_data(pipeline_response):
        deserialized = self._deserialize('SecretListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)
    # Fetches a single page, raising on any non-200 status.
    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response
    return AsyncItemPaged(
        get_next, extract_data
    )
get_secrets.metadata = {'url': '/secrets'}  # type: ignore
def get_secret_versions(
    self,
    vault_base_url: str,
    secret_name: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.SecretListResult"]:
    """List all versions of the specified secret.

    The full secret identifier and attributes are provided in the response. No values are returned
    for the secrets. This operation requires the secrets/list permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param maxresults: Maximum number of results to return in a page. If not specified, the service
     will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SecretListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SecretListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"
    # Builds the first-page request, or a follow-up request from the
    # service-provided continuation link.
    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.get_secret_versions.metadata['url']  # type: ignore
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # next_link already carries the query string; only the path
            # placeholders still need to be substituted.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request
    # Deserializes one response page into (continuation token, item iterator).
    async def extract_data(pipeline_response):
        deserialized = self._deserialize('SecretListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)
    # Fetches a single page, raising on any non-200 status.
    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response
    return AsyncItemPaged(
        get_next, extract_data
    )
get_secret_versions.metadata = {'url': '/secrets/{secret-name}/versions'}  # type: ignore
def get_deleted_secrets(
    self,
    vault_base_url: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.DeletedSecretListResult"]:
    """Lists deleted secrets for the specified vault.

    The Get Deleted Secrets operation returns the secrets that have been deleted for a vault
    enabled for soft-delete. This operation requires the secrets/list permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If not specified, the service
     will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DeletedSecretListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedSecretListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedSecretListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"
    # Builds the first-page request, or a follow-up request from the
    # service-provided continuation link.
    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.get_deleted_secrets.metadata['url']  # type: ignore
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # next_link already carries the query string; only the vault base
            # URL placeholder still needs to be substituted.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request
    # Deserializes one response page into (continuation token, item iterator).
    async def extract_data(pipeline_response):
        deserialized = self._deserialize('DeletedSecretListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)
    # Fetches a single page, raising on any non-200 status.
    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response
    return AsyncItemPaged(
        get_next, extract_data
    )
get_deleted_secrets.metadata = {'url': '/deletedsecrets'}  # type: ignore
async def get_deleted_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    **kwargs
) -> "_models.DeletedSecretBundle":
    """Gets the specified deleted secret.

    The Get Deleted Secret operation returns the specified deleted secret along with its
    attributes. This operation requires the secrets/get permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DeletedSecretBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.DeletedSecretBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedSecretBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template declared in the operation metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
    }
    url = self._client.format_url(self.get_deleted_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the GET call.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('DeletedSecretBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'}  # type: ignore
async def purge_deleted_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    **kwargs
) -> None:
    """Permanently deletes the specified secret.

    The purge deleted secret operation removes the secret permanently, without the possibility of
    recovery. This operation can only be enabled on a soft-delete enabled vault. This operation
    requires the secrets/purge permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template declared in the operation metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
    }
    url = self._client.format_url(self.purge_deleted_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the DELETE call.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # A successful purge returns 204 No Content; there is no body to deserialize.
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    if cls:
        return cls(pipeline_response, None, {})
purge_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'}  # type: ignore
async def recover_deleted_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    **kwargs
) -> "_models.SecretBundle":
    """Recovers the deleted secret to the latest version.

    Recovers the deleted secret in the specified vault. This operation can only be performed on a
    soft-delete enabled vault. This operation requires the secrets/recover permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the deleted secret.
    :type secret_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SecretBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template declared in the operation metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
    }
    url = self._client.format_url(self.recover_deleted_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the POST call (no request body).
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SecretBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
recover_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}/recover'}  # type: ignore
async def backup_secret(
    self,
    vault_base_url: str,
    secret_name: str,
    **kwargs
) -> "_models.BackupSecretResult":
    """Backs up the specified secret.

    Requests that a backup of the specified secret be downloaded to the client. All versions of the
    secret will be downloaded. This operation requires the secrets/backup permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: BackupSecretResult, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.BackupSecretResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.BackupSecretResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template declared in the operation metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
    }
    url = self._client.format_url(self.backup_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the POST call (no request body).
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('BackupSecretResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
backup_secret.metadata = {'url': '/secrets/{secret-name}/backup'}  # type: ignore
async def restore_secret(
    self,
    vault_base_url: str,
    parameters: "_models.SecretRestoreParameters",
    **kwargs
) -> "_models.SecretBundle":
    """Restores a backed up secret to a vault.

    Restores a backed up secret, and all its versions, to a vault. This operation requires the
    secrets/restore permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param parameters: The parameters to restore the secret.
    :type parameters: ~azure.keyvault.v2016_10_01.models.SecretRestoreParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SecretBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the URL template declared in the operation metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.restore_secret.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers for the POST call.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the restore payload and send the request.
    body_content = self._serialize.body(parameters, 'SecretRestoreParameters')
    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SecretBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
restore_secret.metadata = {'url': '/secrets/restore'}  # type: ignore
def get_certificates(
    self,
    vault_base_url: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.CertificateListResult"]:
    """List certificates in a specified key vault.

    The GetCertificates operation returns the set of certificates resources in the specified key
    vault. This operation requires the certificates/list permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If not specified, the service
     will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CertificateListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"
    # Builds the first-page request, or a follow-up request from the
    # service-provided continuation link.
    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.get_certificates.metadata['url']  # type: ignore
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # next_link already carries the query string; only the vault base
            # URL placeholder still needs to be substituted.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            }
            url = self._client.format_url(url, **path_format_arguments)
            request = self._client.get(url, query_parameters, header_parameters)
        return request
    # Deserializes one response page into (continuation token, item iterator).
    async def extract_data(pipeline_response):
        deserialized = self._deserialize('CertificateListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)
    # Fetches a single page, raising on any non-200 status.
    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response
    return AsyncItemPaged(
        get_next, extract_data
    )
get_certificates.metadata = {'url': '/certificates'}  # type: ignore
async def delete_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    **kwargs
) -> "_models.DeletedCertificateBundle":
    """Deletes a certificate from a specified key vault.

    Removes every version of the certificate object together with its associated
    policy; this call cannot be used to delete individual versions. Requires the
    certificates/delete permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DeletedCertificateBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.DeletedCertificateBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedCertificateBundle"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.delete_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('DeletedCertificateBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
delete_certificate.metadata = {'url': '/certificates/{certificate-name}'}  # type: ignore
async def set_certificate_contacts(
    self,
    vault_base_url: str,
    contacts: "_models.Contacts",
    **kwargs
) -> "_models.Contacts":
    """Sets the certificate contacts for the specified key vault.

    Replaces the certificate contact list on the vault. Requires the
    certificates/managecontacts permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param contacts: The contacts for the key vault certificate.
    :type contacts: ~azure.keyvault.v2016_10_01.models.Contacts
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Contacts, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.Contacts
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Contacts"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.set_certificate_contacts.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string, headers, and serialized request body.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(contacts, 'Contacts')

    request = self._client.put(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('Contacts', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
set_certificate_contacts.metadata = {'url': '/certificates/contacts'}  # type: ignore
async def get_certificate_contacts(
    self,
    vault_base_url: str,
    **kwargs
) -> "_models.Contacts":
    """Lists the certificate contacts for a specified key vault.

    Returns the set of certificate contact resources in the vault. Requires the
    certificates/managecontacts permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Contacts, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.Contacts
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Contacts"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.get_certificate_contacts.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('Contacts', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_certificate_contacts.metadata = {'url': '/certificates/contacts'}  # type: ignore
async def delete_certificate_contacts(
    self,
    vault_base_url: str,
    **kwargs
) -> "_models.Contacts":
    """Deletes the certificate contacts for a specified key vault.

    Removes the certificate contact list from the vault. Requires the
    certificates/managecontacts permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Contacts, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.Contacts
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Contacts"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.delete_certificate_contacts.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('Contacts', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
delete_certificate_contacts.metadata = {'url': '/certificates/contacts'}  # type: ignore
def get_certificate_issuers(
    self,
    vault_base_url: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.CertificateIssuerListResult"]:
    """List certificate issuers for a specified key vault.

    Returns the set of certificate issuer resources in the vault, page by page.
    Requires the certificates/manageissuers/getissuers permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If not specified the service
     will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CertificateIssuerListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateIssuerListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateIssuerListResult"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        headers = {
            'Accept': self._serialize.header("accept", accept, 'str'),
        }  # type: Dict[str, Any]
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        }
        if next_link:
            # Continuation pages: the next_link already carries the full query string.
            url = self._client.format_url(next_link, **path_format_arguments)
            return self._client.get(url, {}, headers)
        # First page: build URL and query string from the route template.
        url = self._client.format_url(self.get_certificate_issuers.metadata['url'], **path_format_arguments)  # type: ignore
        params = {}  # type: Dict[str, Any]
        if maxresults is not None:
            params['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
        params['api-version'] = self._serialize.query("api_version", api_version, 'str')
        return self._client.get(url, params, headers)

    async def extract_data(pipeline_response):
        deserialized = self._deserialize('CertificateIssuerListResult', pipeline_response)
        elements = deserialized.value
        if cls:
            elements = cls(elements)
        return deserialized.next_link or None, AsyncList(elements)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)
get_certificate_issuers.metadata = {'url': '/certificates/issuers'}  # type: ignore
async def set_certificate_issuer(
    self,
    vault_base_url: str,
    issuer_name: str,
    parameter: "_models.CertificateIssuerSetParameters",
    **kwargs
) -> "_models.IssuerBundle":
    """Sets the specified certificate issuer.

    Adds or updates the named certificate issuer in the vault. Requires the
    certificates/setissuers permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param issuer_name: The name of the issuer.
    :type issuer_name: str
    :param parameter: Certificate issuer set parameter.
    :type parameter: ~azure.keyvault.v2016_10_01.models.CertificateIssuerSetParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IssuerBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IssuerBundle"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
    }
    url = self._client.format_url(self.set_certificate_issuer.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string, headers, and serialized request body.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameter, 'CertificateIssuerSetParameters')

    request = self._client.put(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('IssuerBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
set_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}  # type: ignore
async def update_certificate_issuer(
    self,
    vault_base_url: str,
    issuer_name: str,
    parameter: "_models.CertificateIssuerUpdateParameters",
    **kwargs
) -> "_models.IssuerBundle":
    """Updates the specified certificate issuer.

    Performs an update on the named certificate issuer entity. Requires the
    certificates/setissuers permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param issuer_name: The name of the issuer.
    :type issuer_name: str
    :param parameter: Certificate issuer update parameter.
    :type parameter: ~azure.keyvault.v2016_10_01.models.CertificateIssuerUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IssuerBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IssuerBundle"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
    }
    url = self._client.format_url(self.update_certificate_issuer.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string, headers, and serialized request body.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameter, 'CertificateIssuerUpdateParameters')

    request = self._client.patch(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('IssuerBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}  # type: ignore
async def get_certificate_issuer(
    self,
    vault_base_url: str,
    issuer_name: str,
    **kwargs
) -> "_models.IssuerBundle":
    """Lists the specified certificate issuer.

    Returns the named certificate issuer resource from the vault. Requires the
    certificates/manageissuers/getissuers permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param issuer_name: The name of the issuer.
    :type issuer_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IssuerBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IssuerBundle"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
    }
    url = self._client.format_url(self.get_certificate_issuer.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('IssuerBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}  # type: ignore
async def delete_certificate_issuer(
    self,
    vault_base_url: str,
    issuer_name: str,
    **kwargs
) -> "_models.IssuerBundle":
    """Deletes the specified certificate issuer.

    Permanently removes the named certificate issuer from the vault. Requires
    the certificates/manageissuers/deleteissuers permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param issuer_name: The name of the issuer.
    :type issuer_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IssuerBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IssuerBundle"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the route template into a concrete URL.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
    }
    url = self._client.format_url(self.delete_certificate_issuer.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('IssuerBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
delete_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}  # type: ignore
async def create_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    parameters: "_models.CertificateCreateParameters",
    **kwargs
) -> "_models.CertificateOperation":
    """Creates a new certificate.

    Starts creation of a certificate version; if this is the first version the
    certificate resource is created as well. Requires the certificates/create
    permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :param parameters: The parameters to create a certificate.
    :type parameters: ~azure.keyvault.v2016_10_01.models.CertificateCreateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateOperation, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateOperation"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the route template; the certificate name is validated against the
    # service's allowed character set.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
    }
    url = self._client.format_url(self.create_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string, headers, and serialized request body.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'CertificateCreateParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Creation is asynchronous on the service side: success is 202 Accepted.
    if response.status_code not in [202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateOperation', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
create_certificate.metadata = {'url': '/certificates/{certificate-name}/create'}  # type: ignore
async def import_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    parameters: "_models.CertificateImportParameters",
    **kwargs
) -> "_models.CertificateBundle":
    """Imports a certificate into a specified key vault.

    Imports an existing valid certificate, containing a private key, into Azure
    Key Vault. The certificate may be in PFX or PEM format; a PEM file must
    contain the key as well as x509 certificates. Requires the
    certificates/import permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :param parameters: The parameters to import the certificate.
    :type parameters: ~azure.keyvault.v2016_10_01.models.CertificateImportParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateBundle"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the route template; the certificate name is validated against the
    # service's allowed character set.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
    }
    url = self._client.format_url(self.import_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string, headers, and serialized request body.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'CertificateImportParameters')

    request = self._client.post(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
import_certificate.metadata = {'url': '/certificates/{certificate-name}/import'}  # type: ignore
def get_certificate_versions(
    self,
    vault_base_url: str,
    certificate_name: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.CertificateListResult"]:
    """List the versions of a certificate.

    The GetCertificateVersions operation returns the versions of a certificate in the specified key
    vault. This operation requires the certificates/list permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :param maxresults: Maximum number of results to return in a page. If not specified the service
     will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CertificateListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Headers and path arguments are identical for the first and the
        # follow-up (next_link) requests, so build them up front.
        header_parameters = {
            'Accept': self._serialize.header("accept", accept, 'str'),
        }  # type: Dict[str, Any]
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
        }
        if not next_link:
            # First page: use the operation's URL template plus query string.
            url = self._client.format_url(self.get_certificate_versions.metadata['url'], **path_format_arguments)  # type: ignore
            query_parameters = {}  # type: Dict[str, Any]
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        else:
            # Subsequent pages: the service-provided link already carries the query string.
            url = self._client.format_url(next_link, **path_format_arguments)
            query_parameters = {}  # type: Dict[str, Any]
        return self._client.get(url, query_parameters, header_parameters)

    async def extract_data(pipeline_response):
        # Pull the element list and the continuation link out of one page.
        deserialized = self._deserialize('CertificateListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page and surface service errors as HttpResponseError.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_certificate_versions.metadata = {'url': '/certificates/{certificate-name}/versions'}  # type: ignore
async def get_certificate_policy(
    self,
    vault_base_url: str,
    certificate_name: str,
    **kwargs
) -> "_models.CertificatePolicy":
    """Lists the policy for a certificate.

    The GetCertificatePolicy operation returns the specified certificate policy resources in the
    specified key vault. This operation requires the certificates/get permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate in a given key vault.
    :type certificate_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificatePolicy, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificatePolicy
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificatePolicy"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.get_certificate_policy.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificatePolicy', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'}  # type: ignore
async def update_certificate_policy(
    self,
    vault_base_url: str,
    certificate_name: str,
    certificate_policy: "_models.CertificatePolicy",
    **kwargs
) -> "_models.CertificatePolicy":
    """Updates the policy for a certificate.

    Set specified members in the certificate policy. Leave others as null. This operation requires
    the certificates/update permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate in the given vault.
    :type certificate_name: str
    :param certificate_policy: The policy for the certificate.
    :type certificate_policy: ~azure.keyvault.v2016_10_01.models.CertificatePolicy
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificatePolicy, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificatePolicy
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificatePolicy"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.update_certificate_policy.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the policy into the request body and issue the PATCH.
    body_content_kwargs = {
        'content': self._serialize.body(certificate_policy, 'CertificatePolicy'),
    }  # type: Dict[str, Any]
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificatePolicy', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'}  # type: ignore
async def update_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    certificate_version: str,
    parameters: "_models.CertificateUpdateParameters",
    **kwargs
) -> "_models.CertificateBundle":
    """Updates the specified attributes associated with the given certificate.

    The UpdateCertificate operation applies the specified update on the given certificate; the only
    elements updated are the certificate's attributes. This operation requires the
    certificates/update permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate in the given key vault.
    :type certificate_name: str
    :param certificate_version: The version of the certificate.
    :type certificate_version: str
    :param parameters: The parameters for certificate update.
    :type parameters: ~azure.keyvault.v2016_10_01.models.CertificateUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateBundle"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the URL template against the vault, name and version.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
        'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str'),
    }
    url = self._client.format_url(self.update_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the update parameters into the request body and issue the PATCH.
    body_content_kwargs = {
        'content': self._serialize.body(parameters, 'CertificateUpdateParameters'),
    }  # type: Dict[str, Any]
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'}  # type: ignore
async def get_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    certificate_version: str,
    **kwargs
) -> "_models.CertificateBundle":
    """Gets information about a certificate.

    Gets information about a specific certificate. This operation requires the certificates/get
    permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate in the given vault.
    :type certificate_name: str
    :param certificate_version: The version of the certificate.
    :type certificate_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateBundle"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template against the vault, name and version.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
        'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str'),
    }
    url = self._client.format_url(self.get_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'}  # type: ignore
async def update_certificate_operation(
    self,
    vault_base_url: str,
    certificate_name: str,
    certificate_operation: "_models.CertificateOperationUpdateParameter",
    **kwargs
) -> "_models.CertificateOperation":
    """Updates a certificate operation.

    Updates a certificate creation operation that is already in progress. This operation requires
    the certificates/update permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :param certificate_operation: The certificate operation response.
    :type certificate_operation: ~azure.keyvault.v2016_10_01.models.CertificateOperationUpdateParameter
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateOperation, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateOperation"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.update_certificate_operation.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the operation update into the request body and issue the PATCH.
    body_content_kwargs = {
        'content': self._serialize.body(certificate_operation, 'CertificateOperationUpdateParameter'),
    }  # type: Dict[str, Any]
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateOperation', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'}  # type: ignore
async def get_certificate_operation(
    self,
    vault_base_url: str,
    certificate_name: str,
    **kwargs
) -> "_models.CertificateOperation":
    """Gets the creation operation of a certificate.

    Gets the creation operation associated with a specified certificate. This operation requires
    the certificates/get permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateOperation, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateOperation"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.get_certificate_operation.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateOperation', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'}  # type: ignore
async def delete_certificate_operation(
    self,
    vault_base_url: str,
    certificate_name: str,
    **kwargs
) -> "_models.CertificateOperation":
    """Deletes the creation operation for a specific certificate.

    Deletes the creation operation for a specified certificate that is in the process of being
    created. The certificate is no longer created. This operation requires the certificates/update
    permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateOperation, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateOperation"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.delete_certificate_operation.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # The cancelled/deleted operation is returned in the DELETE response body.
    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateOperation', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
delete_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'}  # type: ignore
async def merge_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    parameters: "_models.CertificateMergeParameters",
    **kwargs
) -> "_models.CertificateBundle":
    """Merges a certificate or a certificate chain with a key pair existing on the server.

    The MergeCertificate operation performs the merging of a certificate or certificate chain with
    a key pair currently available in the service. This operation requires the certificates/create
    permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :param parameters: The parameters to merge certificate.
    :type parameters: ~azure.keyvault.v2016_10_01.models.CertificateMergeParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: CertificateBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.CertificateBundle"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.merge_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the merge parameters into the request body and issue the POST.
    body_content_kwargs = {
        'content': self._serialize.body(parameters, 'CertificateMergeParameters'),
    }  # type: Dict[str, Any]
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # A successful merge creates a new certificate version, hence 201.
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('CertificateBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
merge_certificate.metadata = {'url': '/certificates/{certificate-name}/pending/merge'}  # type: ignore
def get_deleted_certificates(
    self,
    vault_base_url: str,
    maxresults: Optional[int] = None,
    **kwargs
) -> AsyncIterable["_models.DeletedCertificateListResult"]:
    """Lists the deleted certificates in the specified vault currently available for recovery.

    The GetDeletedCertificates operation retrieves the certificates in the current vault which are
    in a deleted state and ready for recovery or purging. This operation includes deletion-specific
    information. This operation requires the certificates/get/list permission. This operation can
    only be enabled on soft-delete enabled vaults.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If not specified the service
     will return up to 25 results.
    :type maxresults: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DeletedCertificateListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedCertificateListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedCertificateListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Headers and path arguments are identical for the first and the
        # follow-up (next_link) requests, so build them up front.
        header_parameters = {
            'Accept': self._serialize.header("accept", accept, 'str'),
        }  # type: Dict[str, Any]
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        }
        if not next_link:
            # First page: use the operation's URL template plus query string.
            url = self._client.format_url(self.get_deleted_certificates.metadata['url'], **path_format_arguments)  # type: ignore
            query_parameters = {}  # type: Dict[str, Any]
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        else:
            # Subsequent pages: the service-provided link already carries the query string.
            url = self._client.format_url(next_link, **path_format_arguments)
            query_parameters = {}  # type: Dict[str, Any]
        return self._client.get(url, query_parameters, header_parameters)

    async def extract_data(pipeline_response):
        # Pull the element list and the continuation link out of one page.
        deserialized = self._deserialize('DeletedCertificateListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page and surface service errors as HttpResponseError.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_deleted_certificates.metadata = {'url': '/deletedcertificates'}  # type: ignore
async def get_deleted_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    **kwargs
) -> "_models.DeletedCertificateBundle":
    """Retrieves information about the specified deleted certificate.

    The GetDeletedCertificate operation retrieves the deleted certificate information plus its
    attributes, such as retention interval, scheduled permanent deletion and the current deletion
    recovery level. This operation requires the certificates/get permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DeletedCertificateBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.DeletedCertificateBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedCertificateBundle"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.get_deleted_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('DeletedCertificateBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'}  # type: ignore
async def purge_deleted_certificate(
    self,
    vault_base_url: str,
    certificate_name: str,
    **kwargs
) -> None:
    """Permanently deletes the specified deleted certificate.

    The PurgeDeletedCertificate operation performs an irreversible deletion of the specified
    certificate, without possibility for recovery. The operation is not available if the recovery
    level does not specify 'Purgeable'. This operation requires the certificate/purge permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param certificate_name: The name of the certificate.
    :type certificate_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    accept = "application/json"

    # Resolve the URL template against the vault and certificate name.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
    }
    url = self._client.format_url(self.purge_deleted_certificate.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Purge returns no body; 204 is the only success status.
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    if cls:
        return cls(pipeline_response, None, {})
purge_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'}  # type: ignore
async def recover_deleted_certificate(
self,
vault_base_url: str,
certificate_name: str,
**kwargs
) -> "_models.CertificateBundle":
"""Recovers the deleted certificate back to its current version under /certificates.
The RecoverDeletedCertificate operation performs the reversal of the Delete operation. The
operation is applicable in vaults enabled for soft-delete, and must be issued during the
retention interval (available in the deleted certificate's attributes). This operation requires
the certificates/recover permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the deleted certificate.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.recover_deleted_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
recover_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}/recover'} # type: ignore
    def get_storage_accounts(
        self,
        vault_base_url: str,
        maxresults: Optional[int] = None,
        **kwargs
    ) -> AsyncIterable["_models.StorageListResult"]:
        """List storage accounts managed by the specified key vault. This operation requires the
        storage/list permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If not specified the service
         will return up to 25 results.
        :type maxresults: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either StorageListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.StorageListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.StorageListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the first request (from the operation's URL template)
            # or a follow-up request for the continuation link the service
            # returned; the continuation URL already carries its own query
            # string, so no parameters are re-added on that path.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.get_storage_accounts.metadata['url']  # type: ignore
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand AsyncItemPaged the continuation
            # token (next_link, or None on the last page) plus the page items.
            deserialized = self._deserialize('StorageListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; non-200 responses raise with the parsed
            # KeyVaultError payload attached when available.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    get_storage_accounts.metadata = {'url': '/storage'}  # type: ignore
async def delete_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
**kwargs
) -> "_models.StorageBundle":
"""Deletes a storage account. This operation requires the storage/delete permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def get_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
**kwargs
) -> "_models.StorageBundle":
"""Gets information about a specified storage account. This operation requires the storage/get
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def set_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
parameters: "_models.StorageAccountCreateParameters",
**kwargs
) -> "_models.StorageBundle":
"""Creates or updates a new storage account. This operation requires the storage/set permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param parameters: The parameters to create a storage account.
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def update_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
parameters: "_models.StorageAccountUpdateParameters",
**kwargs
) -> "_models.StorageBundle":
"""Updates the specified attributes associated with the given storage account. This operation
requires the storage/set/update permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param parameters: The parameters to update a storage account.
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def regenerate_storage_account_key(
self,
vault_base_url: str,
storage_account_name: str,
parameters: "_models.StorageAccountRegenerteKeyParameters",
**kwargs
) -> "_models.StorageBundle":
"""Regenerates the specified key value for the given storage account. This operation requires the
storage/regeneratekey permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param parameters: The parameters to regenerate storage account key.
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountRegenerteKeyParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.regenerate_storage_account_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountRegenerteKeyParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
regenerate_storage_account_key.metadata = {'url': '/storage/{storage-account-name}/regeneratekey'} # type: ignore
    def get_sas_definitions(
        self,
        vault_base_url: str,
        storage_account_name: str,
        maxresults: Optional[int] = None,
        **kwargs
    ) -> AsyncIterable["_models.SasDefinitionListResult"]:
        """List storage SAS definitions for the given storage account. This operation requires the
        storage/listsas permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param storage_account_name: The name of the storage account.
        :type storage_account_name: str
        :param maxresults: Maximum number of results to return in a page. If not specified the service
         will return up to 25 results.
        :type maxresults: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SasDefinitionListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SasDefinitionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SasDefinitionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the first request (from the operation's URL template)
            # or a follow-up request for the continuation link the service
            # returned; the continuation URL already carries its own query
            # string, so no parameters are re-added on that path.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.get_sas_definitions.metadata['url']  # type: ignore
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                    'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                    'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand AsyncItemPaged the continuation
            # token (next_link, or None on the last page) plus the page items.
            deserialized = self._deserialize('SasDefinitionListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; non-200 responses raise with the parsed
            # KeyVaultError payload attached when available.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    get_sas_definitions.metadata = {'url': '/storage/{storage-account-name}/sas'}  # type: ignore
async def delete_sas_definition(
self,
vault_base_url: str,
storage_account_name: str,
sas_definition_name: str,
**kwargs
) -> "_models.SasDefinitionBundle":
"""Deletes a SAS definition from a specified storage account. This operation requires the
storage/deletesas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param sas_definition_name: The name of the SAS definition.
:type sas_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SasDefinitionBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_sas_definition.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
async def get_sas_definition(
self,
vault_base_url: str,
storage_account_name: str,
sas_definition_name: str,
**kwargs
) -> "_models.SasDefinitionBundle":
"""Gets information about a SAS definition for the specified storage account. This operation
requires the storage/getsas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param sas_definition_name: The name of the SAS definition.
:type sas_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SasDefinitionBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_sas_definition.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
async def set_sas_definition(
self,
vault_base_url: str,
storage_account_name: str,
sas_definition_name: str,
parameters: "_models.SasDefinitionCreateParameters",
**kwargs
) -> "_models.SasDefinitionBundle":
"""Creates or updates a new SAS definition for the specified storage account. This operation
requires the storage/setsas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param sas_definition_name: The name of the SAS definition.
:type sas_definition_name: str
:param parameters: The parameters to create a SAS definition.
:type parameters: ~azure.keyvault.v2016_10_01.models.SasDefinitionCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SasDefinitionBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_sas_definition.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SasDefinitionCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
async def update_sas_definition(
    self,
    vault_base_url: str,
    storage_account_name: str,
    sas_definition_name: str,
    parameters: "_models.SasDefinitionUpdateParameters",
    **kwargs
) -> "_models.SasDefinitionBundle":
    """Updates the specified attributes associated with the given SAS definition. This operation
    requires the storage/setsas permission.

    :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param sas_definition_name: The name of the SAS definition.
    :type sas_definition_name: str
    :param parameters: The parameters to update a SAS definition.
    :type parameters: ~azure.keyvault.v2016_10_01.models.SasDefinitionUpdateParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SasDefinitionBundle, or the result of cls(response)
    :rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SasDefinitionBundle"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2016-10-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Build the request URL from the method's metadata template.
    url = self._client.format_url(
        self.update_sas_definition.metadata['url'],  # type: ignore
        **{
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
            'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        }
    )

    # Query string and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the body and issue the PATCH request through the pipeline.
    body_content = self._serialize.body(parameters, 'SasDefinitionUpdateParameters')
    request = self._client.patch(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Anything other than 200 is mapped to the appropriate azure-core error.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'}  # type: ignore
| 49.121748
| 158
| 0.665893
| 25,841
| 230,381
| 5.728068
| 0.0214
| 0.021828
| 0.029104
| 0.021585
| 0.920395
| 0.906715
| 0.895534
| 0.886866
| 0.879847
| 0.87049
| 0
| 0.012352
| 0.236747
| 230,381
| 4,689
| 159
| 49.132224
| 0.829435
| 0.098602
| 0
| 0.843447
| 0
| 0
| 0.119844
| 0.030569
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008618
| false
| 0
| 0.006104
| 0
| 0.070736
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbf199243dc75aedfa375e6e1e97648bcb53ca1b
| 9,567
|
py
|
Python
|
tests/unit/test_decorators.py
|
haccht/scrapli
|
89589ee78c36296ee67813fcbedebee9b41b6bca
|
[
"MIT"
] | 1
|
2020-02-09T17:43:43.000Z
|
2020-02-09T17:43:43.000Z
|
tests/unit/test_decorators.py
|
haccht/scrapli
|
89589ee78c36296ee67813fcbedebee9b41b6bca
|
[
"MIT"
] | null | null | null |
tests/unit/test_decorators.py
|
haccht/scrapli
|
89589ee78c36296ee67813fcbedebee9b41b6bca
|
[
"MIT"
] | null | null | null |
import asyncio
import time
import pytest
from scrapli.decorators import ChannelTimeout, TimeoutOpsModifier, TransportTimeout
from scrapli.exceptions import ScrapliTimeout
@pytest.mark.parametrize("test_data", (1, 0), ids=("timeout_operation", "timeout_disabled"))
def test_transport_timeout_sync_signals(monkeypatch, sync_transport_no_abc, test_data):
    """Signals-based transport timeout lets a wrapped call finish and return its value."""
    sync_transport_no_abc._base_transport_args.timeout_transport = test_data

    @TransportTimeout()
    def _open(cls):
        return cls._base_transport_args.timeout_transport

    # crude patch, but enough to prove the decorator passes the wrapped return through
    monkeypatch.setattr("scrapli.transport.base.sync_transport.Transport.open", _open)
    assert sync_transport_no_abc.open() == test_data
@pytest.mark.parametrize("test_data", (1, 0), ids=("timeout_operation", "timeout_disabled"))
def test_transport_timeout_sync_multiprocessing(monkeypatch, sync_transport_no_abc, test_data):
    """Multiprocessing-based transport timeout lets a wrapped call finish and return its value."""
    sync_transport_no_abc._base_transport_args.timeout_transport = test_data

    @TransportTimeout()
    def _open(cls):
        return cls._base_transport_args.timeout_transport

    # crude patch, but enough to prove the decorator passes the wrapped return through
    monkeypatch.setattr("scrapli.transport.base.sync_transport.Transport.open", _open)
    # force the multiprocessing timeout path by pretending we are on windows
    monkeypatch.setattr("scrapli.decorators._IS_WINDOWS", True)
    assert sync_transport_no_abc.open() == test_data
def test_transport_timeout_sync_timed_out_signals(monkeypatch, sync_transport_no_abc):
    """Signals-based transport timeout raises ScrapliTimeout when the call overruns."""
    sync_transport_no_abc._base_transport_args.timeout_transport = 0.1

    @TransportTimeout()
    def _slow_open(cls):
        time.sleep(0.5)

    # crude patch, but enough to prove the timeout actually fires
    monkeypatch.setattr("scrapli.transport.base.sync_transport.Transport.open", _slow_open)
    with pytest.raises(ScrapliTimeout):
        sync_transport_no_abc.open()
def test_transport_timeout_sync_timed_out_multiprocessing(monkeypatch, sync_transport_no_abc):
    """Multiprocessing-based transport timeout raises ScrapliTimeout when the call overruns."""
    sync_transport_no_abc._base_transport_args.timeout_transport = 0.1

    @TransportTimeout()
    def _slow_open(cls):
        time.sleep(0.5)

    # crude patch, but enough to prove the timeout actually fires
    monkeypatch.setattr("scrapli.transport.base.sync_transport.Transport.open", _slow_open)
    # force the multiprocessing timeout path by pretending we are on windows
    monkeypatch.setattr("scrapli.decorators._IS_WINDOWS", True)
    with pytest.raises(ScrapliTimeout):
        sync_transport_no_abc.open()
async def test_transport_timeout_async_timed_out(monkeypatch, async_transport_no_abc):
    """Async transport timeout raises ScrapliTimeout when the coroutine overruns."""
    async_transport_no_abc._base_transport_args.timeout_transport = 0.1

    @TransportTimeout()
    async def _slow_open(cls):
        await asyncio.sleep(0.5)

    # crude patch, but enough to prove the timeout actually fires
    monkeypatch.setattr("scrapli.transport.base.async_transport.AsyncTransport.open", _slow_open)
    with pytest.raises(ScrapliTimeout):
        await async_transport_no_abc.open()
@pytest.mark.parametrize("test_data", (1, 0), ids=("timeout_operation", "timeout_disabled"))
async def test_transport_timeout_async(monkeypatch, async_transport_no_abc, test_data):
    """Async transport timeout lets a wrapped coroutine finish and return its value."""
    async_transport_no_abc._base_transport_args.timeout_transport = test_data

    @TransportTimeout()
    async def _open(cls):
        return cls._base_transport_args.timeout_transport

    # crude patch, but enough to prove the decorator passes the wrapped return through
    monkeypatch.setattr("scrapli.transport.base.async_transport.AsyncTransport.open", _open)
    assert await async_transport_no_abc.open() == test_data
@pytest.mark.parametrize("test_data", (1, 0), ids=("timeout_operation", "timeout_disabled"))
def test_channel_timeout_sync_signals(monkeypatch, sync_channel, test_data):
    """Signals-based channel timeout lets a wrapped call finish and return its value."""
    sync_channel._base_channel_args.timeout_ops = test_data

    @ChannelTimeout()
    def _send_input(cls):
        return cls._base_channel_args.timeout_ops

    # crude patch, but enough to prove the decorator passes the wrapped return through
    monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
    assert sync_channel.send_input() == test_data
@pytest.mark.parametrize("test_data", (1, 0), ids=("timeout_operation", "timeout_disabled"))
def test_channel_timeout_sync_multiprocessing(monkeypatch, sync_channel, test_data):
    """Multiprocessing-based channel timeout lets a wrapped call finish and return its value."""
    sync_channel._base_channel_args.timeout_ops = test_data

    @ChannelTimeout()
    def _send_input(cls):
        return cls._base_channel_args.timeout_ops

    # crude patch, but enough to prove the decorator passes the wrapped return through
    monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
    # force the multiprocessing timeout path by pretending we are on windows
    monkeypatch.setattr("scrapli.decorators._IS_WINDOWS", True)
    assert sync_channel.send_input() == test_data
def test_channel_timeout_sync_timed_out_signals(monkeypatch, sync_channel):
    """Signals-based channel timeout raises ScrapliTimeout when the call overruns."""
    sync_channel._base_channel_args.timeout_ops = 0.1

    @ChannelTimeout()
    def _slow_send_input(cls):
        time.sleep(0.5)

    # crude patch, but enough to prove the timeout actually fires
    monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _slow_send_input)
    with pytest.raises(ScrapliTimeout):
        sync_channel.send_input()
def test_channel_timeout_sync_timed_out_multiprocessing(monkeypatch, sync_channel):
    """Multiprocessing-based channel timeout raises ScrapliTimeout when the call overruns."""
    sync_channel._base_channel_args.timeout_ops = 0.1

    @ChannelTimeout()
    def _slow_send_input(cls):
        time.sleep(0.5)

    # crude patch, but enough to prove the timeout actually fires
    monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _slow_send_input)
    # force the multiprocessing timeout path by pretending we are on windows
    monkeypatch.setattr("scrapli.decorators._IS_WINDOWS", True)
    with pytest.raises(ScrapliTimeout):
        sync_channel.send_input()
async def test_channel_timeout_async_timed_out(monkeypatch, async_channel):
    """Async channel timeout raises ScrapliTimeout when the coroutine overruns."""
    async_channel._base_channel_args.timeout_ops = 0.1

    @ChannelTimeout()
    async def _slow_send_input(cls):
        await asyncio.sleep(0.5)

    # crude patch, but enough to prove the timeout actually fires
    monkeypatch.setattr("scrapli.channel.async_channel.AsyncChannel.send_input", _slow_send_input)
    with pytest.raises(ScrapliTimeout):
        await async_channel.send_input()
@pytest.mark.parametrize("test_data", (1, 0), ids=("timeout_operation", "timeout_disabled"))
async def test_channel_timeout_async(monkeypatch, async_channel, test_data):
    """Async channel timeout lets a wrapped coroutine finish and return its value."""
    async_channel._base_channel_args.timeout_ops = test_data

    @ChannelTimeout()
    async def _send_input(cls):
        return cls._base_channel_args.timeout_ops

    # crude patch, but enough to prove the decorator passes the wrapped return through
    monkeypatch.setattr("scrapli.channel.async_channel.AsyncChannel.send_input", _send_input)
    assert await async_channel.send_input() == test_data
@pytest.mark.parametrize(
    "test_data",
    (
        999,
        30,
        None,
    ),
    ids=("timeout_modified", "timeout_unchanged", "timeout_not_provided"),
)
def test_timeout_modifier(monkeypatch, sync_driver, test_data):
    """TimeoutOpsModifier applies a per-call timeout_ops override and restores the original."""
    timeout_ops = test_data
    assert sync_driver.timeout_ops == 30

    @TimeoutOpsModifier()
    def _test_timeout_modifier(cls, timeout_ops):
        return cls.timeout_ops

    # stupid patch but does confirm the timeout modifier works as expected!
    monkeypatch.setattr("scrapli.driver.base.sync_driver.Driver.open", _test_timeout_modifier)
    modified_timeout = sync_driver.open(timeout_ops=timeout_ops)
    # BUG FIX: the conditional must be parenthesized. Unparenthesized,
    # `assert x == a if a else 30` parses as `assert (x == a) if a else 30`,
    # so the falsy case degenerated to `assert 30` and never checked anything.
    assert modified_timeout == (timeout_ops if timeout_ops else 30)
    # the driver's configured timeout must be restored after the call
    assert sync_driver.timeout_ops == 30
@pytest.mark.parametrize(
    "test_data",
    (
        999,
        30,
        None,
    ),
    ids=("timeout_modified", "timeout_unchanged", "timeout_not_provided"),
)
async def test_timeout_modifier_async(monkeypatch, async_driver, test_data):
    """TimeoutOpsModifier applies a per-call timeout_ops override and restores the original (async)."""
    timeout_ops = test_data
    assert async_driver.timeout_ops == 30

    @TimeoutOpsModifier()
    async def _test_timeout_modifier(cls, timeout_ops):
        return cls.timeout_ops

    # stupid patch but does confirm the timeout modifier works as expected!
    monkeypatch.setattr("scrapli.driver.base.async_driver.AsyncDriver.open", _test_timeout_modifier)
    modified_timeout = await async_driver.open(timeout_ops=timeout_ops)
    # BUG FIX: the conditional must be parenthesized. Unparenthesized,
    # `assert x == a if a else 30` parses as `assert (x == a) if a else 30`,
    # so the falsy case degenerated to `assert 30` and never checked anything.
    assert modified_timeout == (timeout_ops if timeout_ops else 30)
    # the driver's configured timeout must be restored after the call
    assert async_driver.timeout_ops == 30
| 33.21875
| 100
| 0.749869
| 1,216
| 9,567
| 5.581414
| 0.074836
| 0.053043
| 0.03713
| 0.03713
| 0.958155
| 0.935612
| 0.872256
| 0.839399
| 0.814056
| 0.780168
| 0
| 0.007327
| 0.172572
| 9,567
| 287
| 101
| 33.334495
| 0.850051
| 0.184697
| 0
| 0.743456
| 0
| 0
| 0.155113
| 0.106752
| 0
| 0
| 0
| 0
| 0.062827
| 1
| 0.094241
| false
| 0
| 0.026178
| 0.026178
| 0.162304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e043d9d332302ef1f04bf00e77ae0d7d37d48dc5
| 5,107
|
py
|
Python
|
tests/test_local_key.py
|
Jesse-Yung/jsonclasses
|
d40c52aec42bcb978a80ceb98b93ab38134dc790
|
[
"MIT"
] | 50
|
2021-08-18T08:08:04.000Z
|
2022-03-20T07:23:26.000Z
|
tests/test_local_key.py
|
Jesse-Yung/jsonclasses
|
d40c52aec42bcb978a80ceb98b93ab38134dc790
|
[
"MIT"
] | 1
|
2021-11-23T02:12:29.000Z
|
2021-11-23T13:35:26.000Z
|
tests/test_local_key.py
|
Jesse-Yung/jsonclasses
|
d40c52aec42bcb978a80ceb98b93ab38134dc790
|
[
"MIT"
] | 8
|
2021-07-01T02:39:15.000Z
|
2021-12-10T02:20:18.000Z
|
from __future__ import annotations
from unittest import TestCase
from tests.classes.local_key_author import LKAuthor, LKArticle
from tests.classes.linked_author import LinkedAuthor
from tests.classes.linked_article import LinkedArticle
class TestLocalKey(TestCase):
    """Tests that a local foreign-key field and its linked object stay in sync."""

    @staticmethod
    def _linked_pair():
        """Return an (author, article) pair created already linked to each other."""
        author = LKAuthor(name='A')
        article = LKArticle(name='A', author=author)
        return author, article

    @staticmethod
    def _mark_saved(*objects):
        """Flag objects as persisted so subsequent changes count as modifications."""
        for obj in objects:
            setattr(obj, '_is_new', False)

    def test_local_key_can_be_accessed(self):
        article = LKArticle(name='A')
        self.assertEqual(article.author_id, None)

    def test_set_object_modifies_local_key(self):
        author, article = self._linked_pair()
        self.assertEqual(article.author_id, author.id)

    def test_set_local_key_resets_object(self):
        author, article = self._linked_pair()
        self.assertEqual(article.author, author)
        self.assertEqual(author.articles[0], article)
        article.author_id = 500
        self.assertEqual(article.author, None)
        self.assertEqual(author.articles, [])

    def test_local_key_is_set_to_none_then_object_is_set_to_none(self):
        author, article = self._linked_pair()
        self.assertEqual(article.author, author)
        self.assertEqual(author.articles[0], article)
        article.author_id = None
        self.assertEqual(article.author, None)
        self.assertEqual(author.articles, [])

    def test_object_is_set_to_none_then_local_key_is_set_to_none(self):
        author, article = self._linked_pair()
        self.assertEqual(article.author, author)
        self.assertEqual(author.articles[0], article)
        article.author = None
        self.assertEqual(article.author_id, None)
        self.assertEqual(author.articles, [])

    def test_object_is_set_to_none_o_then_local_key_is_set_to_none(self):
        author, article = self._linked_pair()
        self.assertEqual(article.author, author)
        self.assertEqual(author.articles[0], article)
        author.articles = []
        self.assertEqual(article.author_id, None)
        self.assertEqual(author.articles, [])

    def test_local_key_is_considered_modified_field(self):
        author, article = self._linked_pair()
        self._mark_saved(author, article)
        article.author = None
        self.assertEqual(article.modified_fields, ('author',))
        self.assertEqual(author.modified_fields, ('articles',))

    def test_local_key_is_considered_modified_field_with_id_none_assign(self):
        author, article = self._linked_pair()
        self._mark_saved(author, article)
        article.author_id = None
        self.assertEqual(article.modified_fields, ('author',))
        self.assertEqual(author.modified_fields, ('articles',))

    def test_local_key_is_considered_modified_field_with_id_assign(self):
        article = LKArticle(name='A', author=None)
        author = LKAuthor(name='A')
        self._mark_saved(author, article)
        article.author_id = author.id
        self.assertEqual(article.modified_fields, ('author',))

    def test_local_key_is_considered_modified_field_even_id_is_none(self):
        article = LinkedArticle(name='A', author=None)
        author = LinkedAuthor(name='A')
        self._mark_saved(author, article)
        article.author = author
        self.assertEqual(article.author_id, None)
        self.assertEqual(article.modified_fields, ('author',))
        self.assertEqual(author.modified_fields, ('articles',))

    def test_local_key_set_on_save_should_not_reset_value(self):
        article = LinkedArticle(name='A', author=None)
        article.author_id = 995
        article._set_on_save()
        self.assertEqual(article.author_id, 995)

    def test_init_should_accept_local_key(self):
        article = LKArticle(name='A', author_id=1005)
        self.assertEqual(article.author, None)
        self.assertEqual(article.author_id, 1005)

    def test_set_should_accept_local_key(self):
        article = LKArticle()
        article.set(name='A', author_id=1005)
        self.assertEqual(article.author, None)
        self.assertEqual(article.author_id, 1005)

    def test_update_should_accept_local_key(self):
        article = LKArticle()
        article.update(name='A', author_id=1005)
        self.assertEqual(article.author, None)
        self.assertEqual(article.author_id, 1005)

    def test_init_should_accept_local_key_in_json_form(self):
        article = LKArticle(**{'name': 'A', 'authorId': 1005})
        self.assertEqual(article.author, None)
        self.assertEqual(article.author_id, 1005)

    def test_set_should_accept_local_key_in_json_form(self):
        article = LKArticle()
        article.set(**{'name': 'A', 'authorId': 1005})
        self.assertEqual(article.author, None)
        self.assertEqual(article.author_id, 1005)
| 41.185484
| 78
| 0.687488
| 628
| 5,107
| 5.30414
| 0.105096
| 0.166617
| 0.17172
| 0.184929
| 0.862504
| 0.833383
| 0.798559
| 0.751126
| 0.711198
| 0.704293
| 0
| 0.012981
| 0.200509
| 5,107
| 123
| 79
| 41.520325
| 0.802841
| 0
| 0
| 0.638095
| 0
| 0
| 0.029959
| 0
| 0
| 0
| 0
| 0
| 0.352381
| 1
| 0.152381
| false
| 0
| 0.047619
| 0
| 0.209524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e05400214f4df27feafda9946c9bd5525cdcbb5f
| 154
|
py
|
Python
|
integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra_tests/test_utils.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 4,606
|
2018-06-21T17:45:20.000Z
|
2022-03-31T23:39:42.000Z
|
integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra_tests/test_utils.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 6,221
|
2018-06-12T04:36:01.000Z
|
2022-03-31T21:43:05.000Z
|
integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra_tests/test_utils.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 619
|
2018-08-22T22:43:09.000Z
|
2022-03-31T22:48:06.000Z
|
from dagster_k8s_test_infra.integration_utils import get_test_namespace
def test_get_test_namespace():
    """get_test_namespace should hand back the namespace name as a plain string."""
    namespace = get_test_namespace()
    assert isinstance(namespace, str)
| 25.666667
| 71
| 0.844156
| 22
| 154
| 5.409091
| 0.636364
| 0.176471
| 0.403361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007194
| 0.097403
| 154
| 5
| 72
| 30.8
| 0.848921
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e060e430df4160fd3ca39e6376498eab3d2e703e
| 149
|
py
|
Python
|
bitmovin_api_sdk/player/channels/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/player/channels/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/player/channels/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.player.channels.channels_api import ChannelsApi
from bitmovin_api_sdk.player.channels.versions.versions_api import VersionsApi
| 49.666667
| 78
| 0.899329
| 21
| 149
| 6.095238
| 0.47619
| 0.1875
| 0.234375
| 0.28125
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053691
| 149
| 2
| 79
| 74.5
| 0.907801
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0eb747f42c19a2172a4c9f616f808352025dd105
| 26,510
|
py
|
Python
|
PaddleSpeech/DeepVoice3/conversion/generate_name_map.py
|
ziyuli/models
|
6aaf01922f8b5c4be1f89a961ade92e2d7c11e3c
|
[
"Apache-2.0"
] | 1
|
2019-11-06T09:51:50.000Z
|
2019-11-06T09:51:50.000Z
|
PaddleSpeech/DeepVoice3/conversion/generate_name_map.py
|
ziyuli/models
|
6aaf01922f8b5c4be1f89a961ade92e2d7c11e3c
|
[
"Apache-2.0"
] | null | null | null |
PaddleSpeech/DeepVoice3/conversion/generate_name_map.py
|
ziyuli/models
|
6aaf01922f8b5c4be1f89a961ade92e2d7c11e3c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Generate a name map from configuration
"""
from collections import OrderedDict
import numpy as np
from argparse import ArgumentParser
from deepvoice3_paddle import frontend
from hparams import hparams, hparams_debug_string
def build_arg_parser():
    """Build the command-line parser for deepvoice 3 training.

    Returns an ArgumentParser with a required ``--preset`` (path to a json
    preset file) and an optional ``--hparams`` override string.
    """
    parser = ArgumentParser(description="Train deepvoice 3 model.")
    parser.add_argument(
        "--preset", type=str, required=True,
        help="Path of preset parameters in json format.")
    parser.add_argument(
        "--hparams", type=str, default="",
        help="Hyper parameters to override preset.")
    return parser
def gen_conv(in_channels, out_channels, kernel_size, torch_state, paddle_state,
             name_map, torch_base_name, paddle_base_name):
    """Record the shapes and the torch->paddle name mapping for one
    weight-normalized 1-D convolution (bias, weight_g, weight_v).

    Mutates torch_state / paddle_state (name -> shape) and name_map
    (torch name -> paddle name) in place.
    """
    # (torch suffix, paddle suffix, torch shape, paddle shape) per parameter;
    # paddle uses a 4-D NCHW conv, so the kernel gains two singleton dims.
    param_specs = (
        ("bias", "b_0", (out_channels, ), [out_channels, ]),
        ("weight_g", "w_1", (out_channels, 1, 1), [out_channels, ]),
        ("weight_v", "w_0", (out_channels, in_channels, kernel_size),
         [out_channels, in_channels, 1, kernel_size]),
    )
    for torch_suffix, paddle_suffix, torch_shape, paddle_shape in param_specs:
        torch_name = "{}.{}".format(torch_base_name, torch_suffix)
        paddle_name = "{}.{}".format(paddle_base_name, paddle_suffix)
        torch_state[torch_name] = torch_shape
        paddle_state[paddle_name] = paddle_shape
        name_map[torch_name] = paddle_name
def gen_fc2conv(in_channels, out_channels, torch_state, paddle_state, name_map,
                torch_base_name, paddle_base_name):
    """Record the shapes and the torch->paddle name mapping for a
    weight-normalized fully-connected layer converted to a 1x1 conv.

    Mutates torch_state / paddle_state (name -> shape) and name_map
    (torch name -> paddle name) in place.
    """
    # (torch suffix, paddle suffix, torch shape, paddle shape) per parameter;
    # the FC weight becomes a 1x1 conv kernel on the paddle side.
    param_specs = (
        ("bias", "b_0", (out_channels, ), [out_channels, ]),
        ("weight_g", "w_1", (out_channels, 1), [out_channels, ]),
        ("weight_v", "w_0", (out_channels, in_channels),
         [out_channels, in_channels, 1, 1]),
    )
    for torch_suffix, paddle_suffix, torch_shape, paddle_shape in param_specs:
        torch_name = "{}.{}".format(torch_base_name, torch_suffix)
        paddle_name = "{}.{}".format(paddle_base_name, paddle_suffix)
        torch_state[torch_name] = torch_shape
        paddle_state[paddle_name] = paddle_shape
        name_map[torch_name] = paddle_name
def generate_name_map(name_scope):
    """Build, validate and print the torch -> paddle parameter-name mapping
    for the DeepVoice3 TTS model (encoder, decoder, attention, converter).

    For every parameter the expected torch shape and the expected paddle
    shape are recorded; at the end the element counts of each pair are
    asserted equal and one line per mapping is printed.

    Args:
        name_scope (str): root scope prepended to every paddle parameter
            name (e.g. "dv3").

    Side effects:
        Prints one tab-separated line per mapped parameter; raises
        AssertionError if any torch/paddle shape pair disagrees in size.
    """
    TTS_model_idx = 0
    prefix = "/".join([name_scope, "DeepVoiceTTS_{}".format(TTS_model_idx)])
    _frontend = getattr(frontend, hparams.frontend)
    torch_state = OrderedDict()
    paddle_state = OrderedDict()
    name_map = OrderedDict()

    # text embedding
    torch_name = "seq2seq.encoder.embed_tokens.weight"
    paddle_name = "{}/ConvS2S_0/Encoder_0/Embedding_0.w_0".format(prefix)
    torch_shape = (_frontend.n_vocab, hparams.text_embed_dim)
    paddle_shape = [_frontend.n_vocab, hparams.text_embed_dim]
    torch_state[torch_name] = torch_shape
    paddle_state[paddle_name] = paddle_shape
    name_map[torch_name] = paddle_name

    # encoder
    Conv1D_idx = 0
    Conv1DGLU_idx = 0
    if hparams.n_speakers > 1:
        # speaker-conditioning projections at the encoder input/output
        for i in [1, 2]:
            torch_base_name = "seq2seq.encoder.speaker_fc{}".format(i)
            paddle_base_name = "{}/ConvS2S_0/Encoder_0/Conv1D_{}/Conv2D_0".format(
                prefix, Conv1D_idx)
            Conv1D_idx += 1
            gen_fc2conv(hparams.speaker_embed_dim, hparams.text_embed_dim,
                        torch_state, paddle_state, name_map, torch_base_name,
                        paddle_base_name)

    # encoder convolution specs: (out_channels, kernel_size, dilation)
    h = hparams.encoder_channels
    k = hparams.kernel_size
    convolutions = [(h, k, 1), (h, k, 3), (h, k, 9), (h, k, 27), (h, k, 1),
                    (h, k, 3), (h, k, 9), (h, k, 27), (h, k, 1), (h, k, 3)]
    torch_layer_idx = 0
    in_channels = hparams.text_embed_dim

    # optional 1x1 channel projections followed by Conv1DGLU blocks
    for out_channels, kernel_size, dilation in convolutions:
        if in_channels != out_channels:
            # 1x1 projection inserted when the channel count changes
            torch_base_name = "seq2seq.encoder.convolutions.{}".format(
                torch_layer_idx)
            paddle_base_name = "{}/ConvS2S_0/Encoder_0/Conv1D_{}/Conv2D_0".format(
                prefix, Conv1D_idx)
            gen_conv(in_channels, out_channels, 1, torch_state, paddle_state,
                     name_map, torch_base_name, paddle_base_name)
            torch_layer_idx += 2
            Conv1D_idx += 1
            in_channels = out_channels
        torch_base_name = "seq2seq.encoder.convolutions.{}.conv".format(
            torch_layer_idx)
        paddle_base_name = "{}/ConvS2S_0/Encoder_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        # GLU doubles the channel count inside the gated conv
        gen_conv(in_channels, 2 * out_channels, kernel_size, torch_state,
                 paddle_state, name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "seq2seq.encoder.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/ConvS2S_0/Encoder_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, out_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        in_channels = out_channels

    # last encoder conv1d (back to text_embed_dim)
    torch_base_name = "seq2seq.encoder.convolutions.{}".format(torch_layer_idx)
    paddle_base_name = "{}/ConvS2S_0/Encoder_0/Conv1D_{}/Conv2D_0".format(
        prefix, Conv1D_idx)
    gen_conv(in_channels, hparams.text_embed_dim, 1, torch_state, paddle_state,
             name_map, torch_base_name, paddle_base_name)
    torch_layer_idx += 2
    Conv1D_idx += 1

    # decoder
    # position embeddings (query positions, then key positions)
    PositionEmbedding_idx = 0
    torch_name = "seq2seq.decoder.embed_query_positions.weight"
    paddle_name = "{}/ConvS2S_0/Decoder_0/PositionEmbedding_{}/Embedding_0.w_0".format(
        prefix, PositionEmbedding_idx)
    torch_shape = (hparams.max_positions, hparams.decoder_channels)
    paddle_shape = [hparams.max_positions, hparams.decoder_channels]
    PositionEmbedding_idx += 1
    torch_state[torch_name] = torch_shape
    paddle_state[paddle_name] = paddle_shape
    name_map[torch_name] = paddle_name

    torch_name = "seq2seq.decoder.embed_keys_positions.weight"
    paddle_name = "{}/ConvS2S_0/Decoder_0/PositionEmbedding_{}/Embedding_0.w_0".format(
        prefix, PositionEmbedding_idx)
    PositionEmbedding_idx += 1
    torch_shape = (hparams.max_positions, hparams.text_embed_dim)
    paddle_shape = [hparams.max_positions, hparams.text_embed_dim]
    torch_state[torch_name] = torch_shape
    paddle_state[paddle_name] = paddle_shape
    name_map[torch_name] = paddle_name

    Conv1D_idx = 0
    Conv1DGLU_idx = 0
    if hparams.n_speakers > 1:
        for i in [1, 2]:
            torch_base_name = "seq2seq.decoder.speaker_proj{}".format(i)
            paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1D_{}/Conv2D_0".format(
                prefix, Conv1D_idx)
            Conv1D_idx += 1
            gen_fc2conv(hparams.speaker_embed_dim, 1, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)

    # prenet
    torch_layer_idx = 0
    h = hparams.decoder_channels
    k = hparams.kernel_size
    prenet_convolutions = [(h, k, 1), (h, k, 3)]
    in_channels = hparams.num_mels * hparams.outputs_per_step
    for out_channels, kernel_size, dilation in prenet_convolutions:
        if in_channels != out_channels:
            torch_base_name = "seq2seq.decoder.preattention.{}".format(
                torch_layer_idx)
            paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1D_{}/Conv2D_0".format(
                prefix, Conv1D_idx)
            gen_conv(in_channels, out_channels, 1, torch_state, paddle_state,
                     name_map, torch_base_name, paddle_base_name)
            torch_layer_idx += 2
            Conv1D_idx += 1
            in_channels = out_channels
        torch_base_name = "seq2seq.decoder.preattention.{}.conv".format(
            torch_layer_idx)
        paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * out_channels, kernel_size, torch_state,
                 paddle_state, name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "seq2seq.decoder.preattention.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, out_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        in_channels = out_channels

    # conv & attn
    torch_layer_idx = 0
    convolutions = [(h, k, 1), (h, k, 3), (h, k, 9), (h, k, 27), (h, k, 1)]
    for out_channels, kernel_size, dilation in convolutions:
        if in_channels != out_channels:
            torch_base_name = "seq2seq.decoder.convolutions.{}".format(
                torch_layer_idx)
            paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1D_{}/Conv2D_0".format(
                prefix, Conv1D_idx)
            # BUG FIX: the original call duplicated the first three
            # positional arguments (11 args against gen_conv's 8-parameter
            # signature), which would raise TypeError whenever this
            # projection branch ran. Use the canonical 8-argument 1x1
            # projection form, matching every other call site.
            gen_conv(in_channels, out_channels, 1, torch_state, paddle_state,
                     name_map, torch_base_name, paddle_base_name)
            torch_layer_idx += 2
            Conv1D_idx += 1
            in_channels = out_channels
        torch_base_name = "seq2seq.decoder.convolutions.{}.conv".format(
            torch_layer_idx)
        paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * out_channels, kernel_size, torch_state,
                 paddle_state, name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "seq2seq.decoder.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, out_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        in_channels = out_channels

    # attention layers: only positions flagged True carry an attention block
    attention = [True, False, False, False, True]
    AttentionLayer_idx = 0
    parts = [
        "query", "key" if hparams.key_projection else None, "value"
        if hparams.value_projection else None, "out"
    ]
    parts = [x for x in parts if x is not None]
    for (i, (attn, (out_channels, kernel_size,
                    dilation))) in enumerate(zip(attention, convolutions)):
        if not attn:
            in_channels = out_channels
            continue
        for ipart, part in enumerate(parts):
            torch_base_name = "seq2seq.decoder.attention.{}.{}_projection".format(
                i, part)
            paddle_base_name = "{}/ConvS2S_0/Decoder_0/AttentionLayer_{}/Conv1D_{}/Conv2D_0".format(
                prefix, AttentionLayer_idx, ipart)
            if part == "query":
                C_in = out_channels
                C_out = hparams.text_embed_dim
            elif part == "out":
                C_in = hparams.text_embed_dim
                C_out = out_channels
            else:
                # key/value projections stay inside the embedding space
                C_in = hparams.text_embed_dim
                C_out = hparams.text_embed_dim
            gen_fc2conv(C_in, C_out, torch_state, paddle_state, name_map,
                        torch_base_name, paddle_base_name)
        in_channels = out_channels
        AttentionLayer_idx += 1

    # last decoder conv (projects back to mel frames)
    torch_base_name = "seq2seq.decoder.last_conv"
    paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1D_{}/Conv2D_0".format(
        prefix, Conv1D_idx)
    gen_conv(in_channels, hparams.num_mels * hparams.outputs_per_step, 1,
             torch_state, paddle_state, name_map, torch_base_name,
             paddle_base_name)
    Conv1D_idx += 1

    # projection for the done (stop-token) output
    torch_base_name = "seq2seq.decoder.fc"
    paddle_base_name = "{}/ConvS2S_0/Decoder_0/Conv1D_{}/Conv2D_0".format(
        prefix, Conv1D_idx)
    gen_fc2conv(hparams.num_mels * hparams.outputs_per_step, 1, torch_state,
                paddle_state, name_map, torch_base_name, paddle_base_name)

    # converter (postnet) with optional time upsampling
    if hparams.use_decoder_state_for_postnet_input:
        in_dim = hparams.decoder_channels // hparams.outputs_per_step
    else:
        in_dim = hparams.num_mels
    time_upsampling = max(hparams.downsample_step // hparams.outputs_per_step,
                          1)
    assert time_upsampling == hparams.downsample_step, "implementation difference occured"
    assert time_upsampling in [1, 2, 4], "other values not supported yet"
    torch_layer_idx = 0
    Conv1D_idx = 0
    Conv1DGLU_idx = 0
    Conv1DTranspose_idx = 0
    h = hparams.converter_channels
    k = hparams.kernel_size
    postnet_convolutions = [(h, k, 1), (h, k, 3), (2 * h, k, 1), (2 * h, k, 3)]
    in_channels = postnet_convolutions[0][0]
    if time_upsampling == 4:
        # input projection
        torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1D_{}/Conv2D_0".format(
            prefix, Conv1D_idx)
        gen_conv(in_dim, in_channels, 1, torch_state, paddle_state, name_map,
                 torch_base_name, paddle_base_name)
        torch_layer_idx += 1
        Conv1D_idx += 1
        # first 2x transposed conv
        torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DTranspose_{}/Conv2DTranspose_0".format(
            prefix, Conv1DTranspose_idx)
        gen_conv(in_channels, in_channels, 2, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        torch_layer_idx += 1
        Conv1DTranspose_idx += 1
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * in_channels, 3, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, in_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        # FIX: the original formatted this GLU name with Conv1DTranspose_idx;
        # it only worked because both counters happened to be equal here.
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * in_channels, 3, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, in_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        # second 2x transposed conv
        torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DTranspose_{}/Conv2DTranspose_0".format(
            prefix, Conv1DTranspose_idx)
        gen_conv(in_channels, in_channels, 2, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        torch_layer_idx += 1
        Conv1DTranspose_idx += 1
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * in_channels, 3, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, in_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * in_channels, 3, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, in_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
    elif time_upsampling == 2:
        torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1D_{}/Conv2D_0".format(
            prefix, Conv1D_idx)
        gen_conv(in_dim, in_channels, 1, torch_state, paddle_state, name_map,
                 torch_base_name, paddle_base_name)
        torch_layer_idx += 1
        Conv1D_idx += 1
        torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DTranspose_{}/Conv2DTranspose_0".format(
            prefix, Conv1DTranspose_idx)
        gen_conv(in_channels, in_channels, 2, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        torch_layer_idx += 1
        Conv1DTranspose_idx += 1
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * in_channels, 3, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, in_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        # FIX: Conv1DGLU_idx, not Conv1DTranspose_idx (see note in the
        # time_upsampling == 4 branch).
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * in_channels, 3, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, in_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
    else:
        assert time_upsampling == 1, "other values are not supported"
        torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1D_{}/Conv2D_0".format(
            prefix, Conv1D_idx)
        gen_conv(in_dim, in_channels, 1, torch_state, paddle_state, name_map,
                 torch_base_name, paddle_base_name)
        torch_layer_idx += 1
        Conv1D_idx += 1
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * in_channels, 3, torch_state, paddle_state,
                 name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, in_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1

    # main postnet stack
    for (out_channels, kernel_size, dilation) in postnet_convolutions:
        if in_channels != out_channels:
            torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1D_{}/Conv2D_0".format(
                prefix, Conv1D_idx)
            gen_conv(in_channels, out_channels, 1, torch_state, paddle_state,
                     name_map, torch_base_name, paddle_base_name)
            torch_layer_idx += 2
            Conv1D_idx += 1
            in_channels = out_channels
        torch_base_name = "postnet.convolutions.{}.conv".format(torch_layer_idx)
        paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_0/Conv2D_0".format(
            prefix, Conv1DGLU_idx)
        gen_conv(in_channels, 2 * out_channels, kernel_size, torch_state,
                 paddle_state, name_map, torch_base_name, paddle_base_name)
        if hparams.n_speakers > 1:
            torch_base_name = "postnet.convolutions.{}.speaker_proj".format(
                torch_layer_idx)
            paddle_base_name = "{}/Converter_0/Conv1DGLU_{}/Conv1D_1/Conv2D_0".format(
                prefix, Conv1DGLU_idx)
            gen_fc2conv(hparams.speaker_embed_dim, out_channels, torch_state,
                        paddle_state, name_map, torch_base_name,
                        paddle_base_name)
        torch_layer_idx += 1
        Conv1DGLU_idx += 1
        in_channels = out_channels

    # last converter conv (projects to the linear spectrogram dimension)
    linear_dim = hparams.fft_size // 2 + 1
    torch_base_name = "postnet.convolutions.{}".format(torch_layer_idx)
    paddle_base_name = "{}/Converter_0/Conv1D_{}/Conv2D_0".format(prefix,
                                                                  Conv1D_idx)
    gen_conv(in_channels, linear_dim, 1, torch_state, paddle_state, name_map,
             torch_base_name, paddle_base_name)
    torch_layer_idx += 2
    Conv1D_idx += 1

    # speaker embedding table
    if hparams.n_speakers > 1:
        torch_name = "embed_speakers.weight"
        torch_shape = (hparams.n_speakers, hparams.speaker_embed_dim)
        paddle_name = "{}/Embedding_0.w_0".format(prefix)
        paddle_shape = [hparams.n_speakers, hparams.speaker_embed_dim]
        torch_state[torch_name] = torch_shape
        paddle_state[paddle_name] = paddle_shape
        name_map[torch_name] = paddle_name

    # sanity-check sizes, then print every mapping
    for tname in name_map:
        assert np.prod(torch_state[tname]) == np.prod(paddle_state[name_map[
            tname]]), "{} does not match".format(tname)
        print("{}\t{}\t{}".format(tname, name_map[tname],
                                  paddle_state[name_map[tname]]))
if __name__ == "__main__":
    # CLI entry point: optionally load an hparams preset from a JSON file,
    # apply command-line overrides, then emit the torch -> paddle mapping.
    cli_args, _ = build_arg_parser().parse_known_args()
    if cli_args.preset is not None:
        with open(cli_args.preset) as preset_file:
            hparams.parse_json(preset_file.read())
    # Override hyper parameters
    hparams.parse(cli_args.hparams)
    generate_name_map("dv3")
| 42.213376
| 100
| 0.651452
| 3,356
| 26,510
| 4.722586
| 0.070024
| 0.086819
| 0.070541
| 0.056344
| 0.817212
| 0.795571
| 0.778724
| 0.765727
| 0.742066
| 0.733485
| 0
| 0.026847
| 0.253904
| 26,510
| 627
| 101
| 42.280702
| 0.774458
| 0.039532
| 0
| 0.729124
| 0
| 0
| 0.143464
| 0.126977
| 0
| 0
| 0
| 0
| 0.008147
| 1
| 0.008147
| false
| 0
| 0.010183
| 0
| 0.020367
| 0.002037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0eec5bd2aa8e9bc5ed5238f506b3e59a094f4e01
| 214,506
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_yang_ns.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_yang_ns.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_yang_ns.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
# Name of the YDK model bundle the definitions in this module belong to.
BUNDLE_NAME = "cisco_ios_xr"
CAPABILITIES = {
"CISCO-ENTITY-FRU-CONTROL-MIB": "2003-11-24",
"Cisco-IOS-XR-Ethernet-SPAN-cfg": "2015-11-09",
"Cisco-IOS-XR-Ethernet-SPAN-datatypes": "2015-11-09",
"Cisco-IOS-XR-Ethernet-SPAN-oper": "2015-11-09",
"Cisco-IOS-XR-Ethernet-SPAN-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-Ethernet-SPAN-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-Ethernet-SPAN-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg": "2015-11-09",
"Cisco-IOS-XR-Subscriber-infra-subdb-oper": "2018-09-28",
"Cisco-IOS-XR-Subscriber-infra-subdb-oper-sub1": "2018-09-28",
"Cisco-IOS-XR-Subscriber-infra-subdb-oper-sub2": "2018-09-28",
"Cisco-IOS-XR-aaa-aaacore-cfg": "2018-09-04",
"Cisco-IOS-XR-aaa-diameter-base-mib-cfg": "2015-11-09",
"Cisco-IOS-XR-aaa-diameter-cfg": "2019-09-20",
"Cisco-IOS-XR-aaa-diameter-oper": "2019-09-26",
"Cisco-IOS-XR-aaa-diameter-oper-sub1": "2019-09-26",
"Cisco-IOS-XR-aaa-li-cfg": "2015-11-09",
"Cisco-IOS-XR-aaa-lib-cfg": "2017-11-14",
"Cisco-IOS-XR-aaa-lib-datatypes": "2017-11-14",
"Cisco-IOS-XR-aaa-locald-admin-cfg": "2015-11-09",
"Cisco-IOS-XR-aaa-locald-cfg": "2018-06-04",
"Cisco-IOS-XR-aaa-locald-oper": "2015-11-09",
"Cisco-IOS-XR-aaa-locald-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-aaa-nacm-cfg": "2017-09-30",
"Cisco-IOS-XR-aaa-nacm-oper": "2015-11-09",
"Cisco-IOS-XR-aaa-nacm-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-aaa-protocol-radius-cfg": "2017-09-07",
"Cisco-IOS-XR-aaa-protocol-radius-oper": "2017-11-13",
"Cisco-IOS-XR-aaa-protocol-radius-oper-sub1": "2017-11-13",
"Cisco-IOS-XR-aaa-protocol-radius-oper-sub2": "2017-11-13",
"Cisco-IOS-XR-aaa-tacacs-cfg": "2017-09-07",
"Cisco-IOS-XR-aaa-tacacs-oper": "2015-11-09",
"Cisco-IOS-XR-aaa-tacacs-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-accounting-cfg": "2017-05-31",
"Cisco-IOS-XR-alarmgr-server-oper": "2015-11-09",
"Cisco-IOS-XR-alarmgr-server-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ascii-ltrace-oper": "2018-01-21",
"Cisco-IOS-XR-ascii-ltrace-oper-sub1": "2018-01-21",
"Cisco-IOS-XR-asic-errors-oper": "2017-09-07",
"Cisco-IOS-XR-asic-errors-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-asic-errors-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-asr9k-ep-port-mode-cfg": "2019-01-06",
"Cisco-IOS-XR-asr9k-fab-cfg": "2015-11-09",
"Cisco-IOS-XR-asr9k-fia-cfg": "2017-08-17",
"Cisco-IOS-XR-asr9k-fsi-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-fsi-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-lc-ethctrl-cfg": "2015-11-09",
"Cisco-IOS-XR-asr9k-lc-ethctrl-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-lc-ethctrl-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-lc-fca-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-lc-fca-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-lc-fca-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-asr9k-lc-pwrglide-cfg": "2015-11-09",
"Cisco-IOS-XR-asr9k-lpts-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-lpts-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-netflow-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-netflow-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-netflow-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-asr9k-np-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-np-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-prm-cfg": "2017-11-29",
"Cisco-IOS-XR-asr9k-ptp-pd-cfg": "2017-05-20",
"Cisco-IOS-XR-asr9k-ptp-pd-oper": "2017-03-16",
"Cisco-IOS-XR-asr9k-ptp-pd-oper-sub1": "2017-03-16",
"Cisco-IOS-XR-asr9k-qos-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-qos-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-qos-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-asr9k-sc-diag-admin-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-sc-diag-admin-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-sc-diag-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-sc-diag-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-sc-envmon-admin-oper": "2017-01-19",
"Cisco-IOS-XR-asr9k-sc-envmon-admin-oper-sub1": "2017-01-19",
"Cisco-IOS-XR-asr9k-sc-envmon-oper": "2017-01-19",
"Cisco-IOS-XR-asr9k-sc-envmon-oper-sub1": "2017-01-19",
"Cisco-IOS-XR-asr9k-sc-invmgr-admin-oper": "2018-06-28",
"Cisco-IOS-XR-asr9k-sc-invmgr-admin-oper-sub1": "2018-06-28",
"Cisco-IOS-XR-asr9k-sc-invmgr-oper": "2018-06-28",
"Cisco-IOS-XR-asr9k-sc-invmgr-oper-sub1": "2018-06-28",
"Cisco-IOS-XR-asr9k-xbar-oper": "2015-11-09",
"Cisco-IOS-XR-asr9k-xbar-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-asr9k-xbar-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-asr9k-xbar-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-atm-common-datatypes": "2015-11-09",
"Cisco-IOS-XR-atm-vcm-cfg": "2015-11-09",
"Cisco-IOS-XR-atm-vcm-oper": "2017-09-07",
"Cisco-IOS-XR-atm-vcm-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-atm-vcm-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-atm-vcm-oper-sub3": "2017-09-07",
"Cisco-IOS-XR-atm-vcm-oper-sub4": "2017-09-07",
"Cisco-IOS-XR-bundlemgr-cfg": "2017-05-01",
"Cisco-IOS-XR-bundlemgr-oper": "2018-08-18",
"Cisco-IOS-XR-bundlemgr-oper-sub1": "2018-08-18",
"Cisco-IOS-XR-bundlemgr-oper-sub2": "2018-08-18",
"Cisco-IOS-XR-call-home-cfg": "2018-07-24",
"Cisco-IOS-XR-cdp-cfg": "2017-08-16",
"Cisco-IOS-XR-cdp-oper": "2015-07-30",
"Cisco-IOS-XR-cdp-oper-sub1": "2015-07-30",
"Cisco-IOS-XR-cfgmgr-rollback-act": "2016-04-17",
"Cisco-IOS-XR-clear-counters-act": "2018-02-14",
"Cisco-IOS-XR-clns-isis-cfg": "2019-03-15",
"Cisco-IOS-XR-clns-isis-datatypes": "2017-05-01",
"Cisco-IOS-XR-clns-isis-oper": "2019-02-23",
"Cisco-IOS-XR-clns-isis-oper-sub1": "2019-02-23",
"Cisco-IOS-XR-clns-isis-oper-sub2": "2019-02-23",
"Cisco-IOS-XR-clns-isis-oper-sub3": "2019-02-23",
"Cisco-IOS-XR-cmproxy-oper": "2015-11-09",
"Cisco-IOS-XR-cmproxy-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-common-acl-datatypes": "2015-11-09",
"Cisco-IOS-XR-config-cfgmgr-cfg": "2015-11-09",
"Cisco-IOS-XR-config-cfgmgr-exec-oper": "2017-09-07",
"Cisco-IOS-XR-config-cfgmgr-exec-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-config-cfgmgr-oper": "2015-11-09",
"Cisco-IOS-XR-config-mda-cfg": "2015-11-09",
"Cisco-IOS-XR-config-mibs-cfg": "2015-09-29",
"Cisco-IOS-XR-config-valid-ccv-cfg": "2015-11-09",
"Cisco-IOS-XR-config-valid-ccv-oper": "2015-11-09",
"Cisco-IOS-XR-config-valid-ccv-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-controller-ains-act": "2018-01-09",
"Cisco-IOS-XR-controller-odu-datatypes": "2015-11-09",
"Cisco-IOS-XR-controller-odu-oper": "2015-11-09",
"Cisco-IOS-XR-controller-odu-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-controller-optics-cfg": "2017-09-07",
"Cisco-IOS-XR-controller-optics-oper": "2017-09-07",
"Cisco-IOS-XR-controller-optics-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-controller-otu-cfg": "2017-05-01",
"Cisco-IOS-XR-controller-otu-oper": "2017-05-01",
"Cisco-IOS-XR-controller-otu-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-crypto-act": "2019-06-18",
"Cisco-IOS-XR-crypto-macsec-mka-cfg": "2015-11-09",
"Cisco-IOS-XR-crypto-macsec-mka-if-cfg": "2015-11-09",
"Cisco-IOS-XR-crypto-macsec-mka-oper": "2015-11-09",
"Cisco-IOS-XR-crypto-macsec-mka-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-crypto-macsec-secy-oper": "2015-11-09",
"Cisco-IOS-XR-crypto-macsec-secy-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-crypto-mibs-ipsecflowmon-cfg": "2015-11-09",
"Cisco-IOS-XR-crypto-sam-cfg": "2017-11-21",
"Cisco-IOS-XR-crypto-sam-oper": "2017-09-07",
"Cisco-IOS-XR-crypto-sam-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-crypto-ssh-cfg": "2019-03-28",
"Cisco-IOS-XR-crypto-ssh-oper": "2017-08-25",
"Cisco-IOS-XR-crypto-ssh-oper-sub1": "2017-08-25",
"Cisco-IOS-XR-crypto-ssh-oper-sub2": "2017-08-25",
"Cisco-IOS-XR-dot1x-cfg": "2015-11-09",
"Cisco-IOS-XR-dot1x-if-cfg": "2015-11-09",
"Cisco-IOS-XR-dot1x-oper": "2015-11-09",
"Cisco-IOS-XR-dot1x-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-drivers-icpe-ethernet-cfg": "2015-11-09",
"Cisco-IOS-XR-drivers-media-eth-act": "2018-02-12",
"Cisco-IOS-XR-drivers-media-eth-cfg": "2017-05-01",
"Cisco-IOS-XR-drivers-media-eth-oper": "2017-05-01",
"Cisco-IOS-XR-drivers-media-eth-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-drivers-mpa-infra-cfg": "2015-11-09",
"Cisco-IOS-XR-drivers-vpa-infra-cfg": "2015-11-09",
"Cisco-IOS-XR-dwdm-ui-cfg": "2017-05-01",
"Cisco-IOS-XR-dwdm-ui-oper": "2015-11-09",
"Cisco-IOS-XR-dwdm-ui-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-eigrp-cfg": "2018-06-15",
"Cisco-IOS-XR-eigrp-datatypes": "2018-04-05",
"Cisco-IOS-XR-eigrp-oper": "2018-04-05",
"Cisco-IOS-XR-eigrp-oper-sub1": "2018-04-05",
"Cisco-IOS-XR-es-ace-cfg": "2018-02-26",
"Cisco-IOS-XR-es-acl-cfg": "2018-02-26",
"Cisco-IOS-XR-es-acl-datatypes": "2017-05-01",
"Cisco-IOS-XR-es-acl-oper": "2017-05-01",
"Cisco-IOS-XR-es-acl-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-ethernet-cfm-cfg": "2018-07-27",
"Cisco-IOS-XR-ethernet-cfm-datatypes": "2015-11-09",
"Cisco-IOS-XR-ethernet-cfm-oper": "2018-12-20",
"Cisco-IOS-XR-ethernet-cfm-oper-sub1": "2018-12-20",
"Cisco-IOS-XR-ethernet-cfm-oper-sub2": "2018-12-20",
"Cisco-IOS-XR-ethernet-cfm-oper-sub3": "2018-12-20",
"Cisco-IOS-XR-ethernet-cfm-oper-sub4": "2018-12-20",
"Cisco-IOS-XR-ethernet-cfm-sat-cfg": "2015-11-09",
"Cisco-IOS-XR-ethernet-link-oam-cfg": "2015-11-09",
"Cisco-IOS-XR-ethernet-link-oam-oper": "2015-11-09",
"Cisco-IOS-XR-ethernet-link-oam-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ethernet-lldp-cfg": "2017-05-01",
"Cisco-IOS-XR-ethernet-lldp-oper": "2017-11-13",
"Cisco-IOS-XR-ethernet-lldp-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ethernet-lldp-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-evpn-oper": "2018-09-26",
"Cisco-IOS-XR-evpn-oper-sub1": "2018-09-26",
"Cisco-IOS-XR-fib-common-cfg": "2017-05-01",
"Cisco-IOS-XR-fib-common-oper": "2017-09-07",
"Cisco-IOS-XR-fib-common-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-fib-common-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-fib-common-oper-sub3": "2017-09-07",
"Cisco-IOS-XR-fib-common-oper-sub4": "2017-09-07",
"Cisco-IOS-XR-fib-common-oper-sub5": "2017-09-07",
"Cisco-IOS-XR-fib-common-oper-sub6": "2017-09-07",
"Cisco-IOS-XR-fib-common-oper-sub7": "2017-09-07",
"Cisco-IOS-XR-flashmib-cfg": "2015-12-15",
"Cisco-IOS-XR-flowspec-cfg": "2015-11-09",
"Cisco-IOS-XR-flowspec-oper": "2015-11-09",
"Cisco-IOS-XR-flowspec-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-fpd-infra-cfg": "2015-11-09",
"Cisco-IOS-XR-freqsync-cfg": "2017-09-30",
"Cisco-IOS-XR-freqsync-datatypes": "2015-11-09",
"Cisco-IOS-XR-freqsync-oper": "2017-10-20",
"Cisco-IOS-XR-freqsync-oper-sub1": "2017-10-20",
"Cisco-IOS-XR-freqsync-sat-cfg": "2015-11-09",
"Cisco-IOS-XR-group-cfg": "2016-04-29",
"Cisco-IOS-XR-ha-eem-cfg": "2015-07-30",
"Cisco-IOS-XR-ha-eem-policy-oper": "2016-02-05",
"Cisco-IOS-XR-ha-eem-policy-oper-sub1": "2016-02-05",
"Cisco-IOS-XR-hwmod-bcc-disable-cfg": "2015-11-09",
"Cisco-IOS-XR-hwmod-mpa-reload-act": "2016-06-30",
"Cisco-IOS-XR-icpe-infra-cfg": "2017-09-30",
"Cisco-IOS-XR-icpe-infra-oper": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub10": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub11": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub12": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub4": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub5": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub6": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub7": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub8": "2015-11-09",
"Cisco-IOS-XR-icpe-infra-oper-sub9": "2015-11-09",
"Cisco-IOS-XR-icpe-sdacp-oper": "2015-11-09",
"Cisco-IOS-XR-icpe-sdacp-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-icpe-sdacp-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-icpe-sdacp-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-iedge4710-cfg": "2017-09-07",
"Cisco-IOS-XR-iedge4710-oper": "2018-09-20",
"Cisco-IOS-XR-iedge4710-oper-sub1": "2018-09-20",
"Cisco-IOS-XR-iedge4710-oper-sub2": "2018-09-20",
"Cisco-IOS-XR-iedge4710-oper-sub3": "2018-09-20",
"Cisco-IOS-XR-ifmgr-cfg": "2017-09-07",
"Cisco-IOS-XR-ifmgr-oper": "2015-07-30",
"Cisco-IOS-XR-ifmgr-oper-sub1": "2015-07-30",
"Cisco-IOS-XR-ifmgr-oper-sub2": "2015-07-30",
"Cisco-IOS-XR-ikev2-cfg": "2015-11-09",
"Cisco-IOS-XR-ikev2-oper": "2015-11-09",
"Cisco-IOS-XR-ikev2-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-infra-alarm-logger-cfg": "2017-02-23",
"Cisco-IOS-XR-infra-alarm-logger-datatypes": "2015-01-07",
"Cisco-IOS-XR-infra-alarm-logger-oper": "2017-09-07",
"Cisco-IOS-XR-infra-alarm-logger-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-infra-ceredundancymib-cfg": "2015-01-07",
"Cisco-IOS-XR-infra-confcopymib-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-correlator-cfg": "2017-05-01",
"Cisco-IOS-XR-infra-correlator-oper": "2017-09-07",
"Cisco-IOS-XR-infra-correlator-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-infra-fti-cfg": "2017-11-13",
"Cisco-IOS-XR-infra-fti-oper": "2015-11-09",
"Cisco-IOS-XR-infra-fti-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-infra-infra-cfg": "2016-06-16",
"Cisco-IOS-XR-infra-infra-clock-linux-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-infra-locale-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-ltrace-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-notification-log-mib-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-nsr-cfg": "2017-06-27",
"Cisco-IOS-XR-infra-objmgr-cfg": "2017-05-01",
"Cisco-IOS-XR-infra-objmgr-oper": "2017-05-01",
"Cisco-IOS-XR-infra-objmgr-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-infra-placed-act": "2018-01-10",
"Cisco-IOS-XR-infra-policymgr-cfg": "2019-10-02",
"Cisco-IOS-XR-infra-policymgr-oper": "2018-01-17",
"Cisco-IOS-XR-infra-rcmd-cfg": "2017-10-15",
"Cisco-IOS-XR-infra-rcmd-oper": "2015-11-09",
"Cisco-IOS-XR-infra-rcmd-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-infra-rmf-oper": "2015-11-09",
"Cisco-IOS-XR-infra-rmf-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-infra-rsi-cfg": "2018-06-15",
"Cisco-IOS-XR-infra-rsi-oper": "2017-09-07",
"Cisco-IOS-XR-infra-rsi-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-infra-rsi-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-infra-rsi-subscriber-cfg": "2015-07-30",
"Cisco-IOS-XR-infra-rt-check-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-serg-cfg": "2018-01-31",
"Cisco-IOS-XR-infra-serg-oper": "2017-09-07",
"Cisco-IOS-XR-infra-serg-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-infra-serg-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-infra-sla-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-sla-datatypes": "2015-11-09",
"Cisco-IOS-XR-infra-sla-oper": "2015-11-09",
"Cisco-IOS-XR-infra-statsd-act": "2018-01-10",
"Cisco-IOS-XR-infra-statsd-cfg": "2017-05-01",
"Cisco-IOS-XR-infra-statsd-oper": "2015-11-09",
"Cisco-IOS-XR-infra-statsd-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-infra-syslog-cfg": "2017-10-31",
"Cisco-IOS-XR-infra-syslog-oper": "2018-02-23",
"Cisco-IOS-XR-infra-syslog-oper-sub1": "2018-02-22",
"Cisco-IOS-XR-infra-systemmib-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-tc-cfg": "2015-11-09",
"Cisco-IOS-XR-infra-tc-oper": "2019-06-17",
"Cisco-IOS-XR-infra-tc-oper-sub1": "2019-06-17",
"Cisco-IOS-XR-infra-xtc-agent-cfg": "2019-09-05",
"Cisco-IOS-XR-infra-xtc-agent-oper": "2019-09-09",
"Cisco-IOS-XR-infra-xtc-agent-oper-sub1": "2019-09-09",
"Cisco-IOS-XR-infra-xtc-agent-oper-sub2": "2019-09-09",
"Cisco-IOS-XR-infra-xtc-cfg": "2019-09-24",
"Cisco-IOS-XR-infra-xtc-oper": "2019-10-02",
"Cisco-IOS-XR-infra-xtc-oper-sub1": "2019-10-02",
"Cisco-IOS-XR-installmgr-admin-oper": "2015-11-09",
"Cisco-IOS-XR-installmgr-admin-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-installmgr-admin-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-installmgr-admin-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-invmgr-cfg": "2015-11-09",
"Cisco-IOS-XR-invmgr-oper": "2018-04-19",
"Cisco-IOS-XR-invmgr-oper-sub1": "2018-04-19",
"Cisco-IOS-XR-invmgr-oper-sub2": "2018-04-19",
"Cisco-IOS-XR-invmgr-oper-sub3": "2018-04-19",
"Cisco-IOS-XR-invmgr-oper-sub4": "2018-04-19",
"Cisco-IOS-XR-ip-bfd-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-bfd-oper": "2017-09-07",
"Cisco-IOS-XR-ip-bfd-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ip-daps-cfg": "2017-09-07",
"Cisco-IOS-XR-ip-daps-mib-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-daps-oper": "2015-11-09",
"Cisco-IOS-XR-ip-daps-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ip-domain-cfg": "2019-02-19",
"Cisco-IOS-XR-ip-domain-oper": "2017-05-01",
"Cisco-IOS-XR-ip-domain-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-ip-iarm-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-iarm-datatypes": "2015-01-07",
"Cisco-IOS-XR-ip-iarm-v4-oper": "2019-01-22",
"Cisco-IOS-XR-ip-iarm-v4-oper-sub1": "2019-01-22",
"Cisco-IOS-XR-ip-iarm-v6-oper": "2019-01-22",
"Cisco-IOS-XR-ip-iarm-v6-oper-sub1": "2019-01-22",
"Cisco-IOS-XR-ip-iarm-vrf-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-icmp-cfg": "2017-06-08",
"Cisco-IOS-XR-ip-iep-cfg": "2017-09-07",
"Cisco-IOS-XR-ip-iep-oper": "2017-09-07",
"Cisco-IOS-XR-ip-iep-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ip-mobileip-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-mobileip-oper": "2016-03-10",
"Cisco-IOS-XR-ip-mobileip-oper-sub1": "2016-03-10",
"Cisco-IOS-XR-ip-ntp-admin-oper": "2017-09-07",
"Cisco-IOS-XR-ip-ntp-admin-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ip-ntp-cfg": "2017-10-15",
"Cisco-IOS-XR-ip-ntp-oper": "2015-11-09",
"Cisco-IOS-XR-ip-ntp-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ip-pfilter-cfg": "2017-09-30",
"Cisco-IOS-XR-ip-pfilter-oper": "2015-11-09",
"Cisco-IOS-XR-ip-pfilter-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ip-pfilter-subscriber-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-raw-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-rib-cfg": "2017-07-31",
"Cisco-IOS-XR-ip-rib-ipv4-oper": "2019-01-11",
"Cisco-IOS-XR-ip-rib-ipv4-oper-sub1": "2019-01-11",
"Cisco-IOS-XR-ip-rib-ipv6-oper": "2019-01-11",
"Cisco-IOS-XR-ip-rib-ipv6-oper-sub1": "2019-01-11",
"Cisco-IOS-XR-ip-rip-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-rip-oper": "2015-11-09",
"Cisco-IOS-XR-ip-rip-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ip-rsvp-cfg": "2019-09-20",
"Cisco-IOS-XR-ip-rsvp-oper": "2017-09-07",
"Cisco-IOS-XR-ip-rsvp-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ip-sbfd-cfg": "2015-11-09",
"Cisco-IOS-XR-ip-sbfd-oper": "2017-09-07",
"Cisco-IOS-XR-ip-sbfd-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ip-static-cfg": "2018-09-21",
"Cisco-IOS-XR-ip-tcp-cfg": "2018-02-14",
"Cisco-IOS-XR-ip-tcp-oper": "2018-11-01",
"Cisco-IOS-XR-ip-tcp-oper-sub1": "2018-11-01",
"Cisco-IOS-XR-ip-tcp-oper-sub2": "2018-11-01",
"Cisco-IOS-XR-ip-tcp-oper-sub3": "2018-11-01",
"Cisco-IOS-XR-ip-tcp-oper-sub4": "2018-11-01",
"Cisco-IOS-XR-ip-tcp-oper-sub5": "2018-11-01",
"Cisco-IOS-XR-ip-udp-cfg": "2017-09-30",
"Cisco-IOS-XR-ip-udp-oper": "2018-08-09",
"Cisco-IOS-XR-ip-udp-oper-sub1": "2018-08-09",
"Cisco-IOS-XR-ip-udp-oper-sub2": "2018-08-09",
"Cisco-IOS-XR-ip-udp-oper-sub3": "2018-08-09",
"Cisco-IOS-XR-ip-udp-oper-sub4": "2018-08-09",
"Cisco-IOS-XR-ipv4-ace-cfg": "2018-05-08",
"Cisco-IOS-XR-ipv4-acl-cfg": "2018-05-08",
"Cisco-IOS-XR-ipv4-acl-datatypes": "2018-02-01",
"Cisco-IOS-XR-ipv4-acl-oper": "2017-05-01",
"Cisco-IOS-XR-ipv4-acl-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-ipv4-arp-act": "2018-10-08",
"Cisco-IOS-XR-ipv4-arp-cfg": "2017-05-01",
"Cisco-IOS-XR-ipv4-arp-oper": "2017-05-01",
"Cisco-IOS-XR-ipv4-arp-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-ipv4-arp-oper-sub2": "2017-05-01",
"Cisco-IOS-XR-ipv4-autorp-datatypes": "2015-11-09",
"Cisco-IOS-XR-ipv4-autorp-oper": "2015-11-09",
"Cisco-IOS-XR-ipv4-autorp-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ipv4-autorp-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-ipv4-bgp-act": "2016-10-12",
"Cisco-IOS-XR-ipv4-bgp-cfg": "2018-06-15",
"Cisco-IOS-XR-ipv4-bgp-datatypes": "2017-06-26",
"Cisco-IOS-XR-ipv4-bgp-oc-oper": "2017-09-07",
"Cisco-IOS-XR-ipv4-bgp-oc-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ipv4-bgp-oper": "2017-09-07",
"Cisco-IOS-XR-ipv4-bgp-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ipv4-cinetd-cfg": "2017-06-22",
"Cisco-IOS-XR-ipv4-dc-cfg": "2019-05-21",
"Cisco-IOS-XR-ipv4-dhcpd-cfg": "2019-07-19",
"Cisco-IOS-XR-ipv4-dhcpd-oper": "2019-06-25",
"Cisco-IOS-XR-ipv4-dhcpd-oper-sub1": "2019-06-25",
"Cisco-IOS-XR-ipv4-dhcpd-oper-sub2": "2019-06-25",
"Cisco-IOS-XR-ipv4-dhcpd-subscriber-cfg": "2017-09-07",
"Cisco-IOS-XR-ipv4-filesystems-cfg": "2017-11-28",
"Cisco-IOS-XR-ipv4-hsrp-cfg": "2017-11-05",
"Cisco-IOS-XR-ipv4-hsrp-oper": "2017-09-07",
"Cisco-IOS-XR-ipv4-hsrp-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ipv4-igmp-cfg": "2017-10-15",
"Cisco-IOS-XR-ipv4-igmp-dyn-tmpl-cfg": "2015-11-09",
"Cisco-IOS-XR-ipv4-igmp-oper": "2018-01-31",
"Cisco-IOS-XR-ipv4-igmp-oper-sub1": "2018-01-31",
"Cisco-IOS-XR-ipv4-io-cfg": "2018-01-11",
"Cisco-IOS-XR-ipv4-io-oper": "2018-08-01",
"Cisco-IOS-XR-ipv4-io-oper-sub1": "2018-08-01",
"Cisco-IOS-XR-ipv4-io-oper-sub2": "2018-08-01",
"Cisco-IOS-XR-ipv4-ma-cfg": "2015-07-30",
"Cisco-IOS-XR-ipv4-ma-oper": "2018-08-01",
"Cisco-IOS-XR-ipv4-ma-oper-sub1": "2018-08-01",
"Cisco-IOS-XR-ipv4-ma-subscriber-cfg": "2015-07-30",
"Cisco-IOS-XR-ipv4-mfwd-cfg": "2017-10-15",
"Cisco-IOS-XR-ipv4-msdp-cfg": "2017-10-15",
"Cisco-IOS-XR-ipv4-ospf-act": "2016-09-14",
"Cisco-IOS-XR-ipv4-ospf-cfg": "2018-05-14",
"Cisco-IOS-XR-ipv4-ospf-oper": "2018-04-18",
"Cisco-IOS-XR-ipv4-ospf-oper-sub1": "2018-04-18",
"Cisco-IOS-XR-ipv4-ospf-oper-sub2": "2018-04-18",
"Cisco-IOS-XR-ipv4-ospf-oper-sub3": "2018-04-18",
"Cisco-IOS-XR-ipv4-pim-cfg": "2017-10-15",
"Cisco-IOS-XR-ipv4-pim-oper": "2017-09-07",
"Cisco-IOS-XR-ipv4-pim-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ipv4-pim-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-ipv4-ping-act": "2016-08-20",
"Cisco-IOS-XR-ipv4-smiap-cfg": "2016-07-04",
"Cisco-IOS-XR-ipv4-telnet-cfg": "2015-11-09",
"Cisco-IOS-XR-ipv4-telnet-mgmt-cfg": "2015-11-09",
"Cisco-IOS-XR-ipv4-traceroute-act": "2016-09-17",
"Cisco-IOS-XR-ipv4-vrrp-cfg": "2018-05-19",
"Cisco-IOS-XR-ipv4-vrrp-oper": "2017-09-07",
"Cisco-IOS-XR-ipv4-vrrp-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ipv6-ace-cfg": "2018-04-03",
"Cisco-IOS-XR-ipv6-acl-cfg": "2018-04-03",
"Cisco-IOS-XR-ipv6-acl-datatypes": "2017-05-01",
"Cisco-IOS-XR-ipv6-acl-oper": "2017-05-01",
"Cisco-IOS-XR-ipv6-acl-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-ipv6-io-cfg": "2016-05-10",
"Cisco-IOS-XR-ipv6-io-oper": "2015-11-09",
"Cisco-IOS-XR-ipv6-io-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ipv6-ma-cfg": "2017-05-01",
"Cisco-IOS-XR-ipv6-ma-oper": "2018-08-01",
"Cisco-IOS-XR-ipv6-ma-oper-sub1": "2018-08-01",
"Cisco-IOS-XR-ipv6-ma-subscriber-cfg": "2017-01-11",
"Cisco-IOS-XR-ipv6-nd-cfg": "2017-05-01",
"Cisco-IOS-XR-ipv6-nd-oper": "2019-09-26",
"Cisco-IOS-XR-ipv6-nd-oper-sub1": "2019-09-26",
"Cisco-IOS-XR-ipv6-nd-subscriber-cfg": "2016-12-19",
"Cisco-IOS-XR-ipv6-new-dhcpv6d-cfg": "2017-09-12",
"Cisco-IOS-XR-ipv6-new-dhcpv6d-oper": "2018-10-09",
"Cisco-IOS-XR-ipv6-new-dhcpv6d-oper-sub1": "2018-10-09",
"Cisco-IOS-XR-ipv6-new-dhcpv6d-subscriber-cfg": "2017-09-30",
"Cisco-IOS-XR-ipv6-ospfv3-act": "2016-09-14",
"Cisco-IOS-XR-ipv6-ospfv3-cfg": "2018-05-14",
"Cisco-IOS-XR-ipv6-ospfv3-oper": "2018-06-15",
"Cisco-IOS-XR-ipv6-ospfv3-oper-sub1": "2018-06-15",
"Cisco-IOS-XR-ipv6-ping-act": "2016-09-13",
"Cisco-IOS-XR-ipv6-smiap-cfg": "2016-07-04",
"Cisco-IOS-XR-ipv6-traceroute-act": "2016-09-17",
"Cisco-IOS-XR-isis-act": "2016-06-30",
"Cisco-IOS-XR-kim-tpa-cfg": "2018-07-27",
"Cisco-IOS-XR-l2-eth-infra-cfg": "2018-06-15",
"Cisco-IOS-XR-l2-eth-infra-datatypes": "2015-11-09",
"Cisco-IOS-XR-l2-eth-infra-oper": "2015-11-09",
"Cisco-IOS-XR-l2-eth-infra-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-l2-eth-infra-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-l2-eth-infra-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-l2rib-cfg": "2018-09-26",
"Cisco-IOS-XR-l2rib-oper": "2015-11-09",
"Cisco-IOS-XR-l2rib-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-l2vpn-cfg": "2018-06-15",
"Cisco-IOS-XR-l2vpn-oper": "2019-03-30",
"Cisco-IOS-XR-l2vpn-oper-sub1": "2019-03-30",
"Cisco-IOS-XR-l2vpn-oper-sub2": "2019-03-30",
"Cisco-IOS-XR-l2vpn-oper-sub3": "2019-03-30",
"Cisco-IOS-XR-l2vpn-oper-sub4": "2019-03-30",
"Cisco-IOS-XR-li-cfg": "2015-11-09",
"Cisco-IOS-XR-lib-keychain-act": "2017-04-17",
"Cisco-IOS-XR-lib-keychain-cfg": "2018-07-16",
"Cisco-IOS-XR-lib-keychain-masterkey-aes-cfg": "2017-09-07",
"Cisco-IOS-XR-lib-keychain-oper": "2018-01-31",
"Cisco-IOS-XR-lib-keychain-oper-sub1": "2018-01-31",
"Cisco-IOS-XR-lib-mpp-cfg": "2018-07-23",
"Cisco-IOS-XR-lib-mpp-oper": "2017-05-01",
"Cisco-IOS-XR-lib-mpp-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-linux-os-heap-summary-oper": "2015-11-09",
"Cisco-IOS-XR-linux-os-heap-summary-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-linux-os-reboot-history-oper": "2015-11-09",
"Cisco-IOS-XR-linux-os-reboot-history-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-lmp-cfg": "2015-11-09",
"Cisco-IOS-XR-lmp-datatypes": "2015-11-09",
"Cisco-IOS-XR-lmp-oper": "2015-11-09",
"Cisco-IOS-XR-lmp-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-lpts-ifib-oper": "2018-12-17",
"Cisco-IOS-XR-lpts-ifib-oper-sub1": "2018-12-17",
"Cisco-IOS-XR-lpts-lib-cfg": "2015-11-09",
"Cisco-IOS-XR-lpts-pa-oper": "2015-11-09",
"Cisco-IOS-XR-lpts-pa-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-lpts-pre-ifib-cfg": "2019-10-23",
"Cisco-IOS-XR-lpts-pre-ifib-oper": "2019-11-06",
"Cisco-IOS-XR-lpts-pre-ifib-oper-sub1": "2019-11-06",
"Cisco-IOS-XR-lpts-punt-flowtrap-cfg": "2017-09-07",
"Cisco-IOS-XR-man-ems-cfg": "2018-04-16",
"Cisco-IOS-XR-man-ems-oper": "2015-11-09",
"Cisco-IOS-XR-man-ems-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-man-ipsla-cfg": "2015-11-09",
"Cisco-IOS-XR-man-ipsla-oper": "2015-11-09",
"Cisco-IOS-XR-man-ipsla-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-man-ipsla-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-man-ipsla-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-man-netconf-cfg": "2018-05-04",
"Cisco-IOS-XR-man-xml-ttyagent-cfg": "2017-05-01",
"Cisco-IOS-XR-man-xml-ttyagent-oper": "2017-09-07",
"Cisco-IOS-XR-man-xml-ttyagent-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-manageability-object-tracking-cfg": "2017-09-07",
"Cisco-IOS-XR-manageability-object-tracking-datatypes": "2017-05-01",
"Cisco-IOS-XR-manageability-object-tracking-oper": "2015-11-09",
"Cisco-IOS-XR-manageability-object-tracking-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-manageability-perfmgmt-cfg": "2017-09-07",
"Cisco-IOS-XR-manageability-perfmgmt-datatypes": "2015-11-09",
"Cisco-IOS-XR-manageability-perfmgmt-oper": "2017-09-07",
"Cisco-IOS-XR-manageability-perfmgmt-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-mdrv-lib-cfg": "2015-11-09",
"Cisco-IOS-XR-mediasvr-linux-oper": "2015-11-09",
"Cisco-IOS-XR-mediasvr-linux-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-mpls-io-cfg": "2017-05-18",
"Cisco-IOS-XR-mpls-io-oper": "2017-05-18",
"Cisco-IOS-XR-mpls-io-oper-sub1": "2017-05-18",
"Cisco-IOS-XR-mpls-ldp-cfg": "2018-06-15",
"Cisco-IOS-XR-mpls-ldp-cfg-datatypes": "2015-11-09",
"Cisco-IOS-XR-mpls-ldp-oper": "2017-09-07",
"Cisco-IOS-XR-mpls-ldp-oper-datatypes": "2015-11-09",
"Cisco-IOS-XR-mpls-ldp-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-mpls-ldp-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-mpls-ldp-oper-sub3": "2017-09-07",
"Cisco-IOS-XR-mpls-lsd-cfg": "2019-08-20",
"Cisco-IOS-XR-mpls-lsd-oper": "2017-09-07",
"Cisco-IOS-XR-mpls-lsd-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-mpls-oam-cfg": "2015-11-09",
"Cisco-IOS-XR-mpls-oam-oper": "2015-11-09",
"Cisco-IOS-XR-mpls-oam-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-mpls-static-cfg": "2017-09-07",
"Cisco-IOS-XR-mpls-static-oper": "2017-05-01",
"Cisco-IOS-XR-mpls-static-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-mpls-te-cfg": "2019-10-16",
"Cisco-IOS-XR-mpls-te-datatypes": "2019-09-30",
"Cisco-IOS-XR-mpls-te-oper": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub1": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub2": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub3": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub4": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub5": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub6": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub7": "2018-08-01",
"Cisco-IOS-XR-mpls-te-oper-sub8": "2018-08-01",
"Cisco-IOS-XR-mpls-vpn-cfg": "2017-09-07",
"Cisco-IOS-XR-mpls-vpn-oper": "2015-11-09",
"Cisco-IOS-XR-mpls-vpn-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ncs5k-fea-pfilter-nonatomic-cfg": "2016-09-01",
"Cisco-IOS-XR-nto-misc-oper": "2015-11-09",
"Cisco-IOS-XR-nto-misc-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-opendns-deviceid-cfg": "2015-11-09",
"Cisco-IOS-XR-opticalmib-cfg": "2015-11-09",
"Cisco-IOS-XR-opticalotsmib-cfg": "2015-11-09",
"Cisco-IOS-XR-optics-driver-cfg": "2016-03-21",
"Cisco-IOS-XR-optics-driver-quad-cfg": "2018-07-21",
"Cisco-IOS-XR-otnifmib-cfg": "2015-11-09",
"Cisco-IOS-XR-parser-cfg": "2017-05-09",
"Cisco-IOS-XR-pbr-bng-cfg": "2015-11-09",
"Cisco-IOS-XR-pbr-cfg": "2018-05-17",
"Cisco-IOS-XR-pbr-datatypes": "2015-11-09",
"Cisco-IOS-XR-pbr-oper": "2015-11-09",
"Cisco-IOS-XR-pbr-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-pbr-subscriber-cfg": "2015-11-09",
"Cisco-IOS-XR-pbr-vrf-policy-cfg": "2015-11-09",
"Cisco-IOS-XR-pbr-vservice-ea-oper": "2017-05-01",
"Cisco-IOS-XR-pbr-vservice-ea-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-pbr-vservice-mgr-oper": "2017-05-01",
"Cisco-IOS-XR-pbr-vservice-mgr-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-perf-meas-cfg": "2017-10-17",
"Cisco-IOS-XR-perf-meas-oper": "2017-10-17",
"Cisco-IOS-XR-perf-meas-oper-sub1": "2017-10-17",
"Cisco-IOS-XR-perf-meas-oper-sub2": "2017-10-17",
"Cisco-IOS-XR-pfi-im-cmd-ctrlr-oper": "2015-11-09",
"Cisco-IOS-XR-pfi-im-cmd-ctrlr-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-pfi-im-cmd-oper": "2017-06-26",
"Cisco-IOS-XR-pfi-im-cmd-oper-sub1": "2017-06-26",
"Cisco-IOS-XR-pfi-im-cmd-oper-sub2": "2017-06-26",
"Cisco-IOS-XR-pfm-oper": "2017-03-28",
"Cisco-IOS-XR-ping-act": "2018-10-01",
"Cisco-IOS-XR-plat-chas-invmgr-ng-oper": "2018-01-22",
"Cisco-IOS-XR-plat-chas-invmgr-ng-oper-sub1": "2018-01-22",
"Cisco-IOS-XR-plat-chas-invmgr-ng-oper-sub2": "2018-01-22",
"Cisco-IOS-XR-plat-chas-invmgr-oper": "2018-01-22",
"Cisco-IOS-XR-plat-chas-invmgr-oper-sub1": "2018-01-22",
"Cisco-IOS-XR-plat-chas-invmgr-oper-sub2": "2018-01-22",
"Cisco-IOS-XR-platform-pifib-oper": "2016-02-22",
"Cisco-IOS-XR-platform-pifib-oper-sub1": "2016-02-22",
"Cisco-IOS-XR-pmengine-cfg": "2017-09-07",
"Cisco-IOS-XR-pmengine-oper": "2017-09-07",
"Cisco-IOS-XR-pmengine-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-policy-repository-cfg": "2017-09-07",
"Cisco-IOS-XR-policy-repository-oper": "2017-09-22",
"Cisco-IOS-XR-policy-repository-oper-sub1": "2017-09-22",
"Cisco-IOS-XR-ppp-ea-oper": "2015-11-09",
"Cisco-IOS-XR-ppp-ea-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-cfg": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-fsm-cfg": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-gbl-cfg": "2017-11-05",
"Cisco-IOS-XR-ppp-ma-ipcp-cfg": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-ipcpiw-cfg": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-lcp-cfg": "2017-09-07",
"Cisco-IOS-XR-ppp-ma-oper": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-oper-sub3": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-oper-sub4": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-ssrp-cfg": "2015-11-09",
"Cisco-IOS-XR-ppp-ma-syslog-cfg": "2015-11-09",
"Cisco-IOS-XR-pppoe-ea-oper": "2015-11-09",
"Cisco-IOS-XR-pppoe-ea-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-prm-hwmod-cfg": "2015-11-09",
"Cisco-IOS-XR-prm-hwmod-profile-cfg": "2017-12-05",
"Cisco-IOS-XR-prm-hwmod-sr-cfg": "2017-04-12",
"Cisco-IOS-XR-prm-server-oper": "2016-02-22",
"Cisco-IOS-XR-prm-server-oper-sub1": "2016-02-22",
"Cisco-IOS-XR-procfind-oper": "2015-11-09",
"Cisco-IOS-XR-procfind-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-procmem-oper": "2017-09-07",
"Cisco-IOS-XR-procmem-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-ptp-cfg": "2017-02-02",
"Cisco-IOS-XR-ptp-datatypes": "2015-11-09",
"Cisco-IOS-XR-ptp-oper": "2017-02-02",
"Cisco-IOS-XR-ptp-oper-sub1": "2017-02-02",
"Cisco-IOS-XR-qos-ma-bng-cfg": "2016-04-01",
"Cisco-IOS-XR-qos-ma-cfg": "2018-02-27",
"Cisco-IOS-XR-qos-ma-oper": "2017-09-13",
"Cisco-IOS-XR-qos-ma-sat-cfg": "2017-03-03",
"Cisco-IOS-XR-qos-mibs-cfg": "2017-05-01",
"Cisco-IOS-XR-remote-attestation-act": "2017-06-08",
"Cisco-IOS-XR-rgmgr-cfg": "2017-08-01",
"Cisco-IOS-XR-rgmgr-oper": "2015-01-07",
"Cisco-IOS-XR-rgmgr-oper-sub1": "2015-01-07",
"Cisco-IOS-XR-sdr-invmgr-diag-oper": "2018-01-31",
"Cisco-IOS-XR-sdr-invmgr-diag-oper-sub1": "2018-01-31",
"Cisco-IOS-XR-sdr-invmgr-oper": "2015-11-09",
"Cisco-IOS-XR-sdr-invmgr-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-segment-routing-ms-cfg": "2017-09-07",
"Cisco-IOS-XR-segment-routing-ms-common-cfg": "2015-11-09",
"Cisco-IOS-XR-segment-routing-ms-oper": "2017-09-07",
"Cisco-IOS-XR-segment-routing-ms-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-segment-routing-ms-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-segment-routing-ms-oper-sub3": "2017-09-07",
"Cisco-IOS-XR-segment-routing-srv6-cfg": "2015-11-09",
"Cisco-IOS-XR-segment-routing-srv6-datatypes": "2015-11-09",
"Cisco-IOS-XR-segment-routing-srv6-oper": "2015-11-09",
"Cisco-IOS-XR-segment-routing-srv6-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-shellutil-cfg": "2015-10-12",
"Cisco-IOS-XR-shellutil-copy-act": "2018-05-20",
"Cisco-IOS-XR-shellutil-delete-act": "2018-01-20",
"Cisco-IOS-XR-shellutil-filesystem-oper": "2015-11-09",
"Cisco-IOS-XR-shellutil-filesystem-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-shellutil-oper": "2015-01-07",
"Cisco-IOS-XR-shellutil-oper-sub1": "2015-01-07",
"Cisco-IOS-XR-show-fpd-loc-ng-oper": "2017-05-01",
"Cisco-IOS-XR-show-fpd-loc-ng-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-skp-qos-oper": "2016-02-18",
"Cisco-IOS-XR-skp-qos-oper-sub1": "2016-02-18",
"Cisco-IOS-XR-skp-qos-oper-sub2": "2016-02-18",
"Cisco-IOS-XR-skywarp-netflow-oper": "2015-11-09",
"Cisco-IOS-XR-skywarp-netflow-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-skywarp-netflow-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-snmp-agent-cfg": "2018-06-27",
"Cisco-IOS-XR-snmp-agent-oper": "2018-07-20",
"Cisco-IOS-XR-snmp-agent-oper-sub1": "2018-07-20",
"Cisco-IOS-XR-snmp-agent-oper-sub2": "2018-07-20",
"Cisco-IOS-XR-snmp-agent-oper-sub3": "2018-07-20",
"Cisco-IOS-XR-snmp-agent-oper-sub4": "2018-07-20",
"Cisco-IOS-XR-snmp-agent-oper-sub5": "2018-07-20",
"Cisco-IOS-XR-snmp-agent-oper-sub6": "2018-07-20",
"Cisco-IOS-XR-snmp-agent-oper-sub7": "2018-07-20",
"Cisco-IOS-XR-snmp-bridgemib-cfg": "2015-11-09",
"Cisco-IOS-XR-snmp-ciscosensormib-cfg": "2017-05-01",
"Cisco-IOS-XR-snmp-entityextmib-cfg": "2015-11-09",
"Cisco-IOS-XR-snmp-entitymib-cfg": "2017-05-01",
"Cisco-IOS-XR-snmp-entitymib-oper": "2015-11-09",
"Cisco-IOS-XR-snmp-entitymib-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-snmp-entstatemib-cfg": "2015-07-27",
"Cisco-IOS-XR-snmp-frucontrolmib-cfg": "2015-01-07",
"Cisco-IOS-XR-snmp-ifmib-cfg": "2017-05-01",
"Cisco-IOS-XR-snmp-ifmib-oper": "2015-01-07",
"Cisco-IOS-XR-snmp-ifmib-oper-sub1": "2015-01-07",
"Cisco-IOS-XR-snmp-mib-rfmib-cfg": "2016-05-13",
"Cisco-IOS-XR-snmp-sensormib-oper": "2015-11-09",
"Cisco-IOS-XR-snmp-sensormib-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-snmp-sensormib-oper-sub2": "2015-11-09",
"Cisco-IOS-XR-snmp-syslogmib-cfg": "2015-12-01",
"Cisco-IOS-XR-snmp-test-trap-act": "2017-05-01",
"Cisco-IOS-XR-spirit-corehelper-cfg": "2015-11-09",
"Cisco-IOS-XR-spirit-install-act": "2018-09-10",
"Cisco-IOS-XR-spirit-install-instmgr-oper": "2019-08-24",
"Cisco-IOS-XR-spirit-install-instmgr-oper-sub1": "2019-08-24",
"Cisco-IOS-XR-spirit-install-instmgr-oper-sub2": "2019-08-24",
"Cisco-IOS-XR-sse-span-oper": "2017-09-07",
"Cisco-IOS-XR-sse-span-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-subscriber-accounting-cfg": "2017-09-07",
"Cisco-IOS-XR-subscriber-accounting-oper": "2018-10-30",
"Cisco-IOS-XR-subscriber-accounting-oper-sub1": "2018-10-30",
"Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg": "2015-01-07",
"Cisco-IOS-XR-subscriber-ipsub-cfg": "2017-09-07",
"Cisco-IOS-XR-subscriber-ipsub-oper": "2015-11-09",
"Cisco-IOS-XR-subscriber-ipsub-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-subscriber-pppoe-ma-cmd-cfg": "2015-11-09",
"Cisco-IOS-XR-subscriber-pppoe-ma-gbl-cfg": "2019-10-07",
"Cisco-IOS-XR-subscriber-pppoe-ma-oper": "2019-10-07",
"Cisco-IOS-XR-subscriber-pppoe-ma-oper-sub1": "2019-10-07",
"Cisco-IOS-XR-subscriber-session-mon-mibs-cfg": "2015-11-09",
"Cisco-IOS-XR-subscriber-session-mon-oper": "2015-11-09",
"Cisco-IOS-XR-subscriber-session-mon-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-subscriber-srg-cfg": "2017-09-07",
"Cisco-IOS-XR-subscriber-srg-oper": "2019-10-03",
"Cisco-IOS-XR-subscriber-srg-oper-sub1": "2019-10-03",
"Cisco-IOS-XR-sysadmin-aaa-aaa-show": "2017-05-10",
"Cisco-IOS-XR-sysadmin-aaa-disaster-recovery": "2017-05-10",
"Cisco-IOS-XR-sysadmin-alarm-mgr": "2018-04-09",
"Cisco-IOS-XR-sysadmin-asr9k-envmon-types": "2017-11-27",
"Cisco-IOS-XR-sysadmin-asr9k-envmon-ui": "2018-04-09",
"Cisco-IOS-XR-sysadmin-clear-asr9k": "2017-11-10",
"Cisco-IOS-XR-sysadmin-cm": "2018-07-03",
"Cisco-IOS-XR-sysadmin-controllers-asr9k": "2017-11-10",
"Cisco-IOS-XR-sysadmin-debug-trace": "2017-04-12",
"Cisco-IOS-XR-sysadmin-ds": "2018-05-21",
"Cisco-IOS-XR-sysadmin-dumper": "2017-05-09",
"Cisco-IOS-XR-sysadmin-entity-mib": "2017-04-12",
"Cisco-IOS-XR-sysadmin-entity-sensor-mib": "2017-04-12",
"Cisco-IOS-XR-sysadmin-entity-state-mib": "2017-04-12",
"Cisco-IOS-XR-sysadmin-entity-state-tc-mib": "2017-04-12",
"Cisco-IOS-XR-sysadmin-envmon-types": "2017-11-27",
"Cisco-IOS-XR-sysadmin-envmon-ui": "2018-04-09",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-admin-exec": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-bridge": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-clear": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-cli-asr9k": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-debug": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-fdb": "2018-10-26",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-mac": "2018-04-09",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-mgmt-agent": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-mlap": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-reachable": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-sdr": "2018-04-09",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-serdes": "2018-04-09",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-sfp": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-statistics": "2018-02-23",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-summary": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-trace": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-trunk": "2017-05-01",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-types": "2018-10-26",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-vlan": "2018-04-09",
"Cisco-IOS-XR-sysadmin-external-usb": "2017-04-10",
"Cisco-IOS-XR-sysadmin-fm": "2016-04-12",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd": "2017-05-01",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd-service": "2017-05-01",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpdserv-ctrace": "2017-05-01",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-shhwfpd": "2017-05-01",
"Cisco-IOS-XR-sysadmin-hw-module": "2017-01-31",
"Cisco-IOS-XR-sysadmin-hw-module-xrv9k": "2018-06-04",
"Cisco-IOS-XR-sysadmin-instmgr-oper": "2018-04-09",
"Cisco-IOS-XR-sysadmin-issu": "2019-06-12",
"Cisco-IOS-XR-sysadmin-led-mgr-ui": "2017-05-01",
"Cisco-IOS-XR-sysadmin-nto-misc-set-hostname": "2017-04-12",
"Cisco-IOS-XR-sysadmin-obfl": "2017-07-31",
"Cisco-IOS-XR-sysadmin-obfl-conf": "2017-07-31",
"Cisco-IOS-XR-sysadmin-pm": "2018-04-09",
"Cisco-IOS-XR-sysadmin-rvm-mgr": "2017-04-12",
"Cisco-IOS-XR-sysadmin-sdr-mgr": "2019-04-12",
"Cisco-IOS-XR-sysadmin-services": "2016-11-10",
"Cisco-IOS-XR-sysadmin-ship": "2017-05-09",
"Cisco-IOS-XR-sysadmin-show-diag": "2017-04-12",
"Cisco-IOS-XR-sysadmin-show-inv": "2017-04-12",
"Cisco-IOS-XR-sysadmin-show-obfl": "2018-10-05",
"Cisco-IOS-XR-sysadmin-show-trace": "2017-04-12",
"Cisco-IOS-XR-sysadmin-show-trace-cm": "2017-04-12",
"Cisco-IOS-XR-sysadmin-show-trace-debug-agent": "2017-04-12",
"Cisco-IOS-XR-sysadmin-show-trace-instagt": "2017-04-12",
"Cisco-IOS-XR-sysadmin-show-trace-instmgr": "2017-04-12",
"Cisco-IOS-XR-sysadmin-show-trace-vmm": "2017-04-12",
"Cisco-IOS-XR-sysadmin-sm": "2018-04-09",
"Cisco-IOS-XR-sysadmin-sm-hw-mod": "2017-04-12",
"Cisco-IOS-XR-sysadmin-syslog": "2017-05-09",
"Cisco-IOS-XR-sysadmin-system": "2018-04-09",
"Cisco-IOS-XR-sysadmin-tacacs-show-tacacs": "2017-05-10",
"Cisco-IOS-XR-sysadmin-tacacs-tacacs-server": "2017-05-10",
"Cisco-IOS-XR-sysadmin-tacacs-test-tacacs": "2017-05-10",
"Cisco-IOS-XR-sysadmin-time-of-day-timezone": "2016-07-04",
"Cisco-IOS-XR-sysadmin-types": "2018-04-09",
"Cisco-IOS-XR-sysadmin-vm": "2018-11-20",
"Cisco-IOS-XR-sysadmin-vm-mgr": "2018-07-13",
"Cisco-IOS-XR-sysadmin-wdmon": "2018-04-09",
"Cisco-IOS-XR-sysadmin-zapdisk": "2017-05-23",
"Cisco-IOS-XR-sysdb-oper": "2015-11-09",
"Cisco-IOS-XR-syslog-act": "2016-04-17",
"Cisco-IOS-XR-sysmgr-act": "2017-03-03",
"Cisco-IOS-XR-sysmgr-cfg": "2015-11-09",
"Cisco-IOS-XR-sysmgr-oper": "2015-11-09",
"Cisco-IOS-XR-sysmgr-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-telemetry-model-driven-cfg": "2019-08-29",
"Cisco-IOS-XR-telemetry-model-driven-oper": "2017-09-27",
"Cisco-IOS-XR-telemetry-model-driven-oper-sub1": "2017-09-27",
"Cisco-IOS-XR-telemetry-model-driven-oper-sub2": "2017-09-27",
"Cisco-IOS-XR-traceroute-act": "2018-10-01",
"Cisco-IOS-XR-traffmon-netflow-cfg": "2018-06-15",
"Cisco-IOS-XR-tty-management-cfg": "2017-09-07",
"Cisco-IOS-XR-tty-management-cmd-oper": "2017-09-07",
"Cisco-IOS-XR-tty-management-cmd-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-tty-management-datatypes": "2017-08-07",
"Cisco-IOS-XR-tty-management-oper": "2017-05-01",
"Cisco-IOS-XR-tty-management-oper-sub1": "2017-05-01",
"Cisco-IOS-XR-tty-server-cfg": "2015-07-30",
"Cisco-IOS-XR-tty-server-oper": "2017-09-07",
"Cisco-IOS-XR-tty-server-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-tty-server-oper-sub2": "2017-09-07",
"Cisco-IOS-XR-tty-server-oper-sub3": "2017-09-07",
"Cisco-IOS-XR-tty-server-oper-sub4": "2017-09-07",
"Cisco-IOS-XR-tty-server-oper-sub5": "2017-09-07",
"Cisco-IOS-XR-tty-vty-cfg": "2017-09-07",
"Cisco-IOS-XR-tunnel-gre-cfg": "2017-09-07",
"Cisco-IOS-XR-tunnel-l2tun-cfg": "2017-09-07",
"Cisco-IOS-XR-tunnel-l2tun-oper": "2018-11-01",
"Cisco-IOS-XR-tunnel-l2tun-oper-sub1": "2018-11-01",
"Cisco-IOS-XR-tunnel-l2tun-proto-mibs-cfg": "2015-11-09",
"Cisco-IOS-XR-tunnel-nve-cfg": "2016-08-30",
"Cisco-IOS-XR-tunnel-nve-oper": "2015-11-09",
"Cisco-IOS-XR-tunnel-nve-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-tunnel-vpdn-cfg": "2017-09-07",
"Cisco-IOS-XR-tunnel-vpdn-oper": "2015-11-09",
"Cisco-IOS-XR-tunnel-vpdn-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-types": "2018-06-29",
"Cisco-IOS-XR-upgrade-fpd-admin-cfg": "2015-11-09",
"Cisco-IOS-XR-upgrade-fpd-ng-act": "2017-04-04",
"Cisco-IOS-XR-upgrade-fpd-oper": "2017-09-07",
"Cisco-IOS-XR-upgrade-fpd-oper-sub1": "2017-09-07",
"Cisco-IOS-XR-vservice-cfg": "2017-09-07",
"Cisco-IOS-XR-wanphy-ui-cfg": "2015-11-09",
"Cisco-IOS-XR-wanphy-ui-oper": "2015-11-09",
"Cisco-IOS-XR-wanphy-ui-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-watchd-cfg": "2015-11-09",
"Cisco-IOS-XR-wd-cfg": "2015-11-09",
"Cisco-IOS-XR-wd-oper": "2015-11-09",
"Cisco-IOS-XR-wd-oper-sub1": "2015-11-09",
"Cisco-IOS-XR-wdsysmon-fd-oper": "2019-07-05",
"Cisco-IOS-XR-wdsysmon-fd-oper-sub1": "2019-07-05",
"INET-ADDRESS-MIB": "2002-05-09",
"IPV6-TC": "1998-12-01",
"SNMP-COMMUNITY-MIB": "2003-08-06",
"SNMP-FRAMEWORK-MIB": "2002-10-14",
"SNMP-MPD-MIB": "2002-10-14",
"SNMP-NOTIFICATION-MIB": "2002-10-14",
"SNMP-TARGET-MIB": "2002-10-14",
"SNMP-USER-BASED-SM-MIB": "2002-10-16",
"SNMP-VIEW-BASED-ACM-MIB": "2002-10-16",
"SNMPv2-MIB": "2002-10-16",
"SNMPv2-SMI": "",
"SNMPv2-TC": "2016-08-18",
"ccc": "2016-10-12",
"nc-notifications": "2008-07-14",
"notifications": "2008-07-14",
"ntp": "2016-07-04",
"opertest1": "2016-10-12",
"tailf-aaa": "2011-09-22",
"tailf-actions": "2017-02-28",
"tailf-cli-extensions": "2017-08-23",
"tailf-common": "2017-08-23",
"tailf-common-monitoring": "2013-06-14",
"tailf-common-query": "2017-12-15",
"tailf-confd-monitoring": "2013-06-14",
"tailf-meta-extensions": "2017-03-08",
"tailf-netconf-query": "2017-01-06",
"tailf-xsd-types": "2017-11-20",
"valtest": "2012-08-20",
"vplatform": "2015-08-30",
}
ENTITY_LOOKUP = {
("http://tail-f.com/ns/mibs/CISCO-ENTITY-FRU-CONTROL-MIB/200311240000Z", "CISCO-ENTITY-FRU-CONTROL-MIB"): "CISCO_ENTITY_FRU_CONTROL_MIB.CISCOENTITYFRUCONTROLMIB",
("CISCO-ENTITY-FRU-CONTROL-MIB", "CISCO-ENTITY-FRU-CONTROL-MIB"): "CISCO_ENTITY_FRU_CONTROL_MIB.CISCOENTITYFRUCONTROLMIB",
("http://cisco.com/ns/yang/Cisco-IOS-XR-Ethernet-SPAN-cfg", "span-monitor-session"): "Cisco_IOS_XR_Ethernet_SPAN_cfg.SpanMonitorSession",
("Cisco-IOS-XR-Ethernet-SPAN-cfg", "span-monitor-session"): "Cisco_IOS_XR_Ethernet_SPAN_cfg.SpanMonitorSession",
("http://cisco.com/ns/yang/Cisco-IOS-XR-Ethernet-SPAN-oper", "span-monitor-session"): "Cisco_IOS_XR_Ethernet_SPAN_oper.SpanMonitorSession",
("Cisco-IOS-XR-Ethernet-SPAN-oper", "span-monitor-session"): "Cisco_IOS_XR_Ethernet_SPAN_oper.SpanMonitorSession",
("http://cisco.com/ns/yang/Cisco-IOS-XR-Subscriber-infra-subdb-oper", "subscriber-database"): "Cisco_IOS_XR_Subscriber_infra_subdb_oper.SubscriberDatabase",
("Cisco-IOS-XR-Subscriber-infra-subdb-oper", "subscriber-database"): "Cisco_IOS_XR_Subscriber_infra_subdb_oper.SubscriberDatabase",
("http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-lib-cfg", "aaa"): "Cisco_IOS_XR_aaa_lib_cfg.Aaa",
("Cisco-IOS-XR-aaa-lib-cfg", "aaa"): "Cisco_IOS_XR_aaa_lib_cfg.Aaa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-locald-admin-cfg", "aaa"): "Cisco_IOS_XR_aaa_locald_admin_cfg.Aaa",
("Cisco-IOS-XR-aaa-locald-admin-cfg", "aaa"): "Cisco_IOS_XR_aaa_locald_admin_cfg.Aaa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-locald-oper", "aaa"): "Cisco_IOS_XR_aaa_locald_oper.Aaa",
("Cisco-IOS-XR-aaa-locald-oper", "aaa"): "Cisco_IOS_XR_aaa_locald_oper.Aaa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-nacm-cfg", "nacm"): "Cisco_IOS_XR_aaa_nacm_cfg.Nacm",
("Cisco-IOS-XR-aaa-nacm-cfg", "nacm"): "Cisco_IOS_XR_aaa_nacm_cfg.Nacm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-nacm-oper", "aaa-nacm"): "Cisco_IOS_XR_aaa_nacm_oper.AaaNacm",
("Cisco-IOS-XR-aaa-nacm-oper", "aaa-nacm"): "Cisco_IOS_XR_aaa_nacm_oper.AaaNacm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-protocol-radius-oper", "radius"): "Cisco_IOS_XR_aaa_protocol_radius_oper.Radius",
("Cisco-IOS-XR-aaa-protocol-radius-oper", "radius"): "Cisco_IOS_XR_aaa_protocol_radius_oper.Radius",
("http://cisco.com/ns/yang/Cisco-IOS-XR-accounting-cfg", "accounting"): "Cisco_IOS_XR_accounting_cfg.Accounting",
("Cisco-IOS-XR-accounting-cfg", "accounting"): "Cisco_IOS_XR_accounting_cfg.Accounting",
("http://cisco.com/ns/yang/Cisco-IOS-XR-alarmgr-server-oper", "alarms"): "Cisco_IOS_XR_alarmgr_server_oper.Alarms",
("Cisco-IOS-XR-alarmgr-server-oper", "alarms"): "Cisco_IOS_XR_alarmgr_server_oper.Alarms",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ascii-ltrace-oper", "ltrace"): "Cisco_IOS_XR_ascii_ltrace_oper.Ltrace",
("Cisco-IOS-XR-ascii-ltrace-oper", "ltrace"): "Cisco_IOS_XR_ascii_ltrace_oper.Ltrace",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asic-errors-oper", "asic-errors"): "Cisco_IOS_XR_asic_errors_oper.AsicErrors",
("Cisco-IOS-XR-asic-errors-oper", "asic-errors"): "Cisco_IOS_XR_asic_errors_oper.AsicErrors",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-ep-port-mode-cfg", "hw-module-ep-port-mode"): "Cisco_IOS_XR_asr9k_ep_port_mode_cfg.HwModuleEpPortMode",
("Cisco-IOS-XR-asr9k-ep-port-mode-cfg", "hw-module-ep-port-mode"): "Cisco_IOS_XR_asr9k_ep_port_mode_cfg.HwModuleEpPortMode",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-fab-cfg", "fab-vqi-config"): "Cisco_IOS_XR_asr9k_fab_cfg.FabVqiConfig",
("Cisco-IOS-XR-asr9k-fab-cfg", "fab-vqi-config"): "Cisco_IOS_XR_asr9k_fab_cfg.FabVqiConfig",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-fia-cfg", "fabric-fia-config"): "Cisco_IOS_XR_asr9k_fia_cfg.FabricFiaConfig",
("Cisco-IOS-XR-asr9k-fia-cfg", "fabric-fia-config"): "Cisco_IOS_XR_asr9k_fia_cfg.FabricFiaConfig",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-fsi-oper", "fabric-stats"): "Cisco_IOS_XR_asr9k_fsi_oper.FabricStats",
("Cisco-IOS-XR-asr9k-fsi-oper", "fabric-stats"): "Cisco_IOS_XR_asr9k_fsi_oper.FabricStats",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-ethctrl-oper", "mlan"): "Cisco_IOS_XR_asr9k_lc_ethctrl_oper.Mlan",
("Cisco-IOS-XR-asr9k-lc-ethctrl-oper", "mlan"): "Cisco_IOS_XR_asr9k_lc_ethctrl_oper.Mlan",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-fca-oper", "mpa-internal"): "Cisco_IOS_XR_asr9k_lc_fca_oper.MpaInternal",
("Cisco-IOS-XR-asr9k-lc-fca-oper", "mpa-internal"): "Cisco_IOS_XR_asr9k_lc_fca_oper.MpaInternal",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-fca-oper", "mpa"): "Cisco_IOS_XR_asr9k_lc_fca_oper.Mpa",
("Cisco-IOS-XR-asr9k-lc-fca-oper", "mpa"): "Cisco_IOS_XR_asr9k_lc_fca_oper.Mpa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-pwrglide-cfg", "hardware-module-port-mode"): "Cisco_IOS_XR_asr9k_lc_pwrglide_cfg.HardwareModulePortMode",
("Cisco-IOS-XR-asr9k-lc-pwrglide-cfg", "hardware-module-port-mode"): "Cisco_IOS_XR_asr9k_lc_pwrglide_cfg.HardwareModulePortMode",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lpts-oper", "platform-lptsp-ifib-static"): "Cisco_IOS_XR_asr9k_lpts_oper.PlatformLptspIfibStatic",
("Cisco-IOS-XR-asr9k-lpts-oper", "platform-lptsp-ifib-static"): "Cisco_IOS_XR_asr9k_lpts_oper.PlatformLptspIfibStatic",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lpts-oper", "platform-lptsp-ifib"): "Cisco_IOS_XR_asr9k_lpts_oper.PlatformLptspIfib",
("Cisco-IOS-XR-asr9k-lpts-oper", "platform-lptsp-ifib"): "Cisco_IOS_XR_asr9k_lpts_oper.PlatformLptspIfib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lpts-oper", "platform-lptsp-ifib-np-stats"): "Cisco_IOS_XR_asr9k_lpts_oper.PlatformLptspIfibNpStats",
("Cisco-IOS-XR-asr9k-lpts-oper", "platform-lptsp-ifib-np-stats"): "Cisco_IOS_XR_asr9k_lpts_oper.PlatformLptspIfibNpStats",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-netflow-oper", "net-flow"): "Cisco_IOS_XR_asr9k_netflow_oper.NetFlow",
("Cisco-IOS-XR-asr9k-netflow-oper", "net-flow"): "Cisco_IOS_XR_asr9k_netflow_oper.NetFlow",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-np-oper", "hardware-module-np"): "Cisco_IOS_XR_asr9k_np_oper.HardwareModuleNp",
("Cisco-IOS-XR-asr9k-np-oper", "hardware-module-np"): "Cisco_IOS_XR_asr9k_np_oper.HardwareModuleNp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-qos-mode"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleQosMode",
("Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-qos-mode"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleQosMode",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-processor"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleProcessor",
("Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-processor"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleProcessor",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-tcp-mss-adjust"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleTcpMssAdjust",
("Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-tcp-mss-adjust"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleTcpMssAdjust",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-tcam"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleTcam",
("Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-tcam"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleTcam",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-profile"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleProfile",
("Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-profile"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleProfile",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-efd"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleEfd",
("Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-efd"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleEfd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-all-qos-mode"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleAllQosMode",
("Cisco-IOS-XR-asr9k-prm-cfg", "hardware-module-all-qos-mode"): "Cisco_IOS_XR_asr9k_prm_cfg.HardwareModuleAllQosMode",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-ptp-pd-oper", "platform-ptp"): "Cisco_IOS_XR_asr9k_ptp_pd_oper.PlatformPtp",
("Cisco-IOS-XR-asr9k-ptp-pd-oper", "platform-ptp"): "Cisco_IOS_XR_asr9k_ptp_pd_oper.PlatformPtp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-qos-oper", "platform-qos"): "Cisco_IOS_XR_asr9k_qos_oper.PlatformQos",
("Cisco-IOS-XR-asr9k-qos-oper", "platform-qos"): "Cisco_IOS_XR_asr9k_qos_oper.PlatformQos",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-diag-admin-oper", "diag"): "Cisco_IOS_XR_asr9k_sc_diag_admin_oper.Diag",
("Cisco-IOS-XR-asr9k-sc-diag-admin-oper", "diag"): "Cisco_IOS_XR_asr9k_sc_diag_admin_oper.Diag",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-diag-oper", "diag"): "Cisco_IOS_XR_asr9k_sc_diag_oper.Diag",
("Cisco-IOS-XR-asr9k-sc-diag-oper", "diag"): "Cisco_IOS_XR_asr9k_sc_diag_oper.Diag",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-envmon-admin-oper", "environmental-monitoring"): "Cisco_IOS_XR_asr9k_sc_envmon_admin_oper.EnvironmentalMonitoring",
("Cisco-IOS-XR-asr9k-sc-envmon-admin-oper", "environmental-monitoring"): "Cisco_IOS_XR_asr9k_sc_envmon_admin_oper.EnvironmentalMonitoring",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-envmon-oper", "environmental-monitoring-cli"): "Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli",
("Cisco-IOS-XR-asr9k-sc-envmon-oper", "environmental-monitoring-cli"): "Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-envmon-oper", "environmental-monitoring"): "Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring",
("Cisco-IOS-XR-asr9k-sc-envmon-oper", "environmental-monitoring"): "Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-invmgr-admin-oper", "inventory"): "Cisco_IOS_XR_asr9k_sc_invmgr_admin_oper.Inventory",
("Cisco-IOS-XR-asr9k-sc-invmgr-admin-oper", "inventory"): "Cisco_IOS_XR_asr9k_sc_invmgr_admin_oper.Inventory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-invmgr-oper", "inventory"): "Cisco_IOS_XR_asr9k_sc_invmgr_oper.Inventory",
("Cisco-IOS-XR-asr9k-sc-invmgr-oper", "inventory"): "Cisco_IOS_XR_asr9k_sc_invmgr_oper.Inventory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-xbar-oper", "cross-bar-stats"): "Cisco_IOS_XR_asr9k_xbar_oper.CrossBarStats",
("Cisco-IOS-XR-asr9k-xbar-oper", "cross-bar-stats"): "Cisco_IOS_XR_asr9k_xbar_oper.CrossBarStats",
("http://cisco.com/ns/yang/Cisco-IOS-XR-atm-vcm-oper", "atm-vcm"): "Cisco_IOS_XR_atm_vcm_oper.AtmVcm",
("Cisco-IOS-XR-atm-vcm-oper", "atm-vcm"): "Cisco_IOS_XR_atm_vcm_oper.AtmVcm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-cfg", "lacp"): "Cisco_IOS_XR_bundlemgr_cfg.Lacp",
("Cisco-IOS-XR-bundlemgr-cfg", "lacp"): "Cisco_IOS_XR_bundlemgr_cfg.Lacp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-oper", "lacp-bundles"): "Cisco_IOS_XR_bundlemgr_oper.LacpBundles",
("Cisco-IOS-XR-bundlemgr-oper", "lacp-bundles"): "Cisco_IOS_XR_bundlemgr_oper.LacpBundles",
("http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-oper", "bundle-information"): "Cisco_IOS_XR_bundlemgr_oper.BundleInformation",
("Cisco-IOS-XR-bundlemgr-oper", "bundle-information"): "Cisco_IOS_XR_bundlemgr_oper.BundleInformation",
("http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-oper", "lacp-data"): "Cisco_IOS_XR_bundlemgr_oper.LacpData",
("Cisco-IOS-XR-bundlemgr-oper", "lacp-data"): "Cisco_IOS_XR_bundlemgr_oper.LacpData",
("http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-oper", "bundles"): "Cisco_IOS_XR_bundlemgr_oper.Bundles",
("Cisco-IOS-XR-bundlemgr-oper", "bundles"): "Cisco_IOS_XR_bundlemgr_oper.Bundles",
("http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-oper", "lacp-bundle-members"): "Cisco_IOS_XR_bundlemgr_oper.LacpBundleMembers",
("Cisco-IOS-XR-bundlemgr-oper", "lacp-bundle-members"): "Cisco_IOS_XR_bundlemgr_oper.LacpBundleMembers",
("http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-oper", "bundles-adjacency"): "Cisco_IOS_XR_bundlemgr_oper.BundlesAdjacency",
("Cisco-IOS-XR-bundlemgr-oper", "bundles-adjacency"): "Cisco_IOS_XR_bundlemgr_oper.BundlesAdjacency",
("http://cisco.com/ns/yang/Cisco-IOS-XR-call-home-cfg", "call-home"): "Cisco_IOS_XR_call_home_cfg.CallHome",
("Cisco-IOS-XR-call-home-cfg", "call-home"): "Cisco_IOS_XR_call_home_cfg.CallHome",
("http://cisco.com/ns/yang/Cisco-IOS-XR-cdp-cfg", "cdp"): "Cisco_IOS_XR_cdp_cfg.Cdp",
("Cisco-IOS-XR-cdp-cfg", "cdp"): "Cisco_IOS_XR_cdp_cfg.Cdp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-cdp-oper", "cdp"): "Cisco_IOS_XR_cdp_oper.Cdp",
("Cisco-IOS-XR-cdp-oper", "cdp"): "Cisco_IOS_XR_cdp_oper.Cdp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-clns-isis-cfg", "isis"): "Cisco_IOS_XR_clns_isis_cfg.Isis",
("Cisco-IOS-XR-clns-isis-cfg", "isis"): "Cisco_IOS_XR_clns_isis_cfg.Isis",
("http://cisco.com/ns/yang/Cisco-IOS-XR-clns-isis-oper", "ocni-isis"): "Cisco_IOS_XR_clns_isis_oper.OcniIsis",
("Cisco-IOS-XR-clns-isis-oper", "ocni-isis"): "Cisco_IOS_XR_clns_isis_oper.OcniIsis",
("http://cisco.com/ns/yang/Cisco-IOS-XR-clns-isis-oper", "isis"): "Cisco_IOS_XR_clns_isis_oper.Isis",
("Cisco-IOS-XR-clns-isis-oper", "isis"): "Cisco_IOS_XR_clns_isis_oper.Isis",
("http://cisco.com/ns/yang/Cisco-IOS-XR-cmproxy-oper", "sdr-inventory-vm"): "Cisco_IOS_XR_cmproxy_oper.SdrInventoryVm",
("Cisco-IOS-XR-cmproxy-oper", "sdr-inventory-vm"): "Cisco_IOS_XR_cmproxy_oper.SdrInventoryVm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-config-cfgmgr-cfg", "cfgmgr"): "Cisco_IOS_XR_config_cfgmgr_cfg.Cfgmgr",
("Cisco-IOS-XR-config-cfgmgr-cfg", "cfgmgr"): "Cisco_IOS_XR_config_cfgmgr_cfg.Cfgmgr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-config-cfgmgr-exec-oper", "config-manager"): "Cisco_IOS_XR_config_cfgmgr_exec_oper.ConfigManager",
("Cisco-IOS-XR-config-cfgmgr-exec-oper", "config-manager"): "Cisco_IOS_XR_config_cfgmgr_exec_oper.ConfigManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-config-cfgmgr-oper", "config"): "Cisco_IOS_XR_config_cfgmgr_oper.Config",
("Cisco-IOS-XR-config-cfgmgr-oper", "config"): "Cisco_IOS_XR_config_cfgmgr_oper.Config",
("http://cisco.com/ns/yang/Cisco-IOS-XR-config-mda-cfg", "active-nodes"): "Cisco_IOS_XR_config_mda_cfg.ActiveNodes",
("Cisco-IOS-XR-config-mda-cfg", "active-nodes"): "Cisco_IOS_XR_config_mda_cfg.ActiveNodes",
("http://cisco.com/ns/yang/Cisco-IOS-XR-config-mda-cfg", "preconfigured-nodes"): "Cisco_IOS_XR_config_mda_cfg.PreconfiguredNodes",
("Cisco-IOS-XR-config-mda-cfg", "preconfigured-nodes"): "Cisco_IOS_XR_config_mda_cfg.PreconfiguredNodes",
("http://cisco.com/ns/yang/Cisco-IOS-XR-config-valid-ccv-cfg", "configurationvalidation"): "Cisco_IOS_XR_config_valid_ccv_cfg.Configurationvalidation",
("Cisco-IOS-XR-config-valid-ccv-cfg", "configurationvalidation"): "Cisco_IOS_XR_config_valid_ccv_cfg.Configurationvalidation",
("http://cisco.com/ns/yang/Cisco-IOS-XR-controller-odu-oper", "odu"): "Cisco_IOS_XR_controller_odu_oper.Odu",
("Cisco-IOS-XR-controller-odu-oper", "odu"): "Cisco_IOS_XR_controller_odu_oper.Odu",
("http://cisco.com/ns/yang/Cisco-IOS-XR-controller-optics-oper", "optics-oper"): "Cisco_IOS_XR_controller_optics_oper.OpticsOper",
("Cisco-IOS-XR-controller-optics-oper", "optics-oper"): "Cisco_IOS_XR_controller_optics_oper.OpticsOper",
("http://cisco.com/ns/yang/Cisco-IOS-XR-controller-otu-oper", "otu"): "Cisco_IOS_XR_controller_otu_oper.Otu",
("Cisco-IOS-XR-controller-otu-oper", "otu"): "Cisco_IOS_XR_controller_otu_oper.Otu",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-macsec-mka-cfg", "macsec"): "Cisco_IOS_XR_crypto_macsec_mka_cfg.Macsec",
("Cisco-IOS-XR-crypto-macsec-mka-cfg", "macsec"): "Cisco_IOS_XR_crypto_macsec_mka_cfg.Macsec",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-macsec-mka-oper", "macsec"): "Cisco_IOS_XR_crypto_macsec_mka_oper.Macsec",
("Cisco-IOS-XR-crypto-macsec-mka-oper", "macsec"): "Cisco_IOS_XR_crypto_macsec_mka_oper.Macsec",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-macsec-secy-oper", "macsec"): "Cisco_IOS_XR_crypto_macsec_secy_oper.Macsec",
("Cisco-IOS-XR-crypto-macsec-secy-oper", "macsec"): "Cisco_IOS_XR_crypto_macsec_secy_oper.Macsec",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-sam-cfg", "sam"): "Cisco_IOS_XR_crypto_sam_cfg.Sam",
("Cisco-IOS-XR-crypto-sam-cfg", "sam"): "Cisco_IOS_XR_crypto_sam_cfg.Sam",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-sam-oper", "sam"): "Cisco_IOS_XR_crypto_sam_oper.Sam",
("Cisco-IOS-XR-crypto-sam-oper", "sam"): "Cisco_IOS_XR_crypto_sam_oper.Sam",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-ssh-cfg", "ssh"): "Cisco_IOS_XR_crypto_ssh_cfg.Ssh",
("Cisco-IOS-XR-crypto-ssh-cfg", "ssh"): "Cisco_IOS_XR_crypto_ssh_cfg.Ssh",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-ssh-oper", "ssh1"): "Cisco_IOS_XR_crypto_ssh_oper.Ssh1",
("Cisco-IOS-XR-crypto-ssh-oper", "ssh1"): "Cisco_IOS_XR_crypto_ssh_oper.Ssh1",
("http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-ssh-oper", "ssh"): "Cisco_IOS_XR_crypto_ssh_oper.Ssh",
("Cisco-IOS-XR-crypto-ssh-oper", "ssh"): "Cisco_IOS_XR_crypto_ssh_oper.Ssh",
("http://cisco.com/ns/yang/Cisco-IOS-XR-dot1x-cfg", "dot1x"): "Cisco_IOS_XR_dot1x_cfg.Dot1x",
("Cisco-IOS-XR-dot1x-cfg", "dot1x"): "Cisco_IOS_XR_dot1x_cfg.Dot1x",
("http://cisco.com/ns/yang/Cisco-IOS-XR-dot1x-cfg", "eap"): "Cisco_IOS_XR_dot1x_cfg.Eap",
("Cisco-IOS-XR-dot1x-cfg", "eap"): "Cisco_IOS_XR_dot1x_cfg.Eap",
("http://cisco.com/ns/yang/Cisco-IOS-XR-dot1x-oper", "dot1x"): "Cisco_IOS_XR_dot1x_oper.Dot1x",
("Cisco-IOS-XR-dot1x-oper", "dot1x"): "Cisco_IOS_XR_dot1x_oper.Dot1x",
("http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-media-eth-oper", "ethernet-interface"): "Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface",
("Cisco-IOS-XR-drivers-media-eth-oper", "ethernet-interface"): "Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface",
("http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-mpa-infra-cfg", "hardware-module"): "Cisco_IOS_XR_drivers_mpa_infra_cfg.HardwareModule",
("Cisco-IOS-XR-drivers-mpa-infra-cfg", "hardware-module"): "Cisco_IOS_XR_drivers_mpa_infra_cfg.HardwareModule",
("http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-vpa-infra-cfg", "hardware-module"): "Cisco_IOS_XR_drivers_vpa_infra_cfg.HardwareModule",
("Cisco-IOS-XR-drivers-vpa-infra-cfg", "hardware-module"): "Cisco_IOS_XR_drivers_vpa_infra_cfg.HardwareModule",
("http://cisco.com/ns/yang/Cisco-IOS-XR-dwdm-ui-oper", "dwdm"): "Cisco_IOS_XR_dwdm_ui_oper.Dwdm",
("Cisco-IOS-XR-dwdm-ui-oper", "dwdm"): "Cisco_IOS_XR_dwdm_ui_oper.Dwdm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-dwdm-ui-oper", "vtxp"): "Cisco_IOS_XR_dwdm_ui_oper.Vtxp",
("Cisco-IOS-XR-dwdm-ui-oper", "vtxp"): "Cisco_IOS_XR_dwdm_ui_oper.Vtxp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-eigrp-cfg", "eigrp"): "Cisco_IOS_XR_eigrp_cfg.Eigrp",
("Cisco-IOS-XR-eigrp-cfg", "eigrp"): "Cisco_IOS_XR_eigrp_cfg.Eigrp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-eigrp-oper", "eigrp"): "Cisco_IOS_XR_eigrp_oper.Eigrp",
("Cisco-IOS-XR-eigrp-oper", "eigrp"): "Cisco_IOS_XR_eigrp_oper.Eigrp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-es-acl-cfg", "es-acl"): "Cisco_IOS_XR_es_acl_cfg.EsAcl",
("Cisco-IOS-XR-es-acl-cfg", "es-acl"): "Cisco_IOS_XR_es_acl_cfg.EsAcl",
("http://cisco.com/ns/yang/Cisco-IOS-XR-es-acl-oper", "es-acl"): "Cisco_IOS_XR_es_acl_oper.EsAcl",
("Cisco-IOS-XR-es-acl-oper", "es-acl"): "Cisco_IOS_XR_es_acl_oper.EsAcl",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-cfm-oper", "cfm"): "Cisco_IOS_XR_ethernet_cfm_oper.Cfm",
("Cisco-IOS-XR-ethernet-cfm-oper", "cfm"): "Cisco_IOS_XR_ethernet_cfm_oper.Cfm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-link-oam-oper", "ether-link-oam"): "Cisco_IOS_XR_ethernet_link_oam_oper.EtherLinkOam",
("Cisco-IOS-XR-ethernet-link-oam-oper", "ether-link-oam"): "Cisco_IOS_XR_ethernet_link_oam_oper.EtherLinkOam",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-lldp-cfg", "lldp"): "Cisco_IOS_XR_ethernet_lldp_cfg.Lldp",
("Cisco-IOS-XR-ethernet-lldp-cfg", "lldp"): "Cisco_IOS_XR_ethernet_lldp_cfg.Lldp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-lldp-oper", "lldp"): "Cisco_IOS_XR_ethernet_lldp_oper.Lldp",
("Cisco-IOS-XR-ethernet-lldp-oper", "lldp"): "Cisco_IOS_XR_ethernet_lldp_oper.Lldp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-evpn-oper", "evpn"): "Cisco_IOS_XR_evpn_oper.Evpn",
("Cisco-IOS-XR-evpn-oper", "evpn"): "Cisco_IOS_XR_evpn_oper.Evpn",
("http://cisco.com/ns/yang/Cisco-IOS-XR-fib-common-cfg", "fib"): "Cisco_IOS_XR_fib_common_cfg.Fib",
("Cisco-IOS-XR-fib-common-cfg", "fib"): "Cisco_IOS_XR_fib_common_cfg.Fib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-fib-common-oper", "fib-statistics"): "Cisco_IOS_XR_fib_common_oper.FibStatistics",
("Cisco-IOS-XR-fib-common-oper", "fib-statistics"): "Cisco_IOS_XR_fib_common_oper.FibStatistics",
("http://cisco.com/ns/yang/Cisco-IOS-XR-fib-common-oper", "fib"): "Cisco_IOS_XR_fib_common_oper.Fib",
("Cisco-IOS-XR-fib-common-oper", "fib"): "Cisco_IOS_XR_fib_common_oper.Fib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-fib-common-oper", "oc-aft-l3"): "Cisco_IOS_XR_fib_common_oper.OcAftL3",
("Cisco-IOS-XR-fib-common-oper", "oc-aft-l3"): "Cisco_IOS_XR_fib_common_oper.OcAftL3",
("http://cisco.com/ns/yang/Cisco-IOS-XR-fib-common-oper", "mpls-forwarding"): "Cisco_IOS_XR_fib_common_oper.MplsForwarding",
("Cisco-IOS-XR-fib-common-oper", "mpls-forwarding"): "Cisco_IOS_XR_fib_common_oper.MplsForwarding",
("http://cisco.com/ns/yang/Cisco-IOS-XR-flowspec-cfg", "flow-spec"): "Cisco_IOS_XR_flowspec_cfg.FlowSpec",
("Cisco-IOS-XR-flowspec-cfg", "flow-spec"): "Cisco_IOS_XR_flowspec_cfg.FlowSpec",
("http://cisco.com/ns/yang/Cisco-IOS-XR-flowspec-oper", "flow-spec"): "Cisco_IOS_XR_flowspec_oper.FlowSpec",
("Cisco-IOS-XR-flowspec-oper", "flow-spec"): "Cisco_IOS_XR_flowspec_oper.FlowSpec",
("http://cisco.com/ns/yang/Cisco-IOS-XR-fpd-infra-cfg", "fpd"): "Cisco_IOS_XR_fpd_infra_cfg.Fpd",
("Cisco-IOS-XR-fpd-infra-cfg", "fpd"): "Cisco_IOS_XR_fpd_infra_cfg.Fpd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-freqsync-cfg", "frequency-synchronization"): "Cisco_IOS_XR_freqsync_cfg.FrequencySynchronization",
("Cisco-IOS-XR-freqsync-cfg", "frequency-synchronization"): "Cisco_IOS_XR_freqsync_cfg.FrequencySynchronization",
("http://cisco.com/ns/yang/Cisco-IOS-XR-freqsync-oper", "frequency-synchronization"): "Cisco_IOS_XR_freqsync_oper.FrequencySynchronization",
("Cisco-IOS-XR-freqsync-oper", "frequency-synchronization"): "Cisco_IOS_XR_freqsync_oper.FrequencySynchronization",
("http://cisco.com/ns/yang/Cisco-IOS-XR-group-cfg", "groups"): "Cisco_IOS_XR_group_cfg.Groups",
("Cisco-IOS-XR-group-cfg", "groups"): "Cisco_IOS_XR_group_cfg.Groups",
("http://cisco.com/ns/yang/Cisco-IOS-XR-group-cfg", "apply-groups"): "Cisco_IOS_XR_group_cfg.ApplyGroups",
("Cisco-IOS-XR-group-cfg", "apply-groups"): "Cisco_IOS_XR_group_cfg.ApplyGroups",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ha-eem-cfg", "event-manager"): "Cisco_IOS_XR_ha_eem_cfg.EventManager",
("Cisco-IOS-XR-ha-eem-cfg", "event-manager"): "Cisco_IOS_XR_ha_eem_cfg.EventManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ha-eem-policy-oper", "eem"): "Cisco_IOS_XR_ha_eem_policy_oper.Eem",
("Cisco-IOS-XR-ha-eem-policy-oper", "eem"): "Cisco_IOS_XR_ha_eem_policy_oper.Eem",
("http://cisco.com/ns/yang/Cisco-IOS-XR-hwmod-bcc-disable-cfg", "hardware-module-bcc-disable"): "Cisco_IOS_XR_hwmod_bcc_disable_cfg.HardwareModuleBccDisable",
("Cisco-IOS-XR-hwmod-bcc-disable-cfg", "hardware-module-bcc-disable"): "Cisco_IOS_XR_hwmod_bcc_disable_cfg.HardwareModuleBccDisable",
("http://cisco.com/ns/yang/Cisco-IOS-XR-icpe-infra-cfg", "nv-satellite-global"): "Cisco_IOS_XR_icpe_infra_cfg.NvSatelliteGlobal",
("Cisco-IOS-XR-icpe-infra-cfg", "nv-satellite-global"): "Cisco_IOS_XR_icpe_infra_cfg.NvSatelliteGlobal",
("http://cisco.com/ns/yang/Cisco-IOS-XR-icpe-infra-cfg", "nv-satellites"): "Cisco_IOS_XR_icpe_infra_cfg.NvSatellites",
("Cisco-IOS-XR-icpe-infra-cfg", "nv-satellites"): "Cisco_IOS_XR_icpe_infra_cfg.NvSatellites",
("http://cisco.com/ns/yang/Cisco-IOS-XR-icpe-infra-oper", "nv-satellite"): "Cisco_IOS_XR_icpe_infra_oper.NvSatellite",
("Cisco-IOS-XR-icpe-infra-oper", "nv-satellite"): "Cisco_IOS_XR_icpe_infra_oper.NvSatellite",
("http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-cfg", "subscriber-manager"): "Cisco_IOS_XR_iedge4710_cfg.SubscriberManager",
("Cisco-IOS-XR-iedge4710-cfg", "subscriber-manager"): "Cisco_IOS_XR_iedge4710_cfg.SubscriberManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-cfg", "subscriber-featurette"): "Cisco_IOS_XR_iedge4710_cfg.SubscriberFeaturette",
("Cisco-IOS-XR-iedge4710-cfg", "subscriber-featurette"): "Cisco_IOS_XR_iedge4710_cfg.SubscriberFeaturette",
("http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-cfg", "iedge-license-manager"): "Cisco_IOS_XR_iedge4710_cfg.IedgeLicenseManager",
("Cisco-IOS-XR-iedge4710-cfg", "iedge-license-manager"): "Cisco_IOS_XR_iedge4710_cfg.IedgeLicenseManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-cfg", "sub-manager"): "Cisco_IOS_XR_iedge4710_cfg.SubManager",
("Cisco-IOS-XR-iedge4710-cfg", "sub-manager"): "Cisco_IOS_XR_iedge4710_cfg.SubManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-oper", "subscriber"): "Cisco_IOS_XR_iedge4710_oper.Subscriber",
("Cisco-IOS-XR-iedge4710-oper", "subscriber"): "Cisco_IOS_XR_iedge4710_oper.Subscriber",
("http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-oper", "iedge-license-manager"): "Cisco_IOS_XR_iedge4710_oper.IedgeLicenseManager",
("Cisco-IOS-XR-iedge4710-oper", "iedge-license-manager"): "Cisco_IOS_XR_iedge4710_oper.IedgeLicenseManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg", "global-interface-configuration"): "Cisco_IOS_XR_ifmgr_cfg.GlobalInterfaceConfiguration",
("Cisco-IOS-XR-ifmgr-cfg", "global-interface-configuration"): "Cisco_IOS_XR_ifmgr_cfg.GlobalInterfaceConfiguration",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg", "interface-configurations"): "Cisco_IOS_XR_ifmgr_cfg.InterfaceConfigurations",
("Cisco-IOS-XR-ifmgr-cfg", "interface-configurations"): "Cisco_IOS_XR_ifmgr_cfg.InterfaceConfigurations",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-oper", "interface-dampening"): "Cisco_IOS_XR_ifmgr_oper.InterfaceDampening",
("Cisco-IOS-XR-ifmgr-oper", "interface-dampening"): "Cisco_IOS_XR_ifmgr_oper.InterfaceDampening",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-oper", "interface-properties"): "Cisco_IOS_XR_ifmgr_oper.InterfaceProperties",
("Cisco-IOS-XR-ifmgr-oper", "interface-properties"): "Cisco_IOS_XR_ifmgr_oper.InterfaceProperties",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ikev2-cfg", "ikev2"): "Cisco_IOS_XR_ikev2_cfg.Ikev2",
("Cisco-IOS-XR-ikev2-cfg", "ikev2"): "Cisco_IOS_XR_ikev2_cfg.Ikev2",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ikev2-oper", "ik-ev2"): "Cisco_IOS_XR_ikev2_oper.IkEv2",
("Cisco-IOS-XR-ikev2-oper", "ik-ev2"): "Cisco_IOS_XR_ikev2_oper.IkEv2",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-alarm-logger-oper", "alarm-logger"): "Cisco_IOS_XR_infra_alarm_logger_oper.AlarmLogger",
("Cisco-IOS-XR-infra-alarm-logger-oper", "alarm-logger"): "Cisco_IOS_XR_infra_alarm_logger_oper.AlarmLogger",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-correlator-oper", "suppression"): "Cisco_IOS_XR_infra_correlator_oper.Suppression",
("Cisco-IOS-XR-infra-correlator-oper", "suppression"): "Cisco_IOS_XR_infra_correlator_oper.Suppression",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-correlator-oper", "correlator"): "Cisco_IOS_XR_infra_correlator_oper.Correlator",
("Cisco-IOS-XR-infra-correlator-oper", "correlator"): "Cisco_IOS_XR_infra_correlator_oper.Correlator",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-fti-cfg", "dci-fabric-interconnect"): "Cisco_IOS_XR_infra_fti_cfg.DciFabricInterconnect",
("Cisco-IOS-XR-infra-fti-cfg", "dci-fabric-interconnect"): "Cisco_IOS_XR_infra_fti_cfg.DciFabricInterconnect",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-fti-oper", "dci-fabric-interconnect"): "Cisco_IOS_XR_infra_fti_oper.DciFabricInterconnect",
("Cisco-IOS-XR-infra-fti-oper", "dci-fabric-interconnect"): "Cisco_IOS_XR_infra_fti_oper.DciFabricInterconnect",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-infra-cfg", "banners"): "Cisco_IOS_XR_infra_infra_cfg.Banners",
("Cisco-IOS-XR-infra-infra-cfg", "banners"): "Cisco_IOS_XR_infra_infra_cfg.Banners",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-infra-clock-linux-cfg", "clock"): "Cisco_IOS_XR_infra_infra_clock_linux_cfg.Clock",
("Cisco-IOS-XR-infra-infra-clock-linux-cfg", "clock"): "Cisco_IOS_XR_infra_infra_clock_linux_cfg.Clock",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-infra-locale-cfg", "locale"): "Cisco_IOS_XR_infra_infra_locale_cfg.Locale",
("Cisco-IOS-XR-infra-infra-locale-cfg", "locale"): "Cisco_IOS_XR_infra_infra_locale_cfg.Locale",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-nsr-cfg", "nsr"): "Cisco_IOS_XR_infra_nsr_cfg.Nsr",
("Cisco-IOS-XR-infra-nsr-cfg", "nsr"): "Cisco_IOS_XR_infra_nsr_cfg.Nsr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-objmgr-cfg", "object-group"): "Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup",
("Cisco-IOS-XR-infra-objmgr-cfg", "object-group"): "Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-objmgr-oper", "object-group"): "Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup",
("Cisco-IOS-XR-infra-objmgr-oper", "object-group"): "Cisco_IOS_XR_infra_objmgr_oper.ObjectGroup",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-policymgr-cfg", "policy-manager"): "Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager",
("Cisco-IOS-XR-infra-policymgr-cfg", "policy-manager"): "Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-policymgr-oper", "policy-manager"): "Cisco_IOS_XR_infra_policymgr_oper.PolicyManager",
("Cisco-IOS-XR-infra-policymgr-oper", "policy-manager"): "Cisco_IOS_XR_infra_policymgr_oper.PolicyManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rcmd-cfg", "router-convergence"): "Cisco_IOS_XR_infra_rcmd_cfg.RouterConvergence",
("Cisco-IOS-XR-infra-rcmd-cfg", "router-convergence"): "Cisco_IOS_XR_infra_rcmd_cfg.RouterConvergence",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rcmd-oper", "rcmd"): "Cisco_IOS_XR_infra_rcmd_oper.Rcmd",
("Cisco-IOS-XR-infra-rcmd-oper", "rcmd"): "Cisco_IOS_XR_infra_rcmd_oper.Rcmd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rmf-oper", "redundancy"): "Cisco_IOS_XR_infra_rmf_oper.Redundancy",
("Cisco-IOS-XR-infra-rmf-oper", "redundancy"): "Cisco_IOS_XR_infra_rmf_oper.Redundancy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-cfg", "vrfs"): "Cisco_IOS_XR_infra_rsi_cfg.Vrfs",
("Cisco-IOS-XR-infra-rsi-cfg", "vrfs"): "Cisco_IOS_XR_infra_rsi_cfg.Vrfs",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-cfg", "global-af"): "Cisco_IOS_XR_infra_rsi_cfg.GlobalAf",
("Cisco-IOS-XR-infra-rsi-cfg", "global-af"): "Cisco_IOS_XR_infra_rsi_cfg.GlobalAf",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-cfg", "srlg"): "Cisco_IOS_XR_infra_rsi_cfg.Srlg",
("Cisco-IOS-XR-infra-rsi-cfg", "srlg"): "Cisco_IOS_XR_infra_rsi_cfg.Srlg",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-cfg", "vrf-groups"): "Cisco_IOS_XR_infra_rsi_cfg.VrfGroups",
("Cisco-IOS-XR-infra-rsi-cfg", "vrf-groups"): "Cisco_IOS_XR_infra_rsi_cfg.VrfGroups",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-cfg", "selective-vrf-download"): "Cisco_IOS_XR_infra_rsi_cfg.SelectiveVrfDownload",
("Cisco-IOS-XR-infra-rsi-cfg", "selective-vrf-download"): "Cisco_IOS_XR_infra_rsi_cfg.SelectiveVrfDownload",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-oper", "vrf-group"): "Cisco_IOS_XR_infra_rsi_oper.VrfGroup",
("Cisco-IOS-XR-infra-rsi-oper", "vrf-group"): "Cisco_IOS_XR_infra_rsi_oper.VrfGroup",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-oper", "srlg"): "Cisco_IOS_XR_infra_rsi_oper.Srlg",
("Cisco-IOS-XR-infra-rsi-oper", "srlg"): "Cisco_IOS_XR_infra_rsi_oper.Srlg",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-oper", "selective-vrf-download"): "Cisco_IOS_XR_infra_rsi_oper.SelectiveVrfDownload",
("Cisco-IOS-XR-infra-rsi-oper", "selective-vrf-download"): "Cisco_IOS_XR_infra_rsi_oper.SelectiveVrfDownload",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rt-check-cfg", "rcc"): "Cisco_IOS_XR_infra_rt_check_cfg.Rcc",
("Cisco-IOS-XR-infra-rt-check-cfg", "rcc"): "Cisco_IOS_XR_infra_rt_check_cfg.Rcc",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-serg-cfg", "session-redundancy"): "Cisco_IOS_XR_infra_serg_cfg.SessionRedundancy",
("Cisco-IOS-XR-infra-serg-cfg", "session-redundancy"): "Cisco_IOS_XR_infra_serg_cfg.SessionRedundancy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-serg-oper", "session-redundancy-manager"): "Cisco_IOS_XR_infra_serg_oper.SessionRedundancyManager",
("Cisco-IOS-XR-infra-serg-oper", "session-redundancy-manager"): "Cisco_IOS_XR_infra_serg_oper.SessionRedundancyManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-serg-oper", "session-redundancy-agent"): "Cisco_IOS_XR_infra_serg_oper.SessionRedundancyAgent",
("Cisco-IOS-XR-infra-serg-oper", "session-redundancy-agent"): "Cisco_IOS_XR_infra_serg_oper.SessionRedundancyAgent",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-sla-cfg", "sla"): "Cisco_IOS_XR_infra_sla_cfg.Sla",
("Cisco-IOS-XR-infra-sla-cfg", "sla"): "Cisco_IOS_XR_infra_sla_cfg.Sla",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-sla-oper", "sla"): "Cisco_IOS_XR_infra_sla_oper.Sla",
("Cisco-IOS-XR-infra-sla-oper", "sla"): "Cisco_IOS_XR_infra_sla_oper.Sla",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-sla-oper", "sla-nodes"): "Cisco_IOS_XR_infra_sla_oper.SlaNodes",
("Cisco-IOS-XR-infra-sla-oper", "sla-nodes"): "Cisco_IOS_XR_infra_sla_oper.SlaNodes",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-statsd-cfg", "statistics"): "Cisco_IOS_XR_infra_statsd_cfg.Statistics",
("Cisco-IOS-XR-infra-statsd-cfg", "statistics"): "Cisco_IOS_XR_infra_statsd_cfg.Statistics",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-statsd-oper", "infra-statistics"): "Cisco_IOS_XR_infra_statsd_oper.InfraStatistics",
("Cisco-IOS-XR-infra-statsd-oper", "infra-statistics"): "Cisco_IOS_XR_infra_statsd_oper.InfraStatistics",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-syslog-cfg", "syslog-service"): "Cisco_IOS_XR_infra_syslog_cfg.SyslogService",
("Cisco-IOS-XR-infra-syslog-cfg", "syslog-service"): "Cisco_IOS_XR_infra_syslog_cfg.SyslogService",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-syslog-cfg", "syslog"): "Cisco_IOS_XR_infra_syslog_cfg.Syslog",
("Cisco-IOS-XR-infra-syslog-cfg", "syslog"): "Cisco_IOS_XR_infra_syslog_cfg.Syslog",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-syslog-oper", "logging"): "Cisco_IOS_XR_infra_syslog_oper.Logging",
("Cisco-IOS-XR-infra-syslog-oper", "logging"): "Cisco_IOS_XR_infra_syslog_oper.Logging",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-syslog-oper", "syslog"): "Cisco_IOS_XR_infra_syslog_oper.Syslog",
("Cisco-IOS-XR-infra-syslog-oper", "syslog"): "Cisco_IOS_XR_infra_syslog_oper.Syslog",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-tc-cfg", "traffic-collector"): "Cisco_IOS_XR_infra_tc_cfg.TrafficCollector",
("Cisco-IOS-XR-infra-tc-cfg", "traffic-collector"): "Cisco_IOS_XR_infra_tc_cfg.TrafficCollector",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-tc-oper", "traffic-collector"): "Cisco_IOS_XR_infra_tc_oper.TrafficCollector",
("Cisco-IOS-XR-infra-tc-oper", "traffic-collector"): "Cisco_IOS_XR_infra_tc_oper.TrafficCollector",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-agent-oper", "pcc"): "Cisco_IOS_XR_infra_xtc_agent_oper.Pcc",
("Cisco-IOS-XR-infra-xtc-agent-oper", "pcc"): "Cisco_IOS_XR_infra_xtc_agent_oper.Pcc",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-agent-oper", "xtc"): "Cisco_IOS_XR_infra_xtc_agent_oper.Xtc",
("Cisco-IOS-XR-infra-xtc-agent-oper", "xtc"): "Cisco_IOS_XR_infra_xtc_agent_oper.Xtc",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-cfg", "pce"): "Cisco_IOS_XR_infra_xtc_cfg.Pce",
("Cisco-IOS-XR-infra-xtc-cfg", "pce"): "Cisco_IOS_XR_infra_xtc_cfg.Pce",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-oper", "pce-lsp-data"): "Cisco_IOS_XR_infra_xtc_oper.PceLspData",
("Cisco-IOS-XR-infra-xtc-oper", "pce-lsp-data"): "Cisco_IOS_XR_infra_xtc_oper.PceLspData",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-oper", "pce-peer"): "Cisco_IOS_XR_infra_xtc_oper.PcePeer",
("Cisco-IOS-XR-infra-xtc-oper", "pce-peer"): "Cisco_IOS_XR_infra_xtc_oper.PcePeer",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-oper", "pce-topology"): "Cisco_IOS_XR_infra_xtc_oper.PceTopology",
("Cisco-IOS-XR-infra-xtc-oper", "pce-topology"): "Cisco_IOS_XR_infra_xtc_oper.PceTopology",
("http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-oper", "pce"): "Cisco_IOS_XR_infra_xtc_oper.Pce",
("Cisco-IOS-XR-infra-xtc-oper", "pce"): "Cisco_IOS_XR_infra_xtc_oper.Pce",
("http://cisco.com/ns/yang/Cisco-IOS-XR-installmgr-admin-oper", "install"): "Cisco_IOS_XR_installmgr_admin_oper.Install",
("Cisco-IOS-XR-installmgr-admin-oper", "install"): "Cisco_IOS_XR_installmgr_admin_oper.Install",
("http://cisco.com/ns/yang/Cisco-IOS-XR-invmgr-cfg", "inventory-configurations"): "Cisco_IOS_XR_invmgr_cfg.InventoryConfigurations",
("Cisco-IOS-XR-invmgr-cfg", "inventory-configurations"): "Cisco_IOS_XR_invmgr_cfg.InventoryConfigurations",
("http://cisco.com/ns/yang/Cisco-IOS-XR-invmgr-oper", "inventory"): "Cisco_IOS_XR_invmgr_oper.Inventory",
("Cisco-IOS-XR-invmgr-oper", "inventory"): "Cisco_IOS_XR_invmgr_oper.Inventory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-bfd-cfg", "bfd"): "Cisco_IOS_XR_ip_bfd_cfg.Bfd",
("Cisco-IOS-XR-ip-bfd-cfg", "bfd"): "Cisco_IOS_XR_ip_bfd_cfg.Bfd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-bfd-oper", "bfd"): "Cisco_IOS_XR_ip_bfd_oper.Bfd",
("Cisco-IOS-XR-ip-bfd-oper", "bfd"): "Cisco_IOS_XR_ip_bfd_oper.Bfd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-daps-cfg", "address-pool-service"): "Cisco_IOS_XR_ip_daps_cfg.AddressPoolService",
("Cisco-IOS-XR-ip-daps-cfg", "address-pool-service"): "Cisco_IOS_XR_ip_daps_cfg.AddressPoolService",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-daps-oper", "address-pool-service"): "Cisco_IOS_XR_ip_daps_oper.AddressPoolService",
("Cisco-IOS-XR-ip-daps-oper", "address-pool-service"): "Cisco_IOS_XR_ip_daps_oper.AddressPoolService",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-domain-cfg", "ip-domain"): "Cisco_IOS_XR_ip_domain_cfg.IpDomain",
("Cisco-IOS-XR-ip-domain-cfg", "ip-domain"): "Cisco_IOS_XR_ip_domain_cfg.IpDomain",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-domain-oper", "ip-domain"): "Cisco_IOS_XR_ip_domain_oper.IpDomain",
("Cisco-IOS-XR-ip-domain-oper", "ip-domain"): "Cisco_IOS_XR_ip_domain_oper.IpDomain",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-cfg", "ip-arm"): "Cisco_IOS_XR_ip_iarm_cfg.IpArm",
("Cisco-IOS-XR-ip-iarm-cfg", "ip-arm"): "Cisco_IOS_XR_ip_iarm_cfg.IpArm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-v4-oper", "ipv4arm"): "Cisco_IOS_XR_ip_iarm_v4_oper.Ipv4arm",
("Cisco-IOS-XR-ip-iarm-v4-oper", "ipv4arm"): "Cisco_IOS_XR_ip_iarm_v4_oper.Ipv4arm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-v6-oper", "ipv6arm"): "Cisco_IOS_XR_ip_iarm_v6_oper.Ipv6arm",
("Cisco-IOS-XR-ip-iarm-v6-oper", "ipv6arm"): "Cisco_IOS_XR_ip_iarm_v6_oper.Ipv6arm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-icmp-cfg", "icmp"): "Cisco_IOS_XR_ip_icmp_cfg.Icmp",
("Cisco-IOS-XR-ip-icmp-cfg", "icmp"): "Cisco_IOS_XR_ip_icmp_cfg.Icmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iep-cfg", "ip-explicit-paths"): "Cisco_IOS_XR_ip_iep_cfg.IpExplicitPaths",
("Cisco-IOS-XR-ip-iep-cfg", "ip-explicit-paths"): "Cisco_IOS_XR_ip_iep_cfg.IpExplicitPaths",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iep-oper", "explicit-paths"): "Cisco_IOS_XR_ip_iep_oper.ExplicitPaths",
("Cisco-IOS-XR-ip-iep-oper", "explicit-paths"): "Cisco_IOS_XR_ip_iep_oper.ExplicitPaths",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-mobileip-cfg", "mobile-ip"): "Cisco_IOS_XR_ip_mobileip_cfg.MobileIp",
("Cisco-IOS-XR-ip-mobileip-cfg", "mobile-ip"): "Cisco_IOS_XR_ip_mobileip_cfg.MobileIp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-mobileip-oper", "pmipv6"): "Cisco_IOS_XR_ip_mobileip_oper.Pmipv6",
("Cisco-IOS-XR-ip-mobileip-oper", "pmipv6"): "Cisco_IOS_XR_ip_mobileip_oper.Pmipv6",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-ntp-admin-oper", "ntp"): "Cisco_IOS_XR_ip_ntp_admin_oper.Ntp",
("Cisco-IOS-XR-ip-ntp-admin-oper", "ntp"): "Cisco_IOS_XR_ip_ntp_admin_oper.Ntp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-ntp-cfg", "ntp"): "Cisco_IOS_XR_ip_ntp_cfg.Ntp",
("Cisco-IOS-XR-ip-ntp-cfg", "ntp"): "Cisco_IOS_XR_ip_ntp_cfg.Ntp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-ntp-oper", "ntp"): "Cisco_IOS_XR_ip_ntp_oper.Ntp",
("Cisco-IOS-XR-ip-ntp-oper", "ntp"): "Cisco_IOS_XR_ip_ntp_oper.Ntp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-pfilter-oper", "pfilter-ma"): "Cisco_IOS_XR_ip_pfilter_oper.PfilterMa",
("Cisco-IOS-XR-ip-pfilter-oper", "pfilter-ma"): "Cisco_IOS_XR_ip_pfilter_oper.PfilterMa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-raw-cfg", "ip-raw"): "Cisco_IOS_XR_ip_raw_cfg.IpRaw",
("Cisco-IOS-XR-ip-raw-cfg", "ip-raw"): "Cisco_IOS_XR_ip_raw_cfg.IpRaw",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-cfg", "rib"): "Cisco_IOS_XR_ip_rib_cfg.Rib",
("Cisco-IOS-XR-ip-rib-cfg", "rib"): "Cisco_IOS_XR_ip_rib_cfg.Rib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-ipv4-oper", "rib-stdby"): "Cisco_IOS_XR_ip_rib_ipv4_oper.RibStdby",
("Cisco-IOS-XR-ip-rib-ipv4-oper", "rib-stdby"): "Cisco_IOS_XR_ip_rib_ipv4_oper.RibStdby",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-ipv4-oper", "rib"): "Cisco_IOS_XR_ip_rib_ipv4_oper.Rib",
("Cisco-IOS-XR-ip-rib-ipv4-oper", "rib"): "Cisco_IOS_XR_ip_rib_ipv4_oper.Rib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-ipv6-oper", "ipv6-rib"): "Cisco_IOS_XR_ip_rib_ipv6_oper.Ipv6Rib",
("Cisco-IOS-XR-ip-rib-ipv6-oper", "ipv6-rib"): "Cisco_IOS_XR_ip_rib_ipv6_oper.Ipv6Rib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-ipv6-oper", "ipv6-rib-stdby"): "Cisco_IOS_XR_ip_rib_ipv6_oper.Ipv6RibStdby",
("Cisco-IOS-XR-ip-rib-ipv6-oper", "ipv6-rib-stdby"): "Cisco_IOS_XR_ip_rib_ipv6_oper.Ipv6RibStdby",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rip-cfg", "rip"): "Cisco_IOS_XR_ip_rip_cfg.Rip",
("Cisco-IOS-XR-ip-rip-cfg", "rip"): "Cisco_IOS_XR_ip_rip_cfg.Rip",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rip-oper", "rip"): "Cisco_IOS_XR_ip_rip_oper.Rip",
("Cisco-IOS-XR-ip-rip-oper", "rip"): "Cisco_IOS_XR_ip_rip_oper.Rip",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rsvp-cfg", "rsvp"): "Cisco_IOS_XR_ip_rsvp_cfg.Rsvp",
("Cisco-IOS-XR-ip-rsvp-cfg", "rsvp"): "Cisco_IOS_XR_ip_rsvp_cfg.Rsvp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rsvp-oper", "rsvp-standby"): "Cisco_IOS_XR_ip_rsvp_oper.RsvpStandby",
("Cisco-IOS-XR-ip-rsvp-oper", "rsvp-standby"): "Cisco_IOS_XR_ip_rsvp_oper.RsvpStandby",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rsvp-oper", "rsvp"): "Cisco_IOS_XR_ip_rsvp_oper.Rsvp",
("Cisco-IOS-XR-ip-rsvp-oper", "rsvp"): "Cisco_IOS_XR_ip_rsvp_oper.Rsvp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-sbfd-cfg", "sbfd"): "Cisco_IOS_XR_ip_sbfd_cfg.Sbfd",
("Cisco-IOS-XR-ip-sbfd-cfg", "sbfd"): "Cisco_IOS_XR_ip_sbfd_cfg.Sbfd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-sbfd-oper", "sbfd"): "Cisco_IOS_XR_ip_sbfd_oper.Sbfd",
("Cisco-IOS-XR-ip-sbfd-oper", "sbfd"): "Cisco_IOS_XR_ip_sbfd_oper.Sbfd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-static-cfg", "router-static"): "Cisco_IOS_XR_ip_static_cfg.RouterStatic",
("Cisco-IOS-XR-ip-static-cfg", "router-static"): "Cisco_IOS_XR_ip_static_cfg.RouterStatic",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-tcp-cfg", "ip-tcp"): "Cisco_IOS_XR_ip_tcp_cfg.IpTcp",
("Cisco-IOS-XR-ip-tcp-cfg", "ip-tcp"): "Cisco_IOS_XR_ip_tcp_cfg.IpTcp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-tcp-cfg", "ip"): "Cisco_IOS_XR_ip_tcp_cfg.Ip",
("Cisco-IOS-XR-ip-tcp-cfg", "ip"): "Cisco_IOS_XR_ip_tcp_cfg.Ip",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-tcp-oper", "tcp-connection"): "Cisco_IOS_XR_ip_tcp_oper.TcpConnection",
("Cisco-IOS-XR-ip-tcp-oper", "tcp-connection"): "Cisco_IOS_XR_ip_tcp_oper.TcpConnection",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-tcp-oper", "tcp"): "Cisco_IOS_XR_ip_tcp_oper.Tcp",
("Cisco-IOS-XR-ip-tcp-oper", "tcp"): "Cisco_IOS_XR_ip_tcp_oper.Tcp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-tcp-oper", "tcp-nsr"): "Cisco_IOS_XR_ip_tcp_oper.TcpNsr",
("Cisco-IOS-XR-ip-tcp-oper", "tcp-nsr"): "Cisco_IOS_XR_ip_tcp_oper.TcpNsr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-udp-cfg", "ip-udp"): "Cisco_IOS_XR_ip_udp_cfg.IpUdp",
("Cisco-IOS-XR-ip-udp-cfg", "ip-udp"): "Cisco_IOS_XR_ip_udp_cfg.IpUdp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-udp-oper", "udp"): "Cisco_IOS_XR_ip_udp_oper.Udp",
("Cisco-IOS-XR-ip-udp-oper", "udp"): "Cisco_IOS_XR_ip_udp_oper.Udp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ip-udp-oper", "udp-connection"): "Cisco_IOS_XR_ip_udp_oper.UdpConnection",
("Cisco-IOS-XR-ip-udp-oper", "udp-connection"): "Cisco_IOS_XR_ip_udp_oper.UdpConnection",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-acl-cfg", "ipv4-acl-and-prefix-list"): "Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList",
("Cisco-IOS-XR-ipv4-acl-cfg", "ipv4-acl-and-prefix-list"): "Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-acl-oper", "ipv4-acl-and-prefix-list"): "Cisco_IOS_XR_ipv4_acl_oper.Ipv4AclAndPrefixList",
("Cisco-IOS-XR-ipv4-acl-oper", "ipv4-acl-and-prefix-list"): "Cisco_IOS_XR_ipv4_acl_oper.Ipv4AclAndPrefixList",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-cfg", "arp"): "Cisco_IOS_XR_ipv4_arp_cfg.Arp",
("Cisco-IOS-XR-ipv4-arp-cfg", "arp"): "Cisco_IOS_XR_ipv4_arp_cfg.Arp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-cfg", "iedge-cfg"): "Cisco_IOS_XR_ipv4_arp_cfg.IedgeCfg",
("Cisco-IOS-XR-ipv4-arp-cfg", "iedge-cfg"): "Cisco_IOS_XR_ipv4_arp_cfg.IedgeCfg",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-cfg", "arpgmp"): "Cisco_IOS_XR_ipv4_arp_cfg.Arpgmp",
("Cisco-IOS-XR-ipv4-arp-cfg", "arpgmp"): "Cisco_IOS_XR_ipv4_arp_cfg.Arpgmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-cfg", "arp-redundancy"): "Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy",
("Cisco-IOS-XR-ipv4-arp-cfg", "arp-redundancy"): "Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-oper", "arp-gmp"): "Cisco_IOS_XR_ipv4_arp_oper.ArpGmp",
("Cisco-IOS-XR-ipv4-arp-oper", "arp-gmp"): "Cisco_IOS_XR_ipv4_arp_oper.ArpGmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-oper", "arp"): "Cisco_IOS_XR_ipv4_arp_oper.Arp",
("Cisco-IOS-XR-ipv4-arp-oper", "arp"): "Cisco_IOS_XR_ipv4_arp_oper.Arp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-autorp-oper", "auto-rp"): "Cisco_IOS_XR_ipv4_autorp_oper.AutoRp",
("Cisco-IOS-XR-ipv4-autorp-oper", "auto-rp"): "Cisco_IOS_XR_ipv4_autorp_oper.AutoRp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-cfg", "bgp"): "Cisco_IOS_XR_ipv4_bgp_cfg.Bgp",
("Cisco-IOS-XR-ipv4-bgp-cfg", "bgp"): "Cisco_IOS_XR_ipv4_bgp_cfg.Bgp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-cfg", "bmp-server-all"): "Cisco_IOS_XR_ipv4_bgp_cfg.BmpServerAll",
("Cisco-IOS-XR-ipv4-bgp-cfg", "bmp-server-all"): "Cisco_IOS_XR_ipv4_bgp_cfg.BmpServerAll",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-cfg", "bmp-servers"): "Cisco_IOS_XR_ipv4_bgp_cfg.BmpServers",
("Cisco-IOS-XR-ipv4-bgp-cfg", "bmp-servers"): "Cisco_IOS_XR_ipv4_bgp_cfg.BmpServers",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-oc-oper", "oc-bgp"): "Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp",
("Cisco-IOS-XR-ipv4-bgp-oc-oper", "oc-bgp"): "Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-oper", "bgp"): "Cisco_IOS_XR_ipv4_bgp_oper.Bgp",
("Cisco-IOS-XR-ipv4-bgp-oper", "bgp"): "Cisco_IOS_XR_ipv4_bgp_oper.Bgp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-dhcpd-cfg", "ipv4-dhcpd"): "Cisco_IOS_XR_ipv4_dhcpd_cfg.Ipv4Dhcpd",
("Cisco-IOS-XR-ipv4-dhcpd-cfg", "ipv4-dhcpd"): "Cisco_IOS_XR_ipv4_dhcpd_cfg.Ipv4Dhcpd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-dhcpd-oper", "dhcp-client"): "Cisco_IOS_XR_ipv4_dhcpd_oper.DhcpClient",
("Cisco-IOS-XR-ipv4-dhcpd-oper", "dhcp-client"): "Cisco_IOS_XR_ipv4_dhcpd_oper.DhcpClient",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-dhcpd-oper", "ipv4-dhcpd"): "Cisco_IOS_XR_ipv4_dhcpd_oper.Ipv4Dhcpd",
("Cisco-IOS-XR-ipv4-dhcpd-oper", "ipv4-dhcpd"): "Cisco_IOS_XR_ipv4_dhcpd_oper.Ipv4Dhcpd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-filesystems-cfg", "rcp"): "Cisco_IOS_XR_ipv4_filesystems_cfg.Rcp",
("Cisco-IOS-XR-ipv4-filesystems-cfg", "rcp"): "Cisco_IOS_XR_ipv4_filesystems_cfg.Rcp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-filesystems-cfg", "ftp"): "Cisco_IOS_XR_ipv4_filesystems_cfg.Ftp",
("Cisco-IOS-XR-ipv4-filesystems-cfg", "ftp"): "Cisco_IOS_XR_ipv4_filesystems_cfg.Ftp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-filesystems-cfg", "tftp"): "Cisco_IOS_XR_ipv4_filesystems_cfg.Tftp",
("Cisco-IOS-XR-ipv4-filesystems-cfg", "tftp"): "Cisco_IOS_XR_ipv4_filesystems_cfg.Tftp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-hsrp-cfg", "hsrp"): "Cisco_IOS_XR_ipv4_hsrp_cfg.Hsrp",
("Cisco-IOS-XR-ipv4-hsrp-cfg", "hsrp"): "Cisco_IOS_XR_ipv4_hsrp_cfg.Hsrp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-hsrp-oper", "hsrp"): "Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp",
("Cisco-IOS-XR-ipv4-hsrp-oper", "hsrp"): "Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-cfg", "igmp"): "Cisco_IOS_XR_ipv4_igmp_cfg.Igmp",
("Cisco-IOS-XR-ipv4-igmp-cfg", "igmp"): "Cisco_IOS_XR_ipv4_igmp_cfg.Igmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-cfg", "amt"): "Cisco_IOS_XR_ipv4_igmp_cfg.Amt",
("Cisco-IOS-XR-ipv4-igmp-cfg", "amt"): "Cisco_IOS_XR_ipv4_igmp_cfg.Amt",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-cfg", "mld"): "Cisco_IOS_XR_ipv4_igmp_cfg.Mld",
("Cisco-IOS-XR-ipv4-igmp-cfg", "mld"): "Cisco_IOS_XR_ipv4_igmp_cfg.Mld",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-oper", "mld"): "Cisco_IOS_XR_ipv4_igmp_oper.Mld",
("Cisco-IOS-XR-ipv4-igmp-oper", "mld"): "Cisco_IOS_XR_ipv4_igmp_oper.Mld",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-oper", "igmp"): "Cisco_IOS_XR_ipv4_igmp_oper.Igmp",
("Cisco-IOS-XR-ipv4-igmp-oper", "igmp"): "Cisco_IOS_XR_ipv4_igmp_oper.Igmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-io-oper", "ipv4-network"): "Cisco_IOS_XR_ipv4_io_oper.Ipv4Network",
("Cisco-IOS-XR-ipv4-io-oper", "ipv4-network"): "Cisco_IOS_XR_ipv4_io_oper.Ipv4Network",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ma-cfg", "ipv4-network-global"): "Cisco_IOS_XR_ipv4_ma_cfg.Ipv4NetworkGlobal",
("Cisco-IOS-XR-ipv4-ma-cfg", "ipv4-network-global"): "Cisco_IOS_XR_ipv4_ma_cfg.Ipv4NetworkGlobal",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ma-cfg", "subscriber-pta"): "Cisco_IOS_XR_ipv4_ma_cfg.SubscriberPta",
("Cisco-IOS-XR-ipv4-ma-cfg", "subscriber-pta"): "Cisco_IOS_XR_ipv4_ma_cfg.SubscriberPta",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-mfwd-cfg", "mfwd"): "Cisco_IOS_XR_ipv4_mfwd_cfg.Mfwd",
("Cisco-IOS-XR-ipv4-mfwd-cfg", "mfwd"): "Cisco_IOS_XR_ipv4_mfwd_cfg.Mfwd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-msdp-cfg", "msdp"): "Cisco_IOS_XR_ipv4_msdp_cfg.Msdp",
("Cisco-IOS-XR-ipv4-msdp-cfg", "msdp"): "Cisco_IOS_XR_ipv4_msdp_cfg.Msdp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ospf-cfg", "ospf"): "Cisco_IOS_XR_ipv4_ospf_cfg.Ospf",
("Cisco-IOS-XR-ipv4-ospf-cfg", "ospf"): "Cisco_IOS_XR_ipv4_ospf_cfg.Ospf",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ospf-oper", "ospf"): "Cisco_IOS_XR_ipv4_ospf_oper.Ospf",
("Cisco-IOS-XR-ipv4-ospf-oper", "ospf"): "Cisco_IOS_XR_ipv4_ospf_oper.Ospf",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-pim-cfg", "pim"): "Cisco_IOS_XR_ipv4_pim_cfg.Pim",
("Cisco-IOS-XR-ipv4-pim-cfg", "pim"): "Cisco_IOS_XR_ipv4_pim_cfg.Pim",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-pim-oper", "pim-ma"): "Cisco_IOS_XR_ipv4_pim_oper.PimMa",
("Cisco-IOS-XR-ipv4-pim-oper", "pim-ma"): "Cisco_IOS_XR_ipv4_pim_oper.PimMa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-pim-oper", "pim"): "Cisco_IOS_XR_ipv4_pim_oper.Pim",
("Cisco-IOS-XR-ipv4-pim-oper", "pim"): "Cisco_IOS_XR_ipv4_pim_oper.Pim",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-pim-oper", "ipv6-pim"): "Cisco_IOS_XR_ipv4_pim_oper.Ipv6Pim",
("Cisco-IOS-XR-ipv4-pim-oper", "ipv6-pim"): "Cisco_IOS_XR_ipv4_pim_oper.Ipv6Pim",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-pim-oper", "pim6-ma"): "Cisco_IOS_XR_ipv4_pim_oper.Pim6Ma",
("Cisco-IOS-XR-ipv4-pim-oper", "pim6-ma"): "Cisco_IOS_XR_ipv4_pim_oper.Pim6Ma",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-smiap-cfg", "ipv4-virtual"): "Cisco_IOS_XR_ipv4_smiap_cfg.Ipv4Virtual",
("Cisco-IOS-XR-ipv4-smiap-cfg", "ipv4-virtual"): "Cisco_IOS_XR_ipv4_smiap_cfg.Ipv4Virtual",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-telnet-cfg", "ipv6-telnet"): "Cisco_IOS_XR_ipv4_telnet_cfg.Ipv6Telnet",
("Cisco-IOS-XR-ipv4-telnet-cfg", "ipv6-telnet"): "Cisco_IOS_XR_ipv4_telnet_cfg.Ipv6Telnet",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-telnet-cfg", "ipv4-telnet"): "Cisco_IOS_XR_ipv4_telnet_cfg.Ipv4Telnet",
("Cisco-IOS-XR-ipv4-telnet-cfg", "ipv4-telnet"): "Cisco_IOS_XR_ipv4_telnet_cfg.Ipv4Telnet",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-telnet-mgmt-cfg", "telnet"): "Cisco_IOS_XR_ipv4_telnet_mgmt_cfg.Telnet",
("Cisco-IOS-XR-ipv4-telnet-mgmt-cfg", "telnet"): "Cisco_IOS_XR_ipv4_telnet_mgmt_cfg.Telnet",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-vrrp-cfg", "vrrp"): "Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp",
("Cisco-IOS-XR-ipv4-vrrp-cfg", "vrrp"): "Cisco_IOS_XR_ipv4_vrrp_cfg.Vrrp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-vrrp-oper", "vrrp"): "Cisco_IOS_XR_ipv4_vrrp_oper.Vrrp",
("Cisco-IOS-XR-ipv4-vrrp-oper", "vrrp"): "Cisco_IOS_XR_ipv4_vrrp_oper.Vrrp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-acl-cfg", "ipv6-acl-and-prefix-list"): "Cisco_IOS_XR_ipv6_acl_cfg.Ipv6AclAndPrefixList",
("Cisco-IOS-XR-ipv6-acl-cfg", "ipv6-acl-and-prefix-list"): "Cisco_IOS_XR_ipv6_acl_cfg.Ipv6AclAndPrefixList",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-acl-oper", "ipv6-acl-and-prefix-list"): "Cisco_IOS_XR_ipv6_acl_oper.Ipv6AclAndPrefixList",
("Cisco-IOS-XR-ipv6-acl-oper", "ipv6-acl-and-prefix-list"): "Cisco_IOS_XR_ipv6_acl_oper.Ipv6AclAndPrefixList",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-io-cfg", "ipv6-configuration"): "Cisco_IOS_XR_ipv6_io_cfg.Ipv6Configuration",
("Cisco-IOS-XR-ipv6-io-cfg", "ipv6-configuration"): "Cisco_IOS_XR_ipv6_io_cfg.Ipv6Configuration",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-io-oper", "ipv6-io"): "Cisco_IOS_XR_ipv6_io_oper.Ipv6Io",
("Cisco-IOS-XR-ipv6-io-oper", "ipv6-io"): "Cisco_IOS_XR_ipv6_io_oper.Ipv6Io",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ma-oper", "ipv6-network"): "Cisco_IOS_XR_ipv6_ma_oper.Ipv6Network",
("Cisco-IOS-XR-ipv6-ma-oper", "ipv6-network"): "Cisco_IOS_XR_ipv6_ma_oper.Ipv6Network",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-nd-cfg", "ipv6-neighbor"): "Cisco_IOS_XR_ipv6_nd_cfg.Ipv6Neighbor",
("Cisco-IOS-XR-ipv6-nd-cfg", "ipv6-neighbor"): "Cisco_IOS_XR_ipv6_nd_cfg.Ipv6Neighbor",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-nd-oper", "ipv6-node-discovery"): "Cisco_IOS_XR_ipv6_nd_oper.Ipv6NodeDiscovery",
("Cisco-IOS-XR-ipv6-nd-oper", "ipv6-node-discovery"): "Cisco_IOS_XR_ipv6_nd_oper.Ipv6NodeDiscovery",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-new-dhcpv6d-cfg", "dhcpv6"): "Cisco_IOS_XR_ipv6_new_dhcpv6d_cfg.Dhcpv6",
("Cisco-IOS-XR-ipv6-new-dhcpv6d-cfg", "dhcpv6"): "Cisco_IOS_XR_ipv6_new_dhcpv6d_cfg.Dhcpv6",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-new-dhcpv6d-oper", "dhcpv6"): "Cisco_IOS_XR_ipv6_new_dhcpv6d_oper.Dhcpv6",
("Cisco-IOS-XR-ipv6-new-dhcpv6d-oper", "dhcpv6"): "Cisco_IOS_XR_ipv6_new_dhcpv6d_oper.Dhcpv6",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ospfv3-cfg", "ospfv3"): "Cisco_IOS_XR_ipv6_ospfv3_cfg.Ospfv3",
("Cisco-IOS-XR-ipv6-ospfv3-cfg", "ospfv3"): "Cisco_IOS_XR_ipv6_ospfv3_cfg.Ospfv3",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ospfv3-oper", "ospfv3"): "Cisco_IOS_XR_ipv6_ospfv3_oper.Ospfv3",
("Cisco-IOS-XR-ipv6-ospfv3-oper", "ospfv3"): "Cisco_IOS_XR_ipv6_ospfv3_oper.Ospfv3",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-smiap-cfg", "ipv6-virtual"): "Cisco_IOS_XR_ipv6_smiap_cfg.Ipv6Virtual",
("Cisco-IOS-XR-ipv6-smiap-cfg", "ipv6-virtual"): "Cisco_IOS_XR_ipv6_smiap_cfg.Ipv6Virtual",
("http://cisco.com/ns/yang/Cisco-IOS-XR-kim-tpa-cfg", "tpa"): "Cisco_IOS_XR_kim_tpa_cfg.Tpa",
("Cisco-IOS-XR-kim-tpa-cfg", "tpa"): "Cisco_IOS_XR_kim_tpa_cfg.Tpa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2-eth-infra-cfg", "ethernet-features"): "Cisco_IOS_XR_l2_eth_infra_cfg.EthernetFeatures",
("Cisco-IOS-XR-l2-eth-infra-cfg", "ethernet-features"): "Cisco_IOS_XR_l2_eth_infra_cfg.EthernetFeatures",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2-eth-infra-oper", "mac-accounting"): "Cisco_IOS_XR_l2_eth_infra_oper.MacAccounting",
("Cisco-IOS-XR-l2-eth-infra-oper", "mac-accounting"): "Cisco_IOS_XR_l2_eth_infra_oper.MacAccounting",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2-eth-infra-oper", "vlan"): "Cisco_IOS_XR_l2_eth_infra_oper.Vlan",
("Cisco-IOS-XR-l2-eth-infra-oper", "vlan"): "Cisco_IOS_XR_l2_eth_infra_oper.Vlan",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2-eth-infra-oper", "ethernet-encapsulation"): "Cisco_IOS_XR_l2_eth_infra_oper.EthernetEncapsulation",
("Cisco-IOS-XR-l2-eth-infra-oper", "ethernet-encapsulation"): "Cisco_IOS_XR_l2_eth_infra_oper.EthernetEncapsulation",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2rib-oper", "l2rib"): "Cisco_IOS_XR_l2rib_oper.L2rib",
("Cisco-IOS-XR-l2rib-oper", "l2rib"): "Cisco_IOS_XR_l2rib_oper.L2rib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-cfg", "l2vpn"): "Cisco_IOS_XR_l2vpn_cfg.L2vpn",
("Cisco-IOS-XR-l2vpn-cfg", "l2vpn"): "Cisco_IOS_XR_l2vpn_cfg.L2vpn",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-cfg", "generic-interface-lists"): "Cisco_IOS_XR_l2vpn_cfg.GenericInterfaceLists",
("Cisco-IOS-XR-l2vpn-cfg", "generic-interface-lists"): "Cisco_IOS_XR_l2vpn_cfg.GenericInterfaceLists",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-cfg", "evpn"): "Cisco_IOS_XR_l2vpn_cfg.Evpn",
("Cisco-IOS-XR-l2vpn-cfg", "evpn"): "Cisco_IOS_XR_l2vpn_cfg.Evpn",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-oper", "l2vpn-forwarding"): "Cisco_IOS_XR_l2vpn_oper.L2vpnForwarding",
("Cisco-IOS-XR-l2vpn-oper", "l2vpn-forwarding"): "Cisco_IOS_XR_l2vpn_oper.L2vpnForwarding",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-oper", "l2vpnv2"): "Cisco_IOS_XR_l2vpn_oper.L2vpnv2",
("Cisco-IOS-XR-l2vpn-oper", "l2vpnv2"): "Cisco_IOS_XR_l2vpn_oper.L2vpnv2",
("http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-oper", "generic-interface-list-v2"): "Cisco_IOS_XR_l2vpn_oper.GenericInterfaceListV2",
("Cisco-IOS-XR-l2vpn-oper", "generic-interface-list-v2"): "Cisco_IOS_XR_l2vpn_oper.GenericInterfaceListV2",
("http://cisco.com/ns/yang/Cisco-IOS-XR-li-cfg", "lawful-intercept"): "Cisco_IOS_XR_li_cfg.LawfulIntercept",
("Cisco-IOS-XR-li-cfg", "lawful-intercept"): "Cisco_IOS_XR_li_cfg.LawfulIntercept",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lib-keychain-cfg", "keychains"): "Cisco_IOS_XR_lib_keychain_cfg.Keychains",
("Cisco-IOS-XR-lib-keychain-cfg", "keychains"): "Cisco_IOS_XR_lib_keychain_cfg.Keychains",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lib-keychain-masterkey-aes-cfg", "password"): "Cisco_IOS_XR_lib_keychain_masterkey_aes_cfg.Password",
("Cisco-IOS-XR-lib-keychain-masterkey-aes-cfg", "password"): "Cisco_IOS_XR_lib_keychain_masterkey_aes_cfg.Password",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lib-keychain-oper", "keychain"): "Cisco_IOS_XR_lib_keychain_oper.Keychain",
("Cisco-IOS-XR-lib-keychain-oper", "keychain"): "Cisco_IOS_XR_lib_keychain_oper.Keychain",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lib-mpp-cfg", "control-plane"): "Cisco_IOS_XR_lib_mpp_cfg.ControlPlane",
("Cisco-IOS-XR-lib-mpp-cfg", "control-plane"): "Cisco_IOS_XR_lib_mpp_cfg.ControlPlane",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lib-mpp-oper", "management-plane-protection"): "Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection",
("Cisco-IOS-XR-lib-mpp-oper", "management-plane-protection"): "Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection",
("http://cisco.com/ns/yang/Cisco-IOS-XR-linux-os-heap-summary-oper", "heap-summary"): "Cisco_IOS_XR_linux_os_heap_summary_oper.HeapSummary",
("Cisco-IOS-XR-linux-os-heap-summary-oper", "heap-summary"): "Cisco_IOS_XR_linux_os_heap_summary_oper.HeapSummary",
("http://cisco.com/ns/yang/Cisco-IOS-XR-linux-os-reboot-history-oper", "reboot-history"): "Cisco_IOS_XR_linux_os_reboot_history_oper.RebootHistory",
("Cisco-IOS-XR-linux-os-reboot-history-oper", "reboot-history"): "Cisco_IOS_XR_linux_os_reboot_history_oper.RebootHistory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lmp-cfg", "lmp"): "Cisco_IOS_XR_lmp_cfg.Lmp",
("Cisco-IOS-XR-lmp-cfg", "lmp"): "Cisco_IOS_XR_lmp_cfg.Lmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lmp-oper", "lmp"): "Cisco_IOS_XR_lmp_oper.Lmp",
("Cisco-IOS-XR-lmp-oper", "lmp"): "Cisco_IOS_XR_lmp_oper.Lmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-ifib-oper", "lpts-ifib"): "Cisco_IOS_XR_lpts_ifib_oper.LptsIfib",
("Cisco-IOS-XR-lpts-ifib-oper", "lpts-ifib"): "Cisco_IOS_XR_lpts_ifib_oper.LptsIfib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-lib-cfg", "lpts"): "Cisco_IOS_XR_lpts_lib_cfg.Lpts",
("Cisco-IOS-XR-lpts-lib-cfg", "lpts"): "Cisco_IOS_XR_lpts_lib_cfg.Lpts",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-pa-oper", "lpts-pa"): "Cisco_IOS_XR_lpts_pa_oper.LptsPa",
("Cisco-IOS-XR-lpts-pa-oper", "lpts-pa"): "Cisco_IOS_XR_lpts_pa_oper.LptsPa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-pre-ifib-oper", "lpts-pifib"): "Cisco_IOS_XR_lpts_pre_ifib_oper.LptsPifib_",
("Cisco-IOS-XR-lpts-pre-ifib-oper", "lpts-pifib"): "Cisco_IOS_XR_lpts_pre_ifib_oper.LptsPifib_",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-ems-cfg", "grpc"): "Cisco_IOS_XR_man_ems_cfg.Grpc",
("Cisco-IOS-XR-man-ems-cfg", "grpc"): "Cisco_IOS_XR_man_ems_cfg.Grpc",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-ems-oper", "grpc"): "Cisco_IOS_XR_man_ems_oper.Grpc",
("Cisco-IOS-XR-man-ems-oper", "grpc"): "Cisco_IOS_XR_man_ems_oper.Grpc",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-ipsla-cfg", "ipsla"): "Cisco_IOS_XR_man_ipsla_cfg.Ipsla",
("Cisco-IOS-XR-man-ipsla-cfg", "ipsla"): "Cisco_IOS_XR_man_ipsla_cfg.Ipsla",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-ipsla-oper", "ipsla"): "Cisco_IOS_XR_man_ipsla_oper.Ipsla",
("Cisco-IOS-XR-man-ipsla-oper", "ipsla"): "Cisco_IOS_XR_man_ipsla_oper.Ipsla",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-netconf-cfg", "netconf-yang"): "Cisco_IOS_XR_man_netconf_cfg.NetconfYang",
("Cisco-IOS-XR-man-netconf-cfg", "netconf-yang"): "Cisco_IOS_XR_man_netconf_cfg.NetconfYang",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-xml-ttyagent-cfg", "xr-xml"): "Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml",
("Cisco-IOS-XR-man-xml-ttyagent-cfg", "xr-xml"): "Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-xml-ttyagent-cfg", "netconf"): "Cisco_IOS_XR_man_xml_ttyagent_cfg.Netconf",
("Cisco-IOS-XR-man-xml-ttyagent-cfg", "netconf"): "Cisco_IOS_XR_man_xml_ttyagent_cfg.Netconf",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-xml-ttyagent-oper", "netconf"): "Cisco_IOS_XR_man_xml_ttyagent_oper.Netconf",
("Cisco-IOS-XR-man-xml-ttyagent-oper", "netconf"): "Cisco_IOS_XR_man_xml_ttyagent_oper.Netconf",
("http://cisco.com/ns/yang/Cisco-IOS-XR-man-xml-ttyagent-oper", "xr-xml"): "Cisco_IOS_XR_man_xml_ttyagent_oper.XrXml",
("Cisco-IOS-XR-man-xml-ttyagent-oper", "xr-xml"): "Cisco_IOS_XR_man_xml_ttyagent_oper.XrXml",
("http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-object-tracking-cfg", "object-trackings"): "Cisco_IOS_XR_manageability_object_tracking_cfg.ObjectTrackings",
("Cisco-IOS-XR-manageability-object-tracking-cfg", "object-trackings"): "Cisco_IOS_XR_manageability_object_tracking_cfg.ObjectTrackings",
("http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-object-tracking-oper", "object-tracking"): "Cisco_IOS_XR_manageability_object_tracking_oper.ObjectTracking",
("Cisco-IOS-XR-manageability-object-tracking-oper", "object-tracking"): "Cisco_IOS_XR_manageability_object_tracking_oper.ObjectTracking",
("http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-perfmgmt-cfg", "perf-mgmt"): "Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt",
("Cisco-IOS-XR-manageability-perfmgmt-cfg", "perf-mgmt"): "Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt",
("http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-perfmgmt-oper", "perf-mgmt"): "Cisco_IOS_XR_manageability_perfmgmt_oper.PerfMgmt",
("Cisco-IOS-XR-manageability-perfmgmt-oper", "perf-mgmt"): "Cisco_IOS_XR_manageability_perfmgmt_oper.PerfMgmt",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mdrv-lib-cfg", "fast-shutdown"): "Cisco_IOS_XR_mdrv_lib_cfg.FastShutdown",
("Cisco-IOS-XR-mdrv-lib-cfg", "fast-shutdown"): "Cisco_IOS_XR_mdrv_lib_cfg.FastShutdown",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mediasvr-linux-oper", "media-svr"): "Cisco_IOS_XR_mediasvr_linux_oper.MediaSvr",
("Cisco-IOS-XR-mediasvr-linux-oper", "media-svr"): "Cisco_IOS_XR_mediasvr_linux_oper.MediaSvr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-io-oper", "mpls-ea"): "Cisco_IOS_XR_mpls_io_oper.MplsEa",
("Cisco-IOS-XR-mpls-io-oper", "mpls-ea"): "Cisco_IOS_XR_mpls_io_oper.MplsEa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-io-oper", "mpls-ma"): "Cisco_IOS_XR_mpls_io_oper.MplsMa",
("Cisco-IOS-XR-mpls-io-oper", "mpls-ma"): "Cisco_IOS_XR_mpls_io_oper.MplsMa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-ldp-cfg", "mpls-ldp"): "Cisco_IOS_XR_mpls_ldp_cfg.MplsLdp",
("Cisco-IOS-XR-mpls-ldp-cfg", "mpls-ldp"): "Cisco_IOS_XR_mpls_ldp_cfg.MplsLdp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-ldp-oper", "mpls-ldp"): "Cisco_IOS_XR_mpls_ldp_oper.MplsLdp",
("Cisco-IOS-XR-mpls-ldp-oper", "mpls-ldp"): "Cisco_IOS_XR_mpls_ldp_oper.MplsLdp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-lsd-cfg", "mpls-lsd"): "Cisco_IOS_XR_mpls_lsd_cfg.MplsLsd",
("Cisco-IOS-XR-mpls-lsd-cfg", "mpls-lsd"): "Cisco_IOS_XR_mpls_lsd_cfg.MplsLsd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-lsd-oper", "mpls-lsd-nodes"): "Cisco_IOS_XR_mpls_lsd_oper.MplsLsdNodes",
("Cisco-IOS-XR-mpls-lsd-oper", "mpls-lsd-nodes"): "Cisco_IOS_XR_mpls_lsd_oper.MplsLsdNodes",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-lsd-oper", "mpls-lsd"): "Cisco_IOS_XR_mpls_lsd_oper.MplsLsd",
("Cisco-IOS-XR-mpls-lsd-oper", "mpls-lsd"): "Cisco_IOS_XR_mpls_lsd_oper.MplsLsd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-oam-cfg", "mpls-oam"): "Cisco_IOS_XR_mpls_oam_cfg.MplsOam",
("Cisco-IOS-XR-mpls-oam-cfg", "mpls-oam"): "Cisco_IOS_XR_mpls_oam_cfg.MplsOam",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-oam-oper", "mpls-oam"): "Cisco_IOS_XR_mpls_oam_oper.MplsOam",
("Cisco-IOS-XR-mpls-oam-oper", "mpls-oam"): "Cisco_IOS_XR_mpls_oam_oper.MplsOam",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-static-cfg", "mpls-static"): "Cisco_IOS_XR_mpls_static_cfg.MplsStatic",
("Cisco-IOS-XR-mpls-static-cfg", "mpls-static"): "Cisco_IOS_XR_mpls_static_cfg.MplsStatic",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-static-oper", "mpls-static"): "Cisco_IOS_XR_mpls_static_oper.MplsStatic",
("Cisco-IOS-XR-mpls-static-oper", "mpls-static"): "Cisco_IOS_XR_mpls_static_oper.MplsStatic",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-cfg", "mpls-te"): "Cisco_IOS_XR_mpls_te_cfg.MplsTe",
("Cisco-IOS-XR-mpls-te-cfg", "mpls-te"): "Cisco_IOS_XR_mpls_te_cfg.MplsTe",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper", "mpls-te"): "Cisco_IOS_XR_mpls_te_oper.MplsTe",
("Cisco-IOS-XR-mpls-te-oper", "mpls-te"): "Cisco_IOS_XR_mpls_te_oper.MplsTe",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper", "mpls-lcac-standby"): "Cisco_IOS_XR_mpls_te_oper.MplsLcacStandby",
("Cisco-IOS-XR-mpls-te-oper", "mpls-lcac-standby"): "Cisco_IOS_XR_mpls_te_oper.MplsLcacStandby",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper", "mpls-lcac"): "Cisco_IOS_XR_mpls_te_oper.MplsLcac",
("Cisco-IOS-XR-mpls-te-oper", "mpls-lcac"): "Cisco_IOS_XR_mpls_te_oper.MplsLcac",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper", "mpls-pce"): "Cisco_IOS_XR_mpls_te_oper.MplsPce",
("Cisco-IOS-XR-mpls-te-oper", "mpls-pce"): "Cisco_IOS_XR_mpls_te_oper.MplsPce",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper", "mpls-pce-stdby"): "Cisco_IOS_XR_mpls_te_oper.MplsPceStdby",
("Cisco-IOS-XR-mpls-te-oper", "mpls-pce-stdby"): "Cisco_IOS_XR_mpls_te_oper.MplsPceStdby",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper", "mpls-tp"): "Cisco_IOS_XR_mpls_te_oper.MplsTp",
("Cisco-IOS-XR-mpls-te-oper", "mpls-tp"): "Cisco_IOS_XR_mpls_te_oper.MplsTp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper", "mpls-te-standby"): "Cisco_IOS_XR_mpls_te_oper.MplsTeStandby",
("Cisco-IOS-XR-mpls-te-oper", "mpls-te-standby"): "Cisco_IOS_XR_mpls_te_oper.MplsTeStandby",
("http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-vpn-oper", "l3vpn"): "Cisco_IOS_XR_mpls_vpn_oper.L3vpn",
("Cisco-IOS-XR-mpls-vpn-oper", "l3vpn"): "Cisco_IOS_XR_mpls_vpn_oper.L3vpn",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ncs5k-fea-pfilter-nonatomic-cfg", "hardware"): "Cisco_IOS_XR_ncs5k_fea_pfilter_nonatomic_cfg.Hardware",
("Cisco-IOS-XR-ncs5k-fea-pfilter-nonatomic-cfg", "hardware"): "Cisco_IOS_XR_ncs5k_fea_pfilter_nonatomic_cfg.Hardware",
("http://cisco.com/ns/yang/Cisco-IOS-XR-nto-misc-oper", "memory-summary"): "Cisco_IOS_XR_nto_misc_oper.MemorySummary",
("Cisco-IOS-XR-nto-misc-oper", "memory-summary"): "Cisco_IOS_XR_nto_misc_oper.MemorySummary",
("http://cisco.com/ns/yang/Cisco-IOS-XR-optics-driver-quad-cfg", "node"): "Cisco_IOS_XR_optics_driver_quad_cfg.Node",
("Cisco-IOS-XR-optics-driver-quad-cfg", "node"): "Cisco_IOS_XR_optics_driver_quad_cfg.Node",
("http://cisco.com/ns/yang/Cisco-IOS-XR-parser-cfg", "parser"): "Cisco_IOS_XR_parser_cfg.Parser",
("Cisco-IOS-XR-parser-cfg", "parser"): "Cisco_IOS_XR_parser_cfg.Parser",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-bng-cfg", "bng-pbr"): "Cisco_IOS_XR_pbr_bng_cfg.BngPbr",
("Cisco-IOS-XR-pbr-bng-cfg", "bng-pbr"): "Cisco_IOS_XR_pbr_bng_cfg.BngPbr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-oper", "pbr"): "Cisco_IOS_XR_pbr_oper.Pbr",
("Cisco-IOS-XR-pbr-oper", "pbr"): "Cisco_IOS_XR_pbr_oper.Pbr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-vrf-policy-cfg", "vrf-policy"): "Cisco_IOS_XR_pbr_vrf_policy_cfg.VrfPolicy",
("Cisco-IOS-XR-pbr-vrf-policy-cfg", "vrf-policy"): "Cisco_IOS_XR_pbr_vrf_policy_cfg.VrfPolicy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-vservice-ea-oper", "service-function-chaining"): "Cisco_IOS_XR_pbr_vservice_ea_oper.ServiceFunctionChaining",
("Cisco-IOS-XR-pbr-vservice-ea-oper", "service-function-chaining"): "Cisco_IOS_XR_pbr_vservice_ea_oper.ServiceFunctionChaining",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-vservice-mgr-oper", "global-service-function-chaining"): "Cisco_IOS_XR_pbr_vservice_mgr_oper.GlobalServiceFunctionChaining",
("Cisco-IOS-XR-pbr-vservice-mgr-oper", "global-service-function-chaining"): "Cisco_IOS_XR_pbr_vservice_mgr_oper.GlobalServiceFunctionChaining",
("http://cisco.com/ns/yang/Cisco-IOS-XR-perf-meas-cfg", "performance-measurement"): "Cisco_IOS_XR_perf_meas_cfg.PerformanceMeasurement",
("Cisco-IOS-XR-perf-meas-cfg", "performance-measurement"): "Cisco_IOS_XR_perf_meas_cfg.PerformanceMeasurement",
("http://cisco.com/ns/yang/Cisco-IOS-XR-perf-meas-oper", "performance-measurement"): "Cisco_IOS_XR_perf_meas_oper.PerformanceMeasurement",
("Cisco-IOS-XR-perf-meas-oper", "performance-measurement"): "Cisco_IOS_XR_perf_meas_oper.PerformanceMeasurement",
("http://cisco.com/ns/yang/Cisco-IOS-XR-perf-meas-oper", "performance-measurement-responder"): "Cisco_IOS_XR_perf_meas_oper.PerformanceMeasurementResponder",
("Cisco-IOS-XR-perf-meas-oper", "performance-measurement-responder"): "Cisco_IOS_XR_perf_meas_oper.PerformanceMeasurementResponder",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pfi-im-cmd-ctrlr-oper", "controllers"): "Cisco_IOS_XR_pfi_im_cmd_ctrlr_oper.Controllers",
("Cisco-IOS-XR-pfi-im-cmd-ctrlr-oper", "controllers"): "Cisco_IOS_XR_pfi_im_cmd_ctrlr_oper.Controllers",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pfi-im-cmd-oper", "interfaces"): "Cisco_IOS_XR_pfi_im_cmd_oper.Interfaces",
("Cisco-IOS-XR-pfi-im-cmd-oper", "interfaces"): "Cisco_IOS_XR_pfi_im_cmd_oper.Interfaces",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pfm-oper", "platform-fault-manager"): "Cisco_IOS_XR_pfm_oper.PlatformFaultManager",
("Cisco-IOS-XR-pfm-oper", "platform-fault-manager"): "Cisco_IOS_XR_pfm_oper.PlatformFaultManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-plat-chas-invmgr-ng-oper", "platform"): "Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform",
("Cisco-IOS-XR-plat-chas-invmgr-ng-oper", "platform"): "Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform",
("http://cisco.com/ns/yang/Cisco-IOS-XR-plat-chas-invmgr-ng-oper", "platform-inventory"): "Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory",
("Cisco-IOS-XR-plat-chas-invmgr-ng-oper", "platform-inventory"): "Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-plat-chas-invmgr-oper", "platform"): "Cisco_IOS_XR_plat_chas_invmgr_oper.Platform",
("Cisco-IOS-XR-plat-chas-invmgr-oper", "platform"): "Cisco_IOS_XR_plat_chas_invmgr_oper.Platform",
("http://cisco.com/ns/yang/Cisco-IOS-XR-plat-chas-invmgr-oper", "platform-inventory"): "Cisco_IOS_XR_plat_chas_invmgr_oper.PlatformInventory",
("Cisco-IOS-XR-plat-chas-invmgr-oper", "platform-inventory"): "Cisco_IOS_XR_plat_chas_invmgr_oper.PlatformInventory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pmengine-oper", "performance-management"): "Cisco_IOS_XR_pmengine_oper.PerformanceManagement",
("Cisco-IOS-XR-pmengine-oper", "performance-management"): "Cisco_IOS_XR_pmengine_oper.PerformanceManagement",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pmengine-oper", "performance-management-history"): "Cisco_IOS_XR_pmengine_oper.PerformanceManagementHistory",
("Cisco-IOS-XR-pmengine-oper", "performance-management-history"): "Cisco_IOS_XR_pmengine_oper.PerformanceManagementHistory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-policy-repository-cfg", "routing-policy"): "Cisco_IOS_XR_policy_repository_cfg.RoutingPolicy",
("Cisco-IOS-XR-policy-repository-cfg", "routing-policy"): "Cisco_IOS_XR_policy_repository_cfg.RoutingPolicy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-policy-repository-oper", "routing-policy"): "Cisco_IOS_XR_policy_repository_oper.RoutingPolicy",
("Cisco-IOS-XR-policy-repository-oper", "routing-policy"): "Cisco_IOS_XR_policy_repository_oper.RoutingPolicy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-policy-repository-oper", "routing-policy-shadow"): "Cisco_IOS_XR_policy_repository_oper.RoutingPolicyShadow",
("Cisco-IOS-XR-policy-repository-oper", "routing-policy-shadow"): "Cisco_IOS_XR_policy_repository_oper.RoutingPolicyShadow",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ea-oper", "pppea"): "Cisco_IOS_XR_ppp_ea_oper.Pppea",
("Cisco-IOS-XR-ppp-ea-oper", "pppea"): "Cisco_IOS_XR_ppp_ea_oper.Pppea",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-oper", "ppp"): "Cisco_IOS_XR_ppp_ma_oper.Ppp",
("Cisco-IOS-XR-ppp-ma-oper", "ppp"): "Cisco_IOS_XR_ppp_ma_oper.Ppp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-ssrp-cfg", "ssrp"): "Cisco_IOS_XR_ppp_ma_ssrp_cfg.Ssrp",
("Cisco-IOS-XR-ppp-ma-ssrp-cfg", "ssrp"): "Cisco_IOS_XR_ppp_ma_ssrp_cfg.Ssrp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-syslog-cfg", "ppp"): "Cisco_IOS_XR_ppp_ma_syslog_cfg.Ppp",
("Cisco-IOS-XR-ppp-ma-syslog-cfg", "ppp"): "Cisco_IOS_XR_ppp_ma_syslog_cfg.Ppp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-pppoe-ea-oper", "pppoe-ea"): "Cisco_IOS_XR_pppoe_ea_oper.PppoeEa",
("Cisco-IOS-XR-pppoe-ea-oper", "pppoe-ea"): "Cisco_IOS_XR_pppoe_ea_oper.PppoeEa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-prm-hwmod-cfg", "hardware-module"): "Cisco_IOS_XR_prm_hwmod_cfg.HardwareModule",
("Cisco-IOS-XR-prm-hwmod-cfg", "hardware-module"): "Cisco_IOS_XR_prm_hwmod_cfg.HardwareModule",
("http://cisco.com/ns/yang/Cisco-IOS-XR-prm-hwmod-profile-cfg", "hardware-module"): "Cisco_IOS_XR_prm_hwmod_profile_cfg.HardwareModule",
("Cisco-IOS-XR-prm-hwmod-profile-cfg", "hardware-module"): "Cisco_IOS_XR_prm_hwmod_profile_cfg.HardwareModule",
("http://cisco.com/ns/yang/Cisco-IOS-XR-prm-hwmod-sr-cfg", "hardware-module"): "Cisco_IOS_XR_prm_hwmod_sr_cfg.HardwareModule",
("Cisco-IOS-XR-prm-hwmod-sr-cfg", "hardware-module"): "Cisco_IOS_XR_prm_hwmod_sr_cfg.HardwareModule",
("http://cisco.com/ns/yang/Cisco-IOS-XR-prm-server-oper", "hardware-module"): "Cisco_IOS_XR_prm_server_oper.HardwareModule",
("Cisco-IOS-XR-prm-server-oper", "hardware-module"): "Cisco_IOS_XR_prm_server_oper.HardwareModule",
("http://cisco.com/ns/yang/Cisco-IOS-XR-prm-server-oper", "prm"): "Cisco_IOS_XR_prm_server_oper.Prm",
("Cisco-IOS-XR-prm-server-oper", "prm"): "Cisco_IOS_XR_prm_server_oper.Prm",
("http://cisco.com/ns/yang/Cisco-IOS-XR-procfind-oper", "proc-distribution"): "Cisco_IOS_XR_procfind_oper.ProcDistribution",
("Cisco-IOS-XR-procfind-oper", "proc-distribution"): "Cisco_IOS_XR_procfind_oper.ProcDistribution",
("http://cisco.com/ns/yang/Cisco-IOS-XR-procmem-oper", "processes-memory"): "Cisco_IOS_XR_procmem_oper.ProcessesMemory",
("Cisco-IOS-XR-procmem-oper", "processes-memory"): "Cisco_IOS_XR_procmem_oper.ProcessesMemory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ptp-cfg", "ptp"): "Cisco_IOS_XR_ptp_cfg.Ptp",
("Cisco-IOS-XR-ptp-cfg", "ptp"): "Cisco_IOS_XR_ptp_cfg.Ptp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-ptp-oper", "ptp"): "Cisco_IOS_XR_ptp_oper.Ptp",
("Cisco-IOS-XR-ptp-oper", "ptp"): "Cisco_IOS_XR_ptp_oper.Ptp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-qos-ma-cfg", "qos"): "Cisco_IOS_XR_qos_ma_cfg.Qos",
("Cisco-IOS-XR-qos-ma-cfg", "qos"): "Cisco_IOS_XR_qos_ma_cfg.Qos",
("http://cisco.com/ns/yang/Cisco-IOS-XR-qos-ma-oper", "qos"): "Cisco_IOS_XR_qos_ma_oper.Qos",
("Cisco-IOS-XR-qos-ma-oper", "qos"): "Cisco_IOS_XR_qos_ma_oper.Qos",
("http://cisco.com/ns/yang/Cisco-IOS-XR-rgmgr-cfg", "redundancy-group-manager"): "Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager",
("Cisco-IOS-XR-rgmgr-cfg", "redundancy-group-manager"): "Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-rgmgr-oper", "redundancy-group-manager"): "Cisco_IOS_XR_rgmgr_oper.RedundancyGroupManager",
("Cisco-IOS-XR-rgmgr-oper", "redundancy-group-manager"): "Cisco_IOS_XR_rgmgr_oper.RedundancyGroupManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sdr-invmgr-diag-oper", "diag"): "Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag",
("Cisco-IOS-XR-sdr-invmgr-diag-oper", "diag"): "Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sdr-invmgr-oper", "sdr-inventory"): "Cisco_IOS_XR_sdr_invmgr_oper.SdrInventory",
("Cisco-IOS-XR-sdr-invmgr-oper", "sdr-inventory"): "Cisco_IOS_XR_sdr_invmgr_oper.SdrInventory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-cfg", "sr"): "Cisco_IOS_XR_segment_routing_ms_cfg.Sr",
("Cisco-IOS-XR-segment-routing-ms-cfg", "sr"): "Cisco_IOS_XR_segment_routing_ms_cfg.Sr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-common-cfg", "sr"): "Cisco_IOS_XR_segment_routing_ms_common_cfg.Sr",
("Cisco-IOS-XR-segment-routing-ms-common-cfg", "sr"): "Cisco_IOS_XR_segment_routing_ms_common_cfg.Sr",
("http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-oper", "standby-srms"): "Cisco_IOS_XR_segment_routing_ms_oper.StandbySrms",
("Cisco-IOS-XR-segment-routing-ms-oper", "standby-srms"): "Cisco_IOS_XR_segment_routing_ms_oper.StandbySrms",
("http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-oper", "srms"): "Cisco_IOS_XR_segment_routing_ms_oper.Srms",
("Cisco-IOS-XR-segment-routing-ms-oper", "srms"): "Cisco_IOS_XR_segment_routing_ms_oper.Srms",
("http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-oper", "srlb"): "Cisco_IOS_XR_segment_routing_ms_oper.Srlb",
("Cisco-IOS-XR-segment-routing-ms-oper", "srlb"): "Cisco_IOS_XR_segment_routing_ms_oper.Srlb",
("http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-srv6-oper", "srv6"): "Cisco_IOS_XR_segment_routing_srv6_oper.Srv6",
("Cisco-IOS-XR-segment-routing-srv6-oper", "srv6"): "Cisco_IOS_XR_segment_routing_srv6_oper.Srv6",
("http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-cfg", "host-names"): "Cisco_IOS_XR_shellutil_cfg.HostNames",
("Cisco-IOS-XR-shellutil-cfg", "host-names"): "Cisco_IOS_XR_shellutil_cfg.HostNames",
("http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-filesystem-oper", "file-system"): "Cisco_IOS_XR_shellutil_filesystem_oper.FileSystem",
("Cisco-IOS-XR-shellutil-filesystem-oper", "file-system"): "Cisco_IOS_XR_shellutil_filesystem_oper.FileSystem",
("http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-oper", "system-time"): "Cisco_IOS_XR_shellutil_oper.SystemTime",
("Cisco-IOS-XR-shellutil-oper", "system-time"): "Cisco_IOS_XR_shellutil_oper.SystemTime",
("http://cisco.com/ns/yang/Cisco-IOS-XR-show-fpd-loc-ng-oper", "show-fpd"): "Cisco_IOS_XR_show_fpd_loc_ng_oper.ShowFpd",
("Cisco-IOS-XR-show-fpd-loc-ng-oper", "show-fpd"): "Cisco_IOS_XR_show_fpd_loc_ng_oper.ShowFpd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-skp-qos-oper", "platform-qos"): "Cisco_IOS_XR_skp_qos_oper.PlatformQos",
("Cisco-IOS-XR-skp-qos-oper", "platform-qos"): "Cisco_IOS_XR_skp_qos_oper.PlatformQos",
("http://cisco.com/ns/yang/Cisco-IOS-XR-skp-qos-oper", "platform-qos-ea"): "Cisco_IOS_XR_skp_qos_oper.PlatformQosEa",
("Cisco-IOS-XR-skp-qos-oper", "platform-qos-ea"): "Cisco_IOS_XR_skp_qos_oper.PlatformQosEa",
("http://cisco.com/ns/yang/Cisco-IOS-XR-skywarp-netflow-oper", "net-flow"): "Cisco_IOS_XR_skywarp_netflow_oper.NetFlow",
("Cisco-IOS-XR-skywarp-netflow-oper", "net-flow"): "Cisco_IOS_XR_skywarp_netflow_oper.NetFlow",
("http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-agent-cfg", "snmp"): "Cisco_IOS_XR_snmp_agent_cfg.Snmp",
("Cisco-IOS-XR-snmp-agent-cfg", "snmp"): "Cisco_IOS_XR_snmp_agent_cfg.Snmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-agent-cfg", "mib"): "Cisco_IOS_XR_snmp_agent_cfg.Mib",
("Cisco-IOS-XR-snmp-agent-cfg", "mib"): "Cisco_IOS_XR_snmp_agent_cfg.Mib",
("http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-agent-oper", "snmp"): "Cisco_IOS_XR_snmp_agent_oper.Snmp",
("Cisco-IOS-XR-snmp-agent-oper", "snmp"): "Cisco_IOS_XR_snmp_agent_oper.Snmp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-spirit-corehelper-cfg", "exception"): "Cisco_IOS_XR_spirit_corehelper_cfg.Exception",
("Cisco-IOS-XR-spirit-corehelper-cfg", "exception"): "Cisco_IOS_XR_spirit_corehelper_cfg.Exception",
("http://cisco.com/ns/yang/Cisco-IOS-XR-spirit-install-instmgr-oper", "software-install"): "Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall",
("Cisco-IOS-XR-spirit-install-instmgr-oper", "software-install"): "Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sse-span-oper", "ssespan"): "Cisco_IOS_XR_sse_span_oper.Ssespan",
("Cisco-IOS-XR-sse-span-oper", "ssespan"): "Cisco_IOS_XR_sse_span_oper.Ssespan",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-accounting-cfg", "subscriber-accounting"): "Cisco_IOS_XR_subscriber_accounting_cfg.SubscriberAccounting",
("Cisco-IOS-XR-subscriber-accounting-cfg", "subscriber-accounting"): "Cisco_IOS_XR_subscriber_accounting_cfg.SubscriberAccounting",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-accounting-oper", "subscriber-accounting"): "Cisco_IOS_XR_subscriber_accounting_oper.SubscriberAccounting",
("Cisco-IOS-XR-subscriber-accounting-oper", "subscriber-accounting"): "Cisco_IOS_XR_subscriber_accounting_oper.SubscriberAccounting",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg", "dynamic-template"): "Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate",
("Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg", "dynamic-template"): "Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-ipsub-oper", "ip-subscriber"): "Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber",
("Cisco-IOS-XR-subscriber-ipsub-oper", "ip-subscriber"): "Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-pppoe-ma-gbl-cfg", "pppoe-cfg"): "Cisco_IOS_XR_subscriber_pppoe_ma_gbl_cfg.PppoeCfg",
("Cisco-IOS-XR-subscriber-pppoe-ma-gbl-cfg", "pppoe-cfg"): "Cisco_IOS_XR_subscriber_pppoe_ma_gbl_cfg.PppoeCfg",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-pppoe-ma-oper", "pppoe"): "Cisco_IOS_XR_subscriber_pppoe_ma_oper.Pppoe",
("Cisco-IOS-XR-subscriber-pppoe-ma-oper", "pppoe"): "Cisco_IOS_XR_subscriber_pppoe_ma_oper.Pppoe",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-session-mon-oper", "session-mon"): "Cisco_IOS_XR_subscriber_session_mon_oper.SessionMon",
("Cisco-IOS-XR-subscriber-session-mon-oper", "session-mon"): "Cisco_IOS_XR_subscriber_session_mon_oper.SessionMon",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-srg-cfg", "subscriber-redundancy"): "Cisco_IOS_XR_subscriber_srg_cfg.SubscriberRedundancy",
("Cisco-IOS-XR-subscriber-srg-cfg", "subscriber-redundancy"): "Cisco_IOS_XR_subscriber_srg_cfg.SubscriberRedundancy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-srg-oper", "subscriber-redundancy-manager"): "Cisco_IOS_XR_subscriber_srg_oper.SubscriberRedundancyManager",
("Cisco-IOS-XR-subscriber-srg-oper", "subscriber-redundancy-manager"): "Cisco_IOS_XR_subscriber_srg_oper.SubscriberRedundancyManager",
("http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-srg-oper", "subscriber-redundancy-agent"): "Cisco_IOS_XR_subscriber_srg_oper.SubscriberRedundancyAgent",
("Cisco-IOS-XR-subscriber-srg-oper", "subscriber-redundancy-agent"): "Cisco_IOS_XR_subscriber_srg_oper.SubscriberRedundancyAgent",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-alarm-mgr", "alarm_mgr"): "Cisco_IOS_XR_sysadmin_alarm_mgr.AlarmMgr",
("Cisco-IOS-XR-sysadmin-alarm-mgr", "alarm_mgr"): "Cisco_IOS_XR_sysadmin_alarm_mgr.AlarmMgr",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-asr9k-envmon-ui", "environment"): "Cisco_IOS_XR_sysadmin_asr9k_envmon_ui.Environment",
("Cisco-IOS-XR-sysadmin-asr9k-envmon-ui", "environment"): "Cisco_IOS_XR_sysadmin_asr9k_envmon_ui.Environment",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-asr9k-envmon-ui", "power-mgmt"): "Cisco_IOS_XR_sysadmin_asr9k_envmon_ui.PowerMgmt",
("Cisco-IOS-XR-sysadmin-asr9k-envmon-ui", "power-mgmt"): "Cisco_IOS_XR_sysadmin_asr9k_envmon_ui.PowerMgmt",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-clear-ASR9K", "clear"): "Cisco_IOS_XR_sysadmin_clear_asr9k.Clear",
("Cisco-IOS-XR-sysadmin-clear-asr9k", "clear"): "Cisco_IOS_XR_sysadmin_clear_asr9k.Clear",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "node-inventory"): "Cisco_IOS_XR_sysadmin_cm.NodeInventory",
("Cisco-IOS-XR-sysadmin-cm", "node-inventory"): "Cisco_IOS_XR_sysadmin_cm.NodeInventory",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "card-inventory"): "Cisco_IOS_XR_sysadmin_cm.CardInventory",
("Cisco-IOS-XR-sysadmin-cm", "card-inventory"): "Cisco_IOS_XR_sysadmin_cm.CardInventory",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "rack-inventory"): "Cisco_IOS_XR_sysadmin_cm.RackInventory",
("Cisco-IOS-XR-sysadmin-cm", "rack-inventory"): "Cisco_IOS_XR_sysadmin_cm.RackInventory",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "system-service-inventory"): "Cisco_IOS_XR_sysadmin_cm.SystemServiceInventory",
("Cisco-IOS-XR-sysadmin-cm", "system-service-inventory"): "Cisco_IOS_XR_sysadmin_cm.SystemServiceInventory",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "rack-service-inventory"): "Cisco_IOS_XR_sysadmin_cm.RackServiceInventory",
("Cisco-IOS-XR-sysadmin-cm", "rack-service-inventory"): "Cisco_IOS_XR_sysadmin_cm.RackServiceInventory",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "sdr-inventory"): "Cisco_IOS_XR_sysadmin_cm.SdrInventory",
("Cisco-IOS-XR-sysadmin-cm", "sdr-inventory"): "Cisco_IOS_XR_sysadmin_cm.SdrInventory",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "leader-statistics"): "Cisco_IOS_XR_sysadmin_cm.LeaderStatistics",
("Cisco-IOS-XR-sysadmin-cm", "leader-statistics"): "Cisco_IOS_XR_sysadmin_cm.LeaderStatistics",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "topology-neighbors"): "Cisco_IOS_XR_sysadmin_cm.TopologyNeighbors",
("Cisco-IOS-XR-sysadmin-cm", "topology-neighbors"): "Cisco_IOS_XR_sysadmin_cm.TopologyNeighbors",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm", "placement"): "Cisco_IOS_XR_sysadmin_cm.Placement",
("Cisco-IOS-XR-sysadmin-cm", "placement"): "Cisco_IOS_XR_sysadmin_cm.Placement",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-controllers-ASR9K", "controller"): "Cisco_IOS_XR_sysadmin_controllers_asr9k.Controller",
("Cisco-IOS-XR-sysadmin-controllers-asr9k", "controller"): "Cisco_IOS_XR_sysadmin_controllers_asr9k.Controller",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-debug-trace", "config"): "Cisco_IOS_XR_sysadmin_debug_trace.Config",
("Cisco-IOS-XR-sysadmin-debug-trace", "config"): "Cisco_IOS_XR_sysadmin_debug_trace.Config",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ds", "services"): "Cisco_IOS_XR_sysadmin_ds.Services",
("Cisco-IOS-XR-sysadmin-ds", "services"): "Cisco_IOS_XR_sysadmin_ds.Services",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ds", "services-stats"): "Cisco_IOS_XR_sysadmin_ds.ServicesStats",
("Cisco-IOS-XR-sysadmin-ds", "services-stats"): "Cisco_IOS_XR_sysadmin_ds.ServicesStats",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-dumper", "exception"): "Cisco_IOS_XR_sysadmin_dumper.Exception",
("Cisco-IOS-XR-sysadmin-dumper", "exception"): "Cisco_IOS_XR_sysadmin_dumper.Exception",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-entity-mib", "ENTITY-MIB"): "Cisco_IOS_XR_sysadmin_entity_mib.ENTITYMIB",
("Cisco-IOS-XR-sysadmin-entity-mib", "ENTITY-MIB"): "Cisco_IOS_XR_sysadmin_entity_mib.ENTITYMIB",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-entity-sensor-mib", "CISCO-ENTITY-SENSOR-MIB"): "Cisco_IOS_XR_sysadmin_entity_sensor_mib.CISCOENTITYSENSORMIB",
("Cisco-IOS-XR-sysadmin-entity-sensor-mib", "CISCO-ENTITY-SENSOR-MIB"): "Cisco_IOS_XR_sysadmin_entity_sensor_mib.CISCOENTITYSENSORMIB",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-entity-state-mib", "ENTITY-STATE-MIB"): "Cisco_IOS_XR_sysadmin_entity_state_mib.ENTITYSTATEMIB",
("Cisco-IOS-XR-sysadmin-entity-state-mib", "ENTITY-STATE-MIB"): "Cisco_IOS_XR_sysadmin_entity_state_mib.ENTITYSTATEMIB",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-envmon-ui", "environment"): "Cisco_IOS_XR_sysadmin_envmon_ui.Environment",
("Cisco-IOS-XR-sysadmin-envmon-ui", "environment"): "Cisco_IOS_XR_sysadmin_envmon_ui.Environment",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-envmon-ui", "power-mgmt"): "Cisco_IOS_XR_sysadmin_envmon_ui.PowerMgmt",
("Cisco-IOS-XR-sysadmin-envmon-ui", "power-mgmt"): "Cisco_IOS_XR_sysadmin_envmon_ui.PowerMgmt",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-external-usb", "external-usb"): "Cisco_IOS_XR_sysadmin_external_usb.ExternalUsb",
("Cisco-IOS-XR-sysadmin-external-usb", "external-usb"): "Cisco_IOS_XR_sysadmin_external_usb.ExternalUsb",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fm", "fm"): "Cisco_IOS_XR_sysadmin_fm.Fm",
("Cisco-IOS-XR-sysadmin-fm", "fm"): "Cisco_IOS_XR_sysadmin_fm.Fm",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd", "fpd"): "Cisco_IOS_XR_sysadmin_fpd_infra_cli_fpd.Fpd",
("Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd", "fpd"): "Cisco_IOS_XR_sysadmin_fpd_infra_cli_fpd.Fpd",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd-service", "location"): "Cisco_IOS_XR_sysadmin_fpd_infra_cli_fpd_service.Location",
("Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd-service", "location"): "Cisco_IOS_XR_sysadmin_fpd_infra_cli_fpd_service.Location",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpdserv-ctrace", "fpdserv"): "Cisco_IOS_XR_sysadmin_fpd_infra_cli_fpdserv_ctrace.Fpdserv",
("Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpdserv-ctrace", "fpdserv"): "Cisco_IOS_XR_sysadmin_fpd_infra_cli_fpdserv_ctrace.Fpdserv",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-hw-module", "hw-module"): "Cisco_IOS_XR_sysadmin_hw_module.HwModule",
("Cisco-IOS-XR-sysadmin-hw-module", "hw-module"): "Cisco_IOS_XR_sysadmin_hw_module.HwModule",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-hw-module-xrv9k", "hw-module"): "Cisco_IOS_XR_sysadmin_hw_module_xrv9k.HwModule",
("Cisco-IOS-XR-sysadmin-hw-module-xrv9k", "hw-module"): "Cisco_IOS_XR_sysadmin_hw_module_xrv9k.HwModule",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-instmgr-oper", "install"): "Cisco_IOS_XR_sysadmin_instmgr_oper.Install",
("Cisco-IOS-XR-sysadmin-instmgr-oper", "install"): "Cisco_IOS_XR_sysadmin_instmgr_oper.Install",
("http://cisco.com/calvados/Cisco-IOS-XR-sysadmin-issu", "issu_action"): "Cisco_IOS_XR_sysadmin_issu.IssuAction",
("Cisco-IOS-XR-sysadmin-issu", "issu_action"): "Cisco_IOS_XR_sysadmin_issu.IssuAction",
("http://cisco.com/calvados/Cisco-IOS-XR-sysadmin-issu", "issu"): "Cisco_IOS_XR_sysadmin_issu.Issu",
("Cisco-IOS-XR-sysadmin-issu", "issu"): "Cisco_IOS_XR_sysadmin_issu.Issu",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-led-mgr-ui", "led"): "Cisco_IOS_XR_sysadmin_led_mgr_ui.Led",
("Cisco-IOS-XR-sysadmin-led-mgr-ui", "led"): "Cisco_IOS_XR_sysadmin_led_mgr_ui.Led",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-nto-misc-set-hostname", "hostname"): "Cisco_IOS_XR_sysadmin_nto_misc_set_hostname.Hostname",
("Cisco-IOS-XR-sysadmin-nto-misc-set-hostname", "hostname"): "Cisco_IOS_XR_sysadmin_nto_misc_set_hostname.Hostname",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-obfl", "obfl"): "Cisco_IOS_XR_sysadmin_obfl.Obfl",
("Cisco-IOS-XR-sysadmin-obfl", "obfl"): "Cisco_IOS_XR_sysadmin_obfl.Obfl",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-pm", "processes"): "Cisco_IOS_XR_sysadmin_pm.Processes",
("Cisco-IOS-XR-sysadmin-pm", "processes"): "Cisco_IOS_XR_sysadmin_pm.Processes",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-pm", "process-manager"): "Cisco_IOS_XR_sysadmin_pm.ProcessManager",
("Cisco-IOS-XR-sysadmin-pm", "process-manager"): "Cisco_IOS_XR_sysadmin_pm.ProcessManager",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-pm", "pm"): "Cisco_IOS_XR_sysadmin_pm.Pm",
("Cisco-IOS-XR-sysadmin-pm", "pm"): "Cisco_IOS_XR_sysadmin_pm.Pm",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-rvm-mgr", "RVM"): "Cisco_IOS_XR_sysadmin_rvm_mgr.RVM",
("Cisco-IOS-XR-sysadmin-rvm-mgr", "RVM"): "Cisco_IOS_XR_sysadmin_rvm_mgr.RVM",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sdr-mgr", "sdr-config"): "Cisco_IOS_XR_sysadmin_sdr_mgr.SdrConfig",
("Cisco-IOS-XR-sysadmin-sdr-mgr", "sdr-config"): "Cisco_IOS_XR_sysadmin_sdr_mgr.SdrConfig",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sdr-mgr", "sdr-manager"): "Cisco_IOS_XR_sysadmin_sdr_mgr.SdrManager",
("Cisco-IOS-XR-sysadmin-sdr-mgr", "sdr-manager"): "Cisco_IOS_XR_sysadmin_sdr_mgr.SdrManager",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sdr-mgr", "sdr-operation"): "Cisco_IOS_XR_sysadmin_sdr_mgr.SdrOperation",
("Cisco-IOS-XR-sysadmin-sdr-mgr", "sdr-operation"): "Cisco_IOS_XR_sysadmin_sdr_mgr.SdrOperation",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sdr-mgr", "private-sdr"): "Cisco_IOS_XR_sysadmin_sdr_mgr.PrivateSdr",
("Cisco-IOS-XR-sysadmin-sdr-mgr", "private-sdr"): "Cisco_IOS_XR_sysadmin_sdr_mgr.PrivateSdr",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-services", "service"): "Cisco_IOS_XR_sysadmin_services.Service",
("Cisco-IOS-XR-sysadmin-services", "service"): "Cisco_IOS_XR_sysadmin_services.Service",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ship", "stat"): "Cisco_IOS_XR_sysadmin_ship.Stat",
("Cisco-IOS-XR-sysadmin-ship", "stat"): "Cisco_IOS_XR_sysadmin_ship.Stat",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-diag", "diag"): "Cisco_IOS_XR_sysadmin_show_diag.Diag",
("Cisco-IOS-XR-sysadmin-show-diag", "diag"): "Cisco_IOS_XR_sysadmin_show_diag.Diag",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-inv", "inventory"): "Cisco_IOS_XR_sysadmin_show_inv.Inventory",
("Cisco-IOS-XR-sysadmin-show-inv", "inventory"): "Cisco_IOS_XR_sysadmin_show_inv.Inventory",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-cm", "cm"): "Cisco_IOS_XR_sysadmin_show_trace_cm.Cm",
("Cisco-IOS-XR-sysadmin-show-trace-cm", "cm"): "Cisco_IOS_XR_sysadmin_show_trace_cm.Cm",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-debug-agent", "debug_agent"): "Cisco_IOS_XR_sysadmin_show_trace_debug_agent.DebugAgent",
("Cisco-IOS-XR-sysadmin-show-trace-debug-agent", "debug_agent"): "Cisco_IOS_XR_sysadmin_show_trace_debug_agent.DebugAgent",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-instagt", "instagt"): "Cisco_IOS_XR_sysadmin_show_trace_instagt.Instagt",
("Cisco-IOS-XR-sysadmin-show-trace-instagt", "instagt"): "Cisco_IOS_XR_sysadmin_show_trace_instagt.Instagt",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-instmgr", "instmgr"): "Cisco_IOS_XR_sysadmin_show_trace_instmgr.Instmgr",
("Cisco-IOS-XR-sysadmin-show-trace-instmgr", "instmgr"): "Cisco_IOS_XR_sysadmin_show_trace_instmgr.Instmgr",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-vmm", "vmm"): "Cisco_IOS_XR_sysadmin_show_trace_vmm.Vmm",
("Cisco-IOS-XR-sysadmin-show-trace-vmm", "vmm"): "Cisco_IOS_XR_sysadmin_show_trace_vmm.Vmm",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sm", "oper"): "Cisco_IOS_XR_sysadmin_sm.Oper",
("Cisco-IOS-XR-sysadmin-sm", "oper"): "Cisco_IOS_XR_sysadmin_sm.Oper",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sm", "config"): "Cisco_IOS_XR_sysadmin_sm.Config",
("Cisco-IOS-XR-sysadmin-sm", "config"): "Cisco_IOS_XR_sysadmin_sm.Config",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sm", "actions"): "Cisco_IOS_XR_sysadmin_sm.Actions",
("Cisco-IOS-XR-sysadmin-sm", "actions"): "Cisco_IOS_XR_sysadmin_sm.Actions",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-system", "mgmt"): "Cisco_IOS_XR_sysadmin_system.Mgmt",
("Cisco-IOS-XR-sysadmin-system", "mgmt"): "Cisco_IOS_XR_sysadmin_system.Mgmt",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-tacacs-tacacs-server", "tacacs-server"): "Cisco_IOS_XR_sysadmin_tacacs_tacacs_server.TacacsServer",
("Cisco-IOS-XR-sysadmin-tacacs-tacacs-server", "tacacs-server"): "Cisco_IOS_XR_sysadmin_tacacs_tacacs_server.TacacsServer",
("http://cisco.com/calvados/Cisco-IOS-XR-sysadmin-time-of-day-timezone", "clock"): "Cisco_IOS_XR_sysadmin_time_of_day_timezone.Clock",
("Cisco-IOS-XR-sysadmin-time-of-day-timezone", "clock"): "Cisco_IOS_XR_sysadmin_time_of_day_timezone.Clock",
("http://cisco.com/calvados/Cisco-IOS-XR-sysadmin-time-of-day-timezone", "trace"): "Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace",
("Cisco-IOS-XR-sysadmin-time-of-day-timezone", "trace"): "Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-vm", "vm"): "Cisco_IOS_XR_sysadmin_vm.Vm",
("Cisco-IOS-XR-sysadmin-vm", "vm"): "Cisco_IOS_XR_sysadmin_vm.Vm",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-vm-mgr", "VM"): "Cisco_IOS_XR_sysadmin_vm_mgr.VM",
("Cisco-IOS-XR-sysadmin-vm-mgr", "VM"): "Cisco_IOS_XR_sysadmin_vm_mgr.VM",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-wdmon", "wdmon"): "Cisco_IOS_XR_sysadmin_wdmon.Wdmon",
("Cisco-IOS-XR-sysadmin-wdmon", "wdmon"): "Cisco_IOS_XR_sysadmin_wdmon.Wdmon",
("http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-wdmon", "wdmon-info"): "Cisco_IOS_XR_sysadmin_wdmon.WdmonInfo",
("Cisco-IOS-XR-sysadmin-wdmon", "wdmon-info"): "Cisco_IOS_XR_sysadmin_wdmon.WdmonInfo",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sysdb-oper", "sysdb-connections"): "Cisco_IOS_XR_sysdb_oper.SysdbConnections",
("Cisco-IOS-XR-sysdb-oper", "sysdb-connections"): "Cisco_IOS_XR_sysdb_oper.SysdbConnections",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sysdb-oper", "sysdb"): "Cisco_IOS_XR_sysdb_oper.Sysdb",
("Cisco-IOS-XR-sysdb-oper", "sysdb"): "Cisco_IOS_XR_sysdb_oper.Sysdb",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sysmgr-cfg", "process-mandatory"): "Cisco_IOS_XR_sysmgr_cfg.ProcessMandatory",
("Cisco-IOS-XR-sysmgr-cfg", "process-mandatory"): "Cisco_IOS_XR_sysmgr_cfg.ProcessMandatory",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sysmgr-cfg", "process-single-crash"): "Cisco_IOS_XR_sysmgr_cfg.ProcessSingleCrash",
("Cisco-IOS-XR-sysmgr-cfg", "process-single-crash"): "Cisco_IOS_XR_sysmgr_cfg.ProcessSingleCrash",
("http://cisco.com/ns/yang/Cisco-IOS-XR-sysmgr-oper", "system-process"): "Cisco_IOS_XR_sysmgr_oper.SystemProcess",
("Cisco-IOS-XR-sysmgr-oper", "system-process"): "Cisco_IOS_XR_sysmgr_oper.SystemProcess",
("http://cisco.com/ns/yang/Cisco-IOS-XR-telemetry-model-driven-cfg", "telemetry-model-driven"): "Cisco_IOS_XR_telemetry_model_driven_cfg.TelemetryModelDriven",
("Cisco-IOS-XR-telemetry-model-driven-cfg", "telemetry-model-driven"): "Cisco_IOS_XR_telemetry_model_driven_cfg.TelemetryModelDriven",
("http://cisco.com/ns/yang/Cisco-IOS-XR-telemetry-model-driven-oper", "telemetry-model-driven"): "Cisco_IOS_XR_telemetry_model_driven_oper.TelemetryModelDriven",
("Cisco-IOS-XR-telemetry-model-driven-oper", "telemetry-model-driven"): "Cisco_IOS_XR_telemetry_model_driven_oper.TelemetryModelDriven",
("http://cisco.com/ns/yang/Cisco-IOS-XR-traffmon-netflow-cfg", "net-flow"): "Cisco_IOS_XR_traffmon_netflow_cfg.NetFlow",
("Cisco-IOS-XR-traffmon-netflow-cfg", "net-flow"): "Cisco_IOS_XR_traffmon_netflow_cfg.NetFlow",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tty-management-cmd-oper", "show-users"): "Cisco_IOS_XR_tty_management_cmd_oper.ShowUsers",
("Cisco-IOS-XR-tty-management-cmd-oper", "show-users"): "Cisco_IOS_XR_tty_management_cmd_oper.ShowUsers",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tty-server-cfg", "tty"): "Cisco_IOS_XR_tty_server_cfg.Tty",
("Cisco-IOS-XR-tty-server-cfg", "tty"): "Cisco_IOS_XR_tty_server_cfg.Tty",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tty-server-oper", "tty"): "Cisco_IOS_XR_tty_server_oper.Tty",
("Cisco-IOS-XR-tty-server-oper", "tty"): "Cisco_IOS_XR_tty_server_oper.Tty",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tty-vty-cfg", "vty"): "Cisco_IOS_XR_tty_vty_cfg.Vty",
("Cisco-IOS-XR-tty-vty-cfg", "vty"): "Cisco_IOS_XR_tty_vty_cfg.Vty",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-l2tun-cfg", "l2tp"): "Cisco_IOS_XR_tunnel_l2tun_cfg.L2tp",
("Cisco-IOS-XR-tunnel-l2tun-cfg", "l2tp"): "Cisco_IOS_XR_tunnel_l2tun_cfg.L2tp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-l2tun-oper", "l2tp"): "Cisco_IOS_XR_tunnel_l2tun_oper.L2tp",
("Cisco-IOS-XR-tunnel-l2tun-oper", "l2tp"): "Cisco_IOS_XR_tunnel_l2tun_oper.L2tp",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-l2tun-oper", "l2tpv2"): "Cisco_IOS_XR_tunnel_l2tun_oper.L2tpv2",
("Cisco-IOS-XR-tunnel-l2tun-oper", "l2tpv2"): "Cisco_IOS_XR_tunnel_l2tun_oper.L2tpv2",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-nve-oper", "nve"): "Cisco_IOS_XR_tunnel_nve_oper.Nve",
("Cisco-IOS-XR-tunnel-nve-oper", "nve"): "Cisco_IOS_XR_tunnel_nve_oper.Nve",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-vpdn-cfg", "vpdn"): "Cisco_IOS_XR_tunnel_vpdn_cfg.Vpdn",
("Cisco-IOS-XR-tunnel-vpdn-cfg", "vpdn"): "Cisco_IOS_XR_tunnel_vpdn_cfg.Vpdn",
("http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-vpdn-oper", "vpdn"): "Cisco_IOS_XR_tunnel_vpdn_oper.Vpdn",
("Cisco-IOS-XR-tunnel-vpdn-oper", "vpdn"): "Cisco_IOS_XR_tunnel_vpdn_oper.Vpdn",
("http://cisco.com/ns/yang/Cisco-IOS-XR-upgrade-fpd-admin-cfg", "fpd"): "Cisco_IOS_XR_upgrade_fpd_admin_cfg.Fpd",
("Cisco-IOS-XR-upgrade-fpd-admin-cfg", "fpd"): "Cisco_IOS_XR_upgrade_fpd_admin_cfg.Fpd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-upgrade-fpd-oper", "fpd"): "Cisco_IOS_XR_upgrade_fpd_oper.Fpd_",
("Cisco-IOS-XR-upgrade-fpd-oper", "fpd"): "Cisco_IOS_XR_upgrade_fpd_oper.Fpd_",
("http://cisco.com/ns/yang/Cisco-IOS-XR-vservice-cfg", "vservice"): "Cisco_IOS_XR_vservice_cfg.Vservice",
("Cisco-IOS-XR-vservice-cfg", "vservice"): "Cisco_IOS_XR_vservice_cfg.Vservice",
("http://cisco.com/ns/yang/Cisco-IOS-XR-wanphy-ui-oper", "wanphy"): "Cisco_IOS_XR_wanphy_ui_oper.Wanphy",
("Cisco-IOS-XR-wanphy-ui-oper", "wanphy"): "Cisco_IOS_XR_wanphy_ui_oper.Wanphy",
("http://cisco.com/ns/yang/Cisco-IOS-XR-watchd-cfg", "watchdog"): "Cisco_IOS_XR_watchd_cfg.Watchdog",
("Cisco-IOS-XR-watchd-cfg", "watchdog"): "Cisco_IOS_XR_watchd_cfg.Watchdog",
("http://cisco.com/ns/yang/Cisco-IOS-XR-watchd-cfg", "watchd"): "Cisco_IOS_XR_watchd_cfg.Watchd",
("Cisco-IOS-XR-watchd-cfg", "watchd"): "Cisco_IOS_XR_watchd_cfg.Watchd",
("http://cisco.com/ns/yang/Cisco-IOS-XR-wd-cfg", "watchdog"): "Cisco_IOS_XR_wd_cfg.Watchdog",
("Cisco-IOS-XR-wd-cfg", "watchdog"): "Cisco_IOS_XR_wd_cfg.Watchdog",
("http://cisco.com/ns/yang/Cisco-IOS-XR-wd-oper", "watchdog"): "Cisco_IOS_XR_wd_oper.Watchdog",
("Cisco-IOS-XR-wd-oper", "watchdog"): "Cisco_IOS_XR_wd_oper.Watchdog",
("http://cisco.com/ns/yang/Cisco-IOS-XR-wdsysmon-fd-oper", "system-monitoring"): "Cisco_IOS_XR_wdsysmon_fd_oper.SystemMonitoring",
("Cisco-IOS-XR-wdsysmon-fd-oper", "system-monitoring"): "Cisco_IOS_XR_wdsysmon_fd_oper.SystemMonitoring",
("http://tail-f.com/ns/mibs/SNMP-COMMUNITY-MIB/200308060000Z", "SNMP-COMMUNITY-MIB"): "SNMP_COMMUNITY_MIB.SNMPCOMMUNITYMIB",
("SNMP-COMMUNITY-MIB", "SNMP-COMMUNITY-MIB"): "SNMP_COMMUNITY_MIB.SNMPCOMMUNITYMIB",
("http://tail-f.com/ns/mibs/SNMP-FRAMEWORK-MIB/200210140000Z", "SNMP-FRAMEWORK-MIB"): "SNMP_FRAMEWORK_MIB.SNMPFRAMEWORKMIB",
("SNMP-FRAMEWORK-MIB", "SNMP-FRAMEWORK-MIB"): "SNMP_FRAMEWORK_MIB.SNMPFRAMEWORKMIB",
("http://tail-f.com/ns/mibs/SNMP-MPD-MIB/200210140000Z", "SNMP-MPD-MIB"): "SNMP_MPD_MIB.SNMPMPDMIB",
("SNMP-MPD-MIB", "SNMP-MPD-MIB"): "SNMP_MPD_MIB.SNMPMPDMIB",
("http://tail-f.com/ns/mibs/SNMP-NOTIFICATION-MIB/200210140000Z", "SNMP-NOTIFICATION-MIB"): "SNMP_NOTIFICATION_MIB.SNMPNOTIFICATIONMIB",
("SNMP-NOTIFICATION-MIB", "SNMP-NOTIFICATION-MIB"): "SNMP_NOTIFICATION_MIB.SNMPNOTIFICATIONMIB",
("http://tail-f.com/ns/mibs/SNMP-TARGET-MIB/200210140000Z", "SNMP-TARGET-MIB"): "SNMP_TARGET_MIB.SNMPTARGETMIB",
("SNMP-TARGET-MIB", "SNMP-TARGET-MIB"): "SNMP_TARGET_MIB.SNMPTARGETMIB",
("http://tail-f.com/ns/mibs/SNMP-USER-BASED-SM-MIB/200210160000Z", "SNMP-USER-BASED-SM-MIB"): "SNMP_USER_BASED_SM_MIB.SNMPUSERBASEDSMMIB",
("SNMP-USER-BASED-SM-MIB", "SNMP-USER-BASED-SM-MIB"): "SNMP_USER_BASED_SM_MIB.SNMPUSERBASEDSMMIB",
("http://tail-f.com/ns/mibs/SNMP-VIEW-BASED-ACM-MIB/200210160000Z", "SNMP-VIEW-BASED-ACM-MIB"): "SNMP_VIEW_BASED_ACM_MIB.SNMPVIEWBASEDACMMIB",
("SNMP-VIEW-BASED-ACM-MIB", "SNMP-VIEW-BASED-ACM-MIB"): "SNMP_VIEW_BASED_ACM_MIB.SNMPVIEWBASEDACMMIB",
("http://tail-f.com/ns/mibs/SNMPv2-MIB/200210160000Z", "SNMPv2-MIB"): "SNMPv2_MIB.SNMPv2MIB",
("SNMPv2-MIB", "SNMPv2-MIB"): "SNMPv2_MIB.SNMPv2MIB",
("urn:ietf:params:xml:ns:netmod:notification", "netconf"): "nc_notifications.Netconf",
("nc-notifications", "netconf"): "nc_notifications.Netconf",
("http://cisco.com/calvados/ntp", "ntp"): "ntp.Ntp",
("ntp", "ntp"): "ntp.Ntp",
("http://cisco.com/calvados/ntp", "clock-action"): "ntp.ClockAction",
("ntp", "clock-action"): "ntp.ClockAction",
("http://www.cisco.com/panini/calvados/opertest1", "oper"): "opertest1.Oper",
("opertest1", "oper"): "opertest1.Oper",
("http://www.cisco.com/panini/calvados/opertest1", "actions"): "opertest1.Actions",
("opertest1", "actions"): "opertest1.Actions",
("http://tail-f.com/ns/aaa/1.1", "aaa"): "tailf_aaa.Aaa",
("tailf-aaa", "aaa"): "tailf_aaa.Aaa",
("http://tail-f.com/ns/aaa/1.1", "alias"): "tailf_aaa.Alias",
("tailf-aaa", "alias"): "tailf_aaa.Alias",
("http://tail-f.com/ns/aaa/1.1", "session"): "tailf_aaa.Session",
("tailf-aaa", "session"): "tailf_aaa.Session",
("http://tail-f.com/ns/aaa/1.1", "user"): "tailf_aaa.User",
("tailf-aaa", "user"): "tailf_aaa.User",
("http://tail-f.com/yang/confd-monitoring", "confd-state"): "tailf_confd_monitoring.ConfdState",
("tailf-confd-monitoring", "confd-state"): "tailf_confd_monitoring.ConfdState",
("http://www.cisco.com/panini/calvados/valtest", "config"): "valtest.Config",
("valtest", "config"): "valtest.Config",
("http://cisco.com/panini/calvados/vplatform", "virtual-platform"): "vplatform.VirtualPlatform",
("vplatform", "virtual-platform"): "vplatform.VirtualPlatform",
}
NAMESPACE_LOOKUP = {
"CISCO-ENTITY-FRU-CONTROL-MIB": "http://tail-f.com/ns/mibs/CISCO-ENTITY-FRU-CONTROL-MIB/200311240000Z",
"Cisco-IOS-XR-Ethernet-SPAN-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-Ethernet-SPAN-cfg",
"Cisco-IOS-XR-Ethernet-SPAN-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-Ethernet-SPAN-datatypes",
"Cisco-IOS-XR-Ethernet-SPAN-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-Ethernet-SPAN-oper",
"Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg",
"Cisco-IOS-XR-Subscriber-infra-subdb-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-Subscriber-infra-subdb-oper",
"Cisco-IOS-XR-aaa-aaacore-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-aaacore-cfg",
"Cisco-IOS-XR-aaa-diameter-base-mib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-diameter-base-mib-cfg",
"Cisco-IOS-XR-aaa-diameter-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-diameter-cfg",
"Cisco-IOS-XR-aaa-diameter-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-diameter-oper",
"Cisco-IOS-XR-aaa-li-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-li-cfg",
"Cisco-IOS-XR-aaa-lib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-lib-cfg",
"Cisco-IOS-XR-aaa-lib-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-lib-datatypes",
"Cisco-IOS-XR-aaa-locald-admin-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-locald-admin-cfg",
"Cisco-IOS-XR-aaa-locald-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-locald-cfg",
"Cisco-IOS-XR-aaa-locald-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-locald-oper",
"Cisco-IOS-XR-aaa-nacm-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-nacm-cfg",
"Cisco-IOS-XR-aaa-nacm-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-nacm-oper",
"Cisco-IOS-XR-aaa-protocol-radius-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-protocol-radius-cfg",
"Cisco-IOS-XR-aaa-protocol-radius-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-protocol-radius-oper",
"Cisco-IOS-XR-aaa-tacacs-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-tacacs-cfg",
"Cisco-IOS-XR-aaa-tacacs-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-aaa-tacacs-oper",
"Cisco-IOS-XR-accounting-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-accounting-cfg",
"Cisco-IOS-XR-alarmgr-server-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-alarmgr-server-oper",
"Cisco-IOS-XR-ascii-ltrace-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ascii-ltrace-oper",
"Cisco-IOS-XR-asic-errors-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asic-errors-oper",
"Cisco-IOS-XR-asr9k-ep-port-mode-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-ep-port-mode-cfg",
"Cisco-IOS-XR-asr9k-fab-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-fab-cfg",
"Cisco-IOS-XR-asr9k-fia-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-fia-cfg",
"Cisco-IOS-XR-asr9k-fsi-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-fsi-oper",
"Cisco-IOS-XR-asr9k-lc-ethctrl-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-ethctrl-cfg",
"Cisco-IOS-XR-asr9k-lc-ethctrl-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-ethctrl-oper",
"Cisco-IOS-XR-asr9k-lc-fca-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-fca-oper",
"Cisco-IOS-XR-asr9k-lc-pwrglide-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lc-pwrglide-cfg",
"Cisco-IOS-XR-asr9k-lpts-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-lpts-oper",
"Cisco-IOS-XR-asr9k-netflow-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-netflow-oper",
"Cisco-IOS-XR-asr9k-np-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-np-oper",
"Cisco-IOS-XR-asr9k-prm-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-prm-cfg",
"Cisco-IOS-XR-asr9k-ptp-pd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-ptp-pd-cfg",
"Cisco-IOS-XR-asr9k-ptp-pd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-ptp-pd-oper",
"Cisco-IOS-XR-asr9k-qos-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-qos-oper",
"Cisco-IOS-XR-asr9k-sc-diag-admin-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-diag-admin-oper",
"Cisco-IOS-XR-asr9k-sc-diag-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-diag-oper",
"Cisco-IOS-XR-asr9k-sc-envmon-admin-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-envmon-admin-oper",
"Cisco-IOS-XR-asr9k-sc-envmon-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-envmon-oper",
"Cisco-IOS-XR-asr9k-sc-invmgr-admin-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-invmgr-admin-oper",
"Cisco-IOS-XR-asr9k-sc-invmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-sc-invmgr-oper",
"Cisco-IOS-XR-asr9k-xbar-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-asr9k-xbar-oper",
"Cisco-IOS-XR-atm-common-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-atm-common-datatypes",
"Cisco-IOS-XR-atm-vcm-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-atm-vcm-cfg",
"Cisco-IOS-XR-atm-vcm-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-atm-vcm-oper",
"Cisco-IOS-XR-bundlemgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-cfg",
"Cisco-IOS-XR-bundlemgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-bundlemgr-oper",
"Cisco-IOS-XR-call-home-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-call-home-cfg",
"Cisco-IOS-XR-cdp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-cdp-cfg",
"Cisco-IOS-XR-cdp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-cdp-oper",
"Cisco-IOS-XR-cfgmgr-rollback-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-cfgmgr-rollback-act",
"Cisco-IOS-XR-clear-counters-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-clear-counters-act",
"Cisco-IOS-XR-clns-isis-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-clns-isis-cfg",
"Cisco-IOS-XR-clns-isis-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-clns-isis-datatypes",
"Cisco-IOS-XR-clns-isis-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-clns-isis-oper",
"Cisco-IOS-XR-cmproxy-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-cmproxy-oper",
"Cisco-IOS-XR-common-acl-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-common-acl-datatypes",
"Cisco-IOS-XR-config-cfgmgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-config-cfgmgr-cfg",
"Cisco-IOS-XR-config-cfgmgr-exec-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-config-cfgmgr-exec-oper",
"Cisco-IOS-XR-config-cfgmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-config-cfgmgr-oper",
"Cisco-IOS-XR-config-mda-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-config-mda-cfg",
"Cisco-IOS-XR-config-mibs-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-config-mibs-cfg",
"Cisco-IOS-XR-config-valid-ccv-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-config-valid-ccv-cfg",
"Cisco-IOS-XR-config-valid-ccv-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-config-valid-ccv-oper",
"Cisco-IOS-XR-controller-ains-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-controller-ains-act",
"Cisco-IOS-XR-controller-odu-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-controller-odu-datatypes",
"Cisco-IOS-XR-controller-odu-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-controller-odu-oper",
"Cisco-IOS-XR-controller-optics-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-controller-optics-cfg",
"Cisco-IOS-XR-controller-optics-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-controller-optics-oper",
"Cisco-IOS-XR-controller-otu-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-controller-otu-cfg",
"Cisco-IOS-XR-controller-otu-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-controller-otu-oper",
"Cisco-IOS-XR-crypto-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-act",
"Cisco-IOS-XR-crypto-macsec-mka-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-macsec-mka-cfg",
"Cisco-IOS-XR-crypto-macsec-mka-if-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-macsec-mka-if-cfg",
"Cisco-IOS-XR-crypto-macsec-mka-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-macsec-mka-oper",
"Cisco-IOS-XR-crypto-macsec-secy-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-macsec-secy-oper",
"Cisco-IOS-XR-crypto-mibs-ipsecflowmon-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-mibs-ipsecflowmon-cfg",
"Cisco-IOS-XR-crypto-sam-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-sam-cfg",
"Cisco-IOS-XR-crypto-sam-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-sam-oper",
"Cisco-IOS-XR-crypto-ssh-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-ssh-cfg",
"Cisco-IOS-XR-crypto-ssh-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-crypto-ssh-oper",
"Cisco-IOS-XR-dot1x-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-dot1x-cfg",
"Cisco-IOS-XR-dot1x-if-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-dot1x-if-cfg",
"Cisco-IOS-XR-dot1x-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-dot1x-oper",
"Cisco-IOS-XR-drivers-icpe-ethernet-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-icpe-ethernet-cfg",
"Cisco-IOS-XR-drivers-media-eth-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-media-eth-act",
"Cisco-IOS-XR-drivers-media-eth-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-media-eth-cfg",
"Cisco-IOS-XR-drivers-media-eth-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-media-eth-oper",
"Cisco-IOS-XR-drivers-mpa-infra-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-mpa-infra-cfg",
"Cisco-IOS-XR-drivers-vpa-infra-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-vpa-infra-cfg",
"Cisco-IOS-XR-dwdm-ui-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-dwdm-ui-cfg",
"Cisco-IOS-XR-dwdm-ui-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-dwdm-ui-oper",
"Cisco-IOS-XR-eigrp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-eigrp-cfg",
"Cisco-IOS-XR-eigrp-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-eigrp-datatypes",
"Cisco-IOS-XR-eigrp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-eigrp-oper",
"Cisco-IOS-XR-es-acl-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-es-acl-cfg",
"Cisco-IOS-XR-es-acl-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-es-acl-datatypes",
"Cisco-IOS-XR-es-acl-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-es-acl-oper",
"Cisco-IOS-XR-ethernet-cfm-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-cfm-cfg",
"Cisco-IOS-XR-ethernet-cfm-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-cfm-datatypes",
"Cisco-IOS-XR-ethernet-cfm-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-cfm-oper",
"Cisco-IOS-XR-ethernet-cfm-sat-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-cfm-sat-cfg",
"Cisco-IOS-XR-ethernet-link-oam-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-link-oam-cfg",
"Cisco-IOS-XR-ethernet-link-oam-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-link-oam-oper",
"Cisco-IOS-XR-ethernet-lldp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-lldp-cfg",
"Cisco-IOS-XR-ethernet-lldp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ethernet-lldp-oper",
"Cisco-IOS-XR-evpn-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-evpn-oper",
"Cisco-IOS-XR-fib-common-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-fib-common-cfg",
"Cisco-IOS-XR-fib-common-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-fib-common-oper",
"Cisco-IOS-XR-flashmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-flashmib-cfg",
"Cisco-IOS-XR-flowspec-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-flowspec-cfg",
"Cisco-IOS-XR-flowspec-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-flowspec-oper",
"Cisco-IOS-XR-fpd-infra-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-fpd-infra-cfg",
"Cisco-IOS-XR-freqsync-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-freqsync-cfg",
"Cisco-IOS-XR-freqsync-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-freqsync-datatypes",
"Cisco-IOS-XR-freqsync-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-freqsync-oper",
"Cisco-IOS-XR-freqsync-sat-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-freqsync-sat-cfg",
"Cisco-IOS-XR-group-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-group-cfg",
"Cisco-IOS-XR-ha-eem-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ha-eem-cfg",
"Cisco-IOS-XR-ha-eem-policy-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ha-eem-policy-oper",
"Cisco-IOS-XR-hwmod-bcc-disable-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-hwmod-bcc-disable-cfg",
"Cisco-IOS-XR-hwmod-mpa-reload-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-hwmod-mpa-reload-act",
"Cisco-IOS-XR-icpe-infra-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-icpe-infra-cfg",
"Cisco-IOS-XR-icpe-infra-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-icpe-infra-oper",
"Cisco-IOS-XR-icpe-sdacp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-icpe-sdacp-oper",
"Cisco-IOS-XR-iedge4710-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-cfg",
"Cisco-IOS-XR-iedge4710-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-iedge4710-oper",
"Cisco-IOS-XR-ifmgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg",
"Cisco-IOS-XR-ifmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-oper",
"Cisco-IOS-XR-ikev2-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ikev2-cfg",
"Cisco-IOS-XR-ikev2-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ikev2-oper",
"Cisco-IOS-XR-infra-alarm-logger-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-alarm-logger-cfg",
"Cisco-IOS-XR-infra-alarm-logger-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-alarm-logger-datatypes",
"Cisco-IOS-XR-infra-alarm-logger-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-alarm-logger-oper",
"Cisco-IOS-XR-infra-ceredundancymib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-ceredundancymib-cfg",
"Cisco-IOS-XR-infra-confcopymib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-confcopymib-cfg",
"Cisco-IOS-XR-infra-correlator-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-correlator-cfg",
"Cisco-IOS-XR-infra-correlator-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-correlator-oper",
"Cisco-IOS-XR-infra-fti-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-fti-cfg",
"Cisco-IOS-XR-infra-fti-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-fti-oper",
"Cisco-IOS-XR-infra-infra-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-infra-cfg",
"Cisco-IOS-XR-infra-infra-clock-linux-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-infra-clock-linux-cfg",
"Cisco-IOS-XR-infra-infra-locale-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-infra-locale-cfg",
"Cisco-IOS-XR-infra-ltrace-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-ltrace-cfg",
"Cisco-IOS-XR-infra-notification-log-mib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-notification-log-mib-cfg",
"Cisco-IOS-XR-infra-nsr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-nsr-cfg",
"Cisco-IOS-XR-infra-objmgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-objmgr-cfg",
"Cisco-IOS-XR-infra-objmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-objmgr-oper",
"Cisco-IOS-XR-infra-placed-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-placed-act",
"Cisco-IOS-XR-infra-policymgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-policymgr-cfg",
"Cisco-IOS-XR-infra-policymgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-policymgr-oper",
"Cisco-IOS-XR-infra-rcmd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rcmd-cfg",
"Cisco-IOS-XR-infra-rcmd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rcmd-oper",
"Cisco-IOS-XR-infra-rmf-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rmf-oper",
"Cisco-IOS-XR-infra-rsi-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-cfg",
"Cisco-IOS-XR-infra-rsi-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-oper",
"Cisco-IOS-XR-infra-rsi-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rsi-subscriber-cfg",
"Cisco-IOS-XR-infra-rt-check-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-rt-check-cfg",
"Cisco-IOS-XR-infra-serg-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-serg-cfg",
"Cisco-IOS-XR-infra-serg-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-serg-oper",
"Cisco-IOS-XR-infra-sla-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-sla-cfg",
"Cisco-IOS-XR-infra-sla-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-sla-datatypes",
"Cisco-IOS-XR-infra-sla-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-sla-oper",
"Cisco-IOS-XR-infra-statsd-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-statsd-act",
"Cisco-IOS-XR-infra-statsd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-statsd-cfg",
"Cisco-IOS-XR-infra-statsd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-statsd-oper",
"Cisco-IOS-XR-infra-syslog-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-syslog-cfg",
"Cisco-IOS-XR-infra-syslog-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-syslog-oper",
"Cisco-IOS-XR-infra-systemmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-systemmib-cfg",
"Cisco-IOS-XR-infra-tc-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-tc-cfg",
"Cisco-IOS-XR-infra-tc-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-tc-oper",
"Cisco-IOS-XR-infra-xtc-agent-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-agent-cfg",
"Cisco-IOS-XR-infra-xtc-agent-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-agent-oper",
"Cisco-IOS-XR-infra-xtc-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-cfg",
"Cisco-IOS-XR-infra-xtc-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-infra-xtc-oper",
"Cisco-IOS-XR-installmgr-admin-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-installmgr-admin-oper",
"Cisco-IOS-XR-invmgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-invmgr-cfg",
"Cisco-IOS-XR-invmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-invmgr-oper",
"Cisco-IOS-XR-ip-bfd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-bfd-cfg",
"Cisco-IOS-XR-ip-bfd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-bfd-oper",
"Cisco-IOS-XR-ip-daps-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-daps-cfg",
"Cisco-IOS-XR-ip-daps-mib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-daps-mib-cfg",
"Cisco-IOS-XR-ip-daps-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-daps-oper",
"Cisco-IOS-XR-ip-domain-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-domain-cfg",
"Cisco-IOS-XR-ip-domain-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-domain-oper",
"Cisco-IOS-XR-ip-iarm-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-cfg",
"Cisco-IOS-XR-ip-iarm-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-datatypes",
"Cisco-IOS-XR-ip-iarm-v4-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-v4-oper",
"Cisco-IOS-XR-ip-iarm-v6-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-v6-oper",
"Cisco-IOS-XR-ip-iarm-vrf-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iarm-vrf-cfg",
"Cisco-IOS-XR-ip-icmp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-icmp-cfg",
"Cisco-IOS-XR-ip-iep-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iep-cfg",
"Cisco-IOS-XR-ip-iep-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-iep-oper",
"Cisco-IOS-XR-ip-mobileip-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-mobileip-cfg",
"Cisco-IOS-XR-ip-mobileip-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-mobileip-oper",
"Cisco-IOS-XR-ip-ntp-admin-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-ntp-admin-oper",
"Cisco-IOS-XR-ip-ntp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-ntp-cfg",
"Cisco-IOS-XR-ip-ntp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-ntp-oper",
"Cisco-IOS-XR-ip-pfilter-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-pfilter-cfg",
"Cisco-IOS-XR-ip-pfilter-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-pfilter-oper",
"Cisco-IOS-XR-ip-pfilter-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-pfilter-subscriber-cfg",
"Cisco-IOS-XR-ip-raw-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-raw-cfg",
"Cisco-IOS-XR-ip-rib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-cfg",
"Cisco-IOS-XR-ip-rib-ipv4-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-ipv4-oper",
"Cisco-IOS-XR-ip-rib-ipv6-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rib-ipv6-oper",
"Cisco-IOS-XR-ip-rip-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rip-cfg",
"Cisco-IOS-XR-ip-rip-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rip-oper",
"Cisco-IOS-XR-ip-rsvp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rsvp-cfg",
"Cisco-IOS-XR-ip-rsvp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-rsvp-oper",
"Cisco-IOS-XR-ip-sbfd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-sbfd-cfg",
"Cisco-IOS-XR-ip-sbfd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-sbfd-oper",
"Cisco-IOS-XR-ip-static-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-static-cfg",
"Cisco-IOS-XR-ip-tcp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-tcp-cfg",
"Cisco-IOS-XR-ip-tcp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-tcp-oper",
"Cisco-IOS-XR-ip-udp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-udp-cfg",
"Cisco-IOS-XR-ip-udp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ip-udp-oper",
"Cisco-IOS-XR-ipv4-acl-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-acl-cfg",
"Cisco-IOS-XR-ipv4-acl-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-acl-datatypes",
"Cisco-IOS-XR-ipv4-acl-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-acl-oper",
"Cisco-IOS-XR-ipv4-arp-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-act",
"Cisco-IOS-XR-ipv4-arp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-cfg",
"Cisco-IOS-XR-ipv4-arp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-arp-oper",
"Cisco-IOS-XR-ipv4-autorp-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-autorp-datatypes",
"Cisco-IOS-XR-ipv4-autorp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-autorp-oper",
"Cisco-IOS-XR-ipv4-bgp-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-act",
"Cisco-IOS-XR-ipv4-bgp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-cfg",
"Cisco-IOS-XR-ipv4-bgp-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-datatypes",
"Cisco-IOS-XR-ipv4-bgp-oc-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-oc-oper",
"Cisco-IOS-XR-ipv4-bgp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-bgp-oper",
"Cisco-IOS-XR-ipv4-cinetd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-cinetd-cfg",
"Cisco-IOS-XR-ipv4-dc-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-dc-cfg",
"Cisco-IOS-XR-ipv4-dhcpd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-dhcpd-cfg",
"Cisco-IOS-XR-ipv4-dhcpd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-dhcpd-oper",
"Cisco-IOS-XR-ipv4-dhcpd-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-dhcpd-subscriber-cfg",
"Cisco-IOS-XR-ipv4-filesystems-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-filesystems-cfg",
"Cisco-IOS-XR-ipv4-hsrp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-hsrp-cfg",
"Cisco-IOS-XR-ipv4-hsrp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-hsrp-oper",
"Cisco-IOS-XR-ipv4-igmp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-cfg",
"Cisco-IOS-XR-ipv4-igmp-dyn-tmpl-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-dyn-tmpl-cfg",
"Cisco-IOS-XR-ipv4-igmp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-igmp-oper",
"Cisco-IOS-XR-ipv4-io-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-io-cfg",
"Cisco-IOS-XR-ipv4-io-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-io-oper",
"Cisco-IOS-XR-ipv4-ma-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ma-cfg",
"Cisco-IOS-XR-ipv4-ma-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ma-oper",
"Cisco-IOS-XR-ipv4-ma-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ma-subscriber-cfg",
"Cisco-IOS-XR-ipv4-mfwd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-mfwd-cfg",
"Cisco-IOS-XR-ipv4-msdp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-msdp-cfg",
"Cisco-IOS-XR-ipv4-ospf-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ospf-act",
"Cisco-IOS-XR-ipv4-ospf-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ospf-cfg",
"Cisco-IOS-XR-ipv4-ospf-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-ospf-oper",
"Cisco-IOS-XR-ipv4-pim-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-pim-cfg",
"Cisco-IOS-XR-ipv4-pim-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-pim-oper",
"Cisco-IOS-XR-ipv4-smiap-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-smiap-cfg",
"Cisco-IOS-XR-ipv4-telnet-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-telnet-cfg",
"Cisco-IOS-XR-ipv4-telnet-mgmt-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-telnet-mgmt-cfg",
"Cisco-IOS-XR-ipv4-vrrp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-vrrp-cfg",
"Cisco-IOS-XR-ipv4-vrrp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-vrrp-oper",
"Cisco-IOS-XR-ipv6-acl-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-acl-cfg",
"Cisco-IOS-XR-ipv6-acl-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-acl-datatypes",
"Cisco-IOS-XR-ipv6-acl-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-acl-oper",
"Cisco-IOS-XR-ipv6-io-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-io-cfg",
"Cisco-IOS-XR-ipv6-io-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-io-oper",
"Cisco-IOS-XR-ipv6-ma-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ma-cfg",
"Cisco-IOS-XR-ipv6-ma-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ma-oper",
"Cisco-IOS-XR-ipv6-ma-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ma-subscriber-cfg",
"Cisco-IOS-XR-ipv6-nd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-nd-cfg",
"Cisco-IOS-XR-ipv6-nd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-nd-oper",
"Cisco-IOS-XR-ipv6-nd-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-nd-subscriber-cfg",
"Cisco-IOS-XR-ipv6-new-dhcpv6d-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-new-dhcpv6d-cfg",
"Cisco-IOS-XR-ipv6-new-dhcpv6d-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-new-dhcpv6d-oper",
"Cisco-IOS-XR-ipv6-new-dhcpv6d-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-new-dhcpv6d-subscriber-cfg",
"Cisco-IOS-XR-ipv6-ospfv3-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ospfv3-act",
"Cisco-IOS-XR-ipv6-ospfv3-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ospfv3-cfg",
"Cisco-IOS-XR-ipv6-ospfv3-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-ospfv3-oper",
"Cisco-IOS-XR-ipv6-smiap-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ipv6-smiap-cfg",
"Cisco-IOS-XR-isis-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-isis-act",
"Cisco-IOS-XR-kim-tpa-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-kim-tpa-cfg",
"Cisco-IOS-XR-l2-eth-infra-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-l2-eth-infra-cfg",
"Cisco-IOS-XR-l2-eth-infra-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-l2-eth-infra-datatypes",
"Cisco-IOS-XR-l2-eth-infra-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-l2-eth-infra-oper",
"Cisco-IOS-XR-l2rib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-l2rib-cfg",
"Cisco-IOS-XR-l2rib-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-l2rib-oper",
"Cisco-IOS-XR-l2vpn-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-cfg",
"Cisco-IOS-XR-l2vpn-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-l2vpn-oper",
"Cisco-IOS-XR-li-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-li-cfg",
"Cisco-IOS-XR-lib-keychain-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-lib-keychain-act",
"Cisco-IOS-XR-lib-keychain-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-lib-keychain-cfg",
"Cisco-IOS-XR-lib-keychain-masterkey-aes-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-lib-keychain-masterkey-aes-cfg",
"Cisco-IOS-XR-lib-keychain-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-lib-keychain-oper",
"Cisco-IOS-XR-lib-mpp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-lib-mpp-cfg",
"Cisco-IOS-XR-lib-mpp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-lib-mpp-oper",
"Cisco-IOS-XR-linux-os-heap-summary-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-linux-os-heap-summary-oper",
"Cisco-IOS-XR-linux-os-reboot-history-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-linux-os-reboot-history-oper",
"Cisco-IOS-XR-lmp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-lmp-cfg",
"Cisco-IOS-XR-lmp-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-lmp-datatypes",
"Cisco-IOS-XR-lmp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-lmp-oper",
"Cisco-IOS-XR-lpts-ifib-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-ifib-oper",
"Cisco-IOS-XR-lpts-lib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-lib-cfg",
"Cisco-IOS-XR-lpts-pa-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-pa-oper",
"Cisco-IOS-XR-lpts-pre-ifib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-pre-ifib-cfg",
"Cisco-IOS-XR-lpts-pre-ifib-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-pre-ifib-oper",
"Cisco-IOS-XR-lpts-punt-flowtrap-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-lpts-punt-flowtrap-cfg",
"Cisco-IOS-XR-man-ems-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-man-ems-cfg",
"Cisco-IOS-XR-man-ems-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-man-ems-oper",
"Cisco-IOS-XR-man-ipsla-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-man-ipsla-cfg",
"Cisco-IOS-XR-man-ipsla-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-man-ipsla-oper",
"Cisco-IOS-XR-man-netconf-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-man-netconf-cfg",
"Cisco-IOS-XR-man-xml-ttyagent-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-man-xml-ttyagent-cfg",
"Cisco-IOS-XR-man-xml-ttyagent-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-man-xml-ttyagent-oper",
"Cisco-IOS-XR-manageability-object-tracking-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-object-tracking-cfg",
"Cisco-IOS-XR-manageability-object-tracking-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-object-tracking-datatypes",
"Cisco-IOS-XR-manageability-object-tracking-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-object-tracking-oper",
"Cisco-IOS-XR-manageability-perfmgmt-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-perfmgmt-cfg",
"Cisco-IOS-XR-manageability-perfmgmt-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-perfmgmt-datatypes",
"Cisco-IOS-XR-manageability-perfmgmt-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-manageability-perfmgmt-oper",
"Cisco-IOS-XR-mdrv-lib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mdrv-lib-cfg",
"Cisco-IOS-XR-mediasvr-linux-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mediasvr-linux-oper",
"Cisco-IOS-XR-mpls-io-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-io-cfg",
"Cisco-IOS-XR-mpls-io-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-io-oper",
"Cisco-IOS-XR-mpls-ldp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-ldp-cfg",
"Cisco-IOS-XR-mpls-ldp-cfg-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-ldp-cfg-datatypes",
"Cisco-IOS-XR-mpls-ldp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-ldp-oper",
"Cisco-IOS-XR-mpls-ldp-oper-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-ldp-oper-datatypes",
"Cisco-IOS-XR-mpls-lsd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-lsd-cfg",
"Cisco-IOS-XR-mpls-lsd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-lsd-oper",
"Cisco-IOS-XR-mpls-oam-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-oam-cfg",
"Cisco-IOS-XR-mpls-oam-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-oam-oper",
"Cisco-IOS-XR-mpls-static-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-static-cfg",
"Cisco-IOS-XR-mpls-static-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-static-oper",
"Cisco-IOS-XR-mpls-te-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-cfg",
"Cisco-IOS-XR-mpls-te-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-datatypes",
"Cisco-IOS-XR-mpls-te-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-te-oper",
"Cisco-IOS-XR-mpls-vpn-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-vpn-cfg",
"Cisco-IOS-XR-mpls-vpn-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-mpls-vpn-oper",
"Cisco-IOS-XR-ncs5k-fea-pfilter-nonatomic-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ncs5k-fea-pfilter-nonatomic-cfg",
"Cisco-IOS-XR-nto-misc-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-nto-misc-oper",
"Cisco-IOS-XR-opendns-deviceid-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-opendns-deviceid-cfg",
"Cisco-IOS-XR-opticalmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-opticalmib-cfg",
"Cisco-IOS-XR-opticalotsmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-opticalotsmib-cfg",
"Cisco-IOS-XR-optics-driver-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-optics-driver-cfg",
"Cisco-IOS-XR-optics-driver-quad-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-optics-driver-quad-cfg",
"Cisco-IOS-XR-otnifmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-otnifmib-cfg",
"Cisco-IOS-XR-parser-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-parser-cfg",
"Cisco-IOS-XR-pbr-bng-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-bng-cfg",
"Cisco-IOS-XR-pbr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-cfg",
"Cisco-IOS-XR-pbr-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-datatypes",
"Cisco-IOS-XR-pbr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-oper",
"Cisco-IOS-XR-pbr-subscriber-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-subscriber-cfg",
"Cisco-IOS-XR-pbr-vrf-policy-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-vrf-policy-cfg",
"Cisco-IOS-XR-pbr-vservice-ea-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-vservice-ea-oper",
"Cisco-IOS-XR-pbr-vservice-mgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pbr-vservice-mgr-oper",
"Cisco-IOS-XR-perf-meas-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-perf-meas-cfg",
"Cisco-IOS-XR-perf-meas-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-perf-meas-oper",
"Cisco-IOS-XR-pfi-im-cmd-ctrlr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pfi-im-cmd-ctrlr-oper",
"Cisco-IOS-XR-pfi-im-cmd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pfi-im-cmd-oper",
"Cisco-IOS-XR-pfm-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pfm-oper",
"Cisco-IOS-XR-ping-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-ping-act",
"Cisco-IOS-XR-plat-chas-invmgr-ng-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-plat-chas-invmgr-ng-oper",
"Cisco-IOS-XR-plat-chas-invmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-plat-chas-invmgr-oper",
"Cisco-IOS-XR-platform-pifib-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-platform-pifib-oper",
"Cisco-IOS-XR-pmengine-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-pmengine-cfg",
"Cisco-IOS-XR-pmengine-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pmengine-oper",
"Cisco-IOS-XR-policy-repository-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-policy-repository-cfg",
"Cisco-IOS-XR-policy-repository-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-policy-repository-oper",
"Cisco-IOS-XR-ppp-ea-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ea-oper",
"Cisco-IOS-XR-ppp-ma-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-cfg",
"Cisco-IOS-XR-ppp-ma-fsm-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-fsm-cfg",
"Cisco-IOS-XR-ppp-ma-gbl-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-gbl-cfg",
"Cisco-IOS-XR-ppp-ma-ipcp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-ipcp-cfg",
"Cisco-IOS-XR-ppp-ma-ipcpiw-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-ipcpiw-cfg",
"Cisco-IOS-XR-ppp-ma-lcp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-lcp-cfg",
"Cisco-IOS-XR-ppp-ma-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-oper",
"Cisco-IOS-XR-ppp-ma-ssrp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-ssrp-cfg",
"Cisco-IOS-XR-ppp-ma-syslog-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ppp-ma-syslog-cfg",
"Cisco-IOS-XR-pppoe-ea-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-pppoe-ea-oper",
"Cisco-IOS-XR-prm-hwmod-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-prm-hwmod-cfg",
"Cisco-IOS-XR-prm-hwmod-profile-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-prm-hwmod-profile-cfg",
"Cisco-IOS-XR-prm-hwmod-sr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-prm-hwmod-sr-cfg",
"Cisco-IOS-XR-prm-server-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-prm-server-oper",
"Cisco-IOS-XR-procfind-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-procfind-oper",
"Cisco-IOS-XR-procmem-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-procmem-oper",
"Cisco-IOS-XR-ptp-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-ptp-cfg",
"Cisco-IOS-XR-ptp-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-ptp-datatypes",
"Cisco-IOS-XR-ptp-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-ptp-oper",
"Cisco-IOS-XR-qos-ma-bng-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-qos-ma-bng-cfg",
"Cisco-IOS-XR-qos-ma-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-qos-ma-cfg",
"Cisco-IOS-XR-qos-ma-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-qos-ma-oper",
"Cisco-IOS-XR-qos-ma-sat-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-qos-ma-sat-cfg",
"Cisco-IOS-XR-qos-mibs-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-qos-mibs-cfg",
"Cisco-IOS-XR-remote-attestation-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-remote-attestation-act",
"Cisco-IOS-XR-rgmgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-rgmgr-cfg",
"Cisco-IOS-XR-rgmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-rgmgr-oper",
"Cisco-IOS-XR-sdr-invmgr-diag-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-sdr-invmgr-diag-oper",
"Cisco-IOS-XR-sdr-invmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-sdr-invmgr-oper",
"Cisco-IOS-XR-segment-routing-ms-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-cfg",
"Cisco-IOS-XR-segment-routing-ms-common-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-common-cfg",
"Cisco-IOS-XR-segment-routing-ms-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-ms-oper",
"Cisco-IOS-XR-segment-routing-srv6-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-srv6-cfg",
"Cisco-IOS-XR-segment-routing-srv6-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-srv6-datatypes",
"Cisco-IOS-XR-segment-routing-srv6-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-segment-routing-srv6-oper",
"Cisco-IOS-XR-shellutil-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-cfg",
"Cisco-IOS-XR-shellutil-copy-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-copy-act",
"Cisco-IOS-XR-shellutil-delete-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-delete-act",
"Cisco-IOS-XR-shellutil-filesystem-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-filesystem-oper",
"Cisco-IOS-XR-shellutil-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-shellutil-oper",
"Cisco-IOS-XR-show-fpd-loc-ng-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-show-fpd-loc-ng-oper",
"Cisco-IOS-XR-skp-qos-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-skp-qos-oper",
"Cisco-IOS-XR-skywarp-netflow-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-skywarp-netflow-oper",
"Cisco-IOS-XR-snmp-agent-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-agent-cfg",
"Cisco-IOS-XR-snmp-agent-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-agent-oper",
"Cisco-IOS-XR-snmp-bridgemib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-bridgemib-cfg",
"Cisco-IOS-XR-snmp-ciscosensormib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-ciscosensormib-cfg",
"Cisco-IOS-XR-snmp-entityextmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-entityextmib-cfg",
"Cisco-IOS-XR-snmp-entitymib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-entitymib-cfg",
"Cisco-IOS-XR-snmp-entitymib-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-entitymib-oper",
"Cisco-IOS-XR-snmp-entstatemib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-entstatemib-cfg",
"Cisco-IOS-XR-snmp-frucontrolmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-frucontrolmib-cfg",
"Cisco-IOS-XR-snmp-ifmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-ifmib-cfg",
"Cisco-IOS-XR-snmp-ifmib-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-ifmib-oper",
"Cisco-IOS-XR-snmp-mib-rfmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-mib-rfmib-cfg",
"Cisco-IOS-XR-snmp-sensormib-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-sensormib-oper",
"Cisco-IOS-XR-snmp-syslogmib-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-syslogmib-cfg",
"Cisco-IOS-XR-snmp-test-trap-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-snmp-test-trap-act",
"Cisco-IOS-XR-spirit-corehelper-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-spirit-corehelper-cfg",
"Cisco-IOS-XR-spirit-install-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-spirit-install-act",
"Cisco-IOS-XR-spirit-install-instmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-spirit-install-instmgr-oper",
"Cisco-IOS-XR-sse-span-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-sse-span-oper",
"Cisco-IOS-XR-subscriber-accounting-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-accounting-cfg",
"Cisco-IOS-XR-subscriber-accounting-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-accounting-oper",
"Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg",
"Cisco-IOS-XR-subscriber-ipsub-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-ipsub-cfg",
"Cisco-IOS-XR-subscriber-ipsub-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-ipsub-oper",
"Cisco-IOS-XR-subscriber-pppoe-ma-cmd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-pppoe-ma-cmd-cfg",
"Cisco-IOS-XR-subscriber-pppoe-ma-gbl-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-pppoe-ma-gbl-cfg",
"Cisco-IOS-XR-subscriber-pppoe-ma-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-pppoe-ma-oper",
"Cisco-IOS-XR-subscriber-session-mon-mibs-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-session-mon-mibs-cfg",
"Cisco-IOS-XR-subscriber-session-mon-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-session-mon-oper",
"Cisco-IOS-XR-subscriber-srg-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-srg-cfg",
"Cisco-IOS-XR-subscriber-srg-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-subscriber-srg-oper",
"Cisco-IOS-XR-sysadmin-aaa-aaa-show": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-aaa-aaa-show",
"Cisco-IOS-XR-sysadmin-aaa-disaster-recovery": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-aaa-disaster-recovery",
"Cisco-IOS-XR-sysadmin-alarm-mgr": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-alarm-mgr",
"Cisco-IOS-XR-sysadmin-asr9k-envmon-types": "http://www.cisco.com/ns/Cisco-IOS-XR-sysadmin-asr9k-envmon-types",
"Cisco-IOS-XR-sysadmin-asr9k-envmon-ui": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-asr9k-envmon-ui",
"Cisco-IOS-XR-sysadmin-clear-asr9k": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-clear-ASR9K",
"Cisco-IOS-XR-sysadmin-cm": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-cm",
"Cisco-IOS-XR-sysadmin-controllers-asr9k": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-controllers-ASR9K",
"Cisco-IOS-XR-sysadmin-debug-trace": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-debug-trace",
"Cisco-IOS-XR-sysadmin-ds": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ds",
"Cisco-IOS-XR-sysadmin-dumper": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-dumper",
"Cisco-IOS-XR-sysadmin-entity-mib": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-entity-mib",
"Cisco-IOS-XR-sysadmin-entity-sensor-mib": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-entity-sensor-mib",
"Cisco-IOS-XR-sysadmin-entity-state-mib": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-entity-state-mib",
"Cisco-IOS-XR-sysadmin-entity-state-tc-mib": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-entity-state-tc-mib",
"Cisco-IOS-XR-sysadmin-envmon-types": "http://www.cisco.com/ns/Cisco-IOS-XR-sysadmin-envmon-types",
"Cisco-IOS-XR-sysadmin-envmon-ui": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-envmon-ui",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-admin-exec": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-admin-exec",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-bridge": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-bridge",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-clear": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-clear",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-cli-asr9k": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-cli-asr9k",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-debug": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-debug",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-fdb": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-fdb",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-mac": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-mac",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-mgmt-agent": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-mgmt-agent",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-mlap": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-mlap",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-reachable": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-reachable",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-sdr": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-sdr",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-serdes": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-serdes",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-sfp": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-sfp",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-statistics": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-statistics",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-summary": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-summary",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-trace": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-trace",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-trunk": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-trunk",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-types": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-types",
"Cisco-IOS-XR-sysadmin-ethsw-esdma-vlan": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ethsw-esdma-vlan",
"Cisco-IOS-XR-sysadmin-external-usb": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-external-usb",
"Cisco-IOS-XR-sysadmin-fm": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fm",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd-service": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpd-service",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpdserv-ctrace": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fpd-infra-cli-fpdserv-ctrace",
"Cisco-IOS-XR-sysadmin-fpd-infra-cli-shhwfpd": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-fpd-infra-cli-shhwfpd",
"Cisco-IOS-XR-sysadmin-hw-module": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-hw-module",
"Cisco-IOS-XR-sysadmin-hw-module-xrv9k": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-hw-module-xrv9k",
"Cisco-IOS-XR-sysadmin-instmgr-oper": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-instmgr-oper",
"Cisco-IOS-XR-sysadmin-issu": "http://cisco.com/calvados/Cisco-IOS-XR-sysadmin-issu",
"Cisco-IOS-XR-sysadmin-led-mgr-ui": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-led-mgr-ui",
"Cisco-IOS-XR-sysadmin-nto-misc-set-hostname": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-nto-misc-set-hostname",
"Cisco-IOS-XR-sysadmin-obfl": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-obfl",
"Cisco-IOS-XR-sysadmin-obfl-conf": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-obfl-conf",
"Cisco-IOS-XR-sysadmin-pm": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-pm",
"Cisco-IOS-XR-sysadmin-rvm-mgr": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-rvm-mgr",
"Cisco-IOS-XR-sysadmin-sdr-mgr": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sdr-mgr",
"Cisco-IOS-XR-sysadmin-services": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-services",
"Cisco-IOS-XR-sysadmin-ship": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-ship",
"Cisco-IOS-XR-sysadmin-show-diag": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-diag",
"Cisco-IOS-XR-sysadmin-show-inv": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-inv",
"Cisco-IOS-XR-sysadmin-show-obfl": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-obfl",
"Cisco-IOS-XR-sysadmin-show-trace": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace",
"Cisco-IOS-XR-sysadmin-show-trace-cm": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-cm",
"Cisco-IOS-XR-sysadmin-show-trace-debug-agent": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-debug-agent",
"Cisco-IOS-XR-sysadmin-show-trace-instagt": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-instagt",
"Cisco-IOS-XR-sysadmin-show-trace-instmgr": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-instmgr",
"Cisco-IOS-XR-sysadmin-show-trace-vmm": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-show-trace-vmm",
"Cisco-IOS-XR-sysadmin-sm": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sm",
"Cisco-IOS-XR-sysadmin-sm-hw-mod": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-sm-hw-mod",
"Cisco-IOS-XR-sysadmin-syslog": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-syslog",
"Cisco-IOS-XR-sysadmin-system": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-system",
"Cisco-IOS-XR-sysadmin-tacacs-show-tacacs": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-tacacs-show-tacacs",
"Cisco-IOS-XR-sysadmin-tacacs-tacacs-server": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-tacacs-tacacs-server",
"Cisco-IOS-XR-sysadmin-tacacs-test-tacacs": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-tacacs-test-tacacs",
"Cisco-IOS-XR-sysadmin-time-of-day-timezone": "http://cisco.com/calvados/Cisco-IOS-XR-sysadmin-time-of-day-timezone",
"Cisco-IOS-XR-sysadmin-types": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-types",
"Cisco-IOS-XR-sysadmin-vm": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-vm",
"Cisco-IOS-XR-sysadmin-vm-mgr": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-vm-mgr",
"Cisco-IOS-XR-sysadmin-wdmon": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-wdmon",
"Cisco-IOS-XR-sysadmin-zapdisk": "http://www.cisco.com/ns/yang/Cisco-IOS-XR-sysadmin-zapdisk",
"Cisco-IOS-XR-sysdb-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-sysdb-oper",
"Cisco-IOS-XR-syslog-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-syslog-act",
"Cisco-IOS-XR-sysmgr-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-sysmgr-act",
"Cisco-IOS-XR-sysmgr-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-sysmgr-cfg",
"Cisco-IOS-XR-sysmgr-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-sysmgr-oper",
"Cisco-IOS-XR-telemetry-model-driven-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-telemetry-model-driven-cfg",
"Cisco-IOS-XR-telemetry-model-driven-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-telemetry-model-driven-oper",
"Cisco-IOS-XR-traceroute-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-traceroute-act",
"Cisco-IOS-XR-traffmon-netflow-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-traffmon-netflow-cfg",
"Cisco-IOS-XR-tty-management-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tty-management-cfg",
"Cisco-IOS-XR-tty-management-cmd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-tty-management-cmd-oper",
"Cisco-IOS-XR-tty-management-datatypes": "http://cisco.com/ns/yang/Cisco-IOS-XR-tty-management-datatypes",
"Cisco-IOS-XR-tty-management-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-tty-management-oper",
"Cisco-IOS-XR-tty-server-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tty-server-cfg",
"Cisco-IOS-XR-tty-server-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-tty-server-oper",
"Cisco-IOS-XR-tty-vty-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tty-vty-cfg",
"Cisco-IOS-XR-tunnel-gre-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-gre-cfg",
"Cisco-IOS-XR-tunnel-l2tun-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-l2tun-cfg",
"Cisco-IOS-XR-tunnel-l2tun-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-l2tun-oper",
"Cisco-IOS-XR-tunnel-l2tun-proto-mibs-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-l2tun-proto-mibs-cfg",
"Cisco-IOS-XR-tunnel-nve-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-nve-cfg",
"Cisco-IOS-XR-tunnel-nve-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-nve-oper",
"Cisco-IOS-XR-tunnel-vpdn-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-vpdn-cfg",
"Cisco-IOS-XR-tunnel-vpdn-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-tunnel-vpdn-oper",
"Cisco-IOS-XR-types": "http://cisco.com/ns/yang/cisco-xr-types",
"Cisco-IOS-XR-upgrade-fpd-admin-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-upgrade-fpd-admin-cfg",
"Cisco-IOS-XR-upgrade-fpd-ng-act": "http://cisco.com/ns/yang/Cisco-IOS-XR-upgrade-fpd-ng-act",
"Cisco-IOS-XR-upgrade-fpd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-upgrade-fpd-oper",
"Cisco-IOS-XR-vservice-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-vservice-cfg",
"Cisco-IOS-XR-wanphy-ui-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-wanphy-ui-cfg",
"Cisco-IOS-XR-wanphy-ui-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-wanphy-ui-oper",
"Cisco-IOS-XR-watchd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-watchd-cfg",
"Cisco-IOS-XR-wd-cfg": "http://cisco.com/ns/yang/Cisco-IOS-XR-wd-cfg",
"Cisco-IOS-XR-wd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-wd-oper",
"Cisco-IOS-XR-wdsysmon-fd-oper": "http://cisco.com/ns/yang/Cisco-IOS-XR-wdsysmon-fd-oper",
"INET-ADDRESS-MIB": "http://tail-f.com/ns/mibs/INET-ADDRESS-MIB/200205090000Z",
"IPV6-TC": "http://tail-f.com/ns/mibs/IPV6-TC/199812010000Z",
"SNMP-COMMUNITY-MIB": "http://tail-f.com/ns/mibs/SNMP-COMMUNITY-MIB/200308060000Z",
"SNMP-FRAMEWORK-MIB": "http://tail-f.com/ns/mibs/SNMP-FRAMEWORK-MIB/200210140000Z",
"SNMP-MPD-MIB": "http://tail-f.com/ns/mibs/SNMP-MPD-MIB/200210140000Z",
"SNMP-NOTIFICATION-MIB": "http://tail-f.com/ns/mibs/SNMP-NOTIFICATION-MIB/200210140000Z",
"SNMP-TARGET-MIB": "http://tail-f.com/ns/mibs/SNMP-TARGET-MIB/200210140000Z",
"SNMP-USER-BASED-SM-MIB": "http://tail-f.com/ns/mibs/SNMP-USER-BASED-SM-MIB/200210160000Z",
"SNMP-VIEW-BASED-ACM-MIB": "http://tail-f.com/ns/mibs/SNMP-VIEW-BASED-ACM-MIB/200210160000Z",
"SNMPv2-MIB": "http://tail-f.com/ns/mibs/SNMPv2-MIB/200210160000Z",
"SNMPv2-SMI": "http://tail-f.com/ns/mibs/SNMPv2-SMI/1.0",
"SNMPv2-TC": "http://tail-f.com/ns/mibs/SNMPv2-TC/1.0",
"ccc": "http://cisco.com/calvados/ccc",
"nc-notifications": "urn:ietf:params:xml:ns:netmod:notification",
"notifications": "urn:ietf:params:xml:ns:netconf:notification:1.0",
"ntp": "http://cisco.com/calvados/ntp",
"opertest1": "http://www.cisco.com/panini/calvados/opertest1",
"tailf-aaa": "http://tail-f.com/ns/aaa/1.1",
"tailf-actions": "http://tail-f.com/ns/netconf/actions/1.0",
"tailf-common": "http://tail-f.com/yang/common",
"tailf-common-monitoring": "http://tail-f.com/yang/common-monitoring",
"tailf-common-query": "http://tail-f.com/ns/common/query",
"tailf-confd-monitoring": "http://tail-f.com/yang/confd-monitoring",
"tailf-netconf-query": "http://tail-f.com/ns/netconf/query",
"tailf-xsd-types": "http://tail-f.com/yang/xsd-types",
"valtest": "http://www.cisco.com/panini/calvados/valtest",
"vplatform": "http://cisco.com/panini/calvados/vplatform",
}
# Maps a qualified YANG identity name ("<module-name>:<identity-name>") to the
# (Python module path, class name) pair that implements it in the generated
# ydk bindings.  Consumers import the named module and resolve the class by
# the second element.  Insertion order matches the generated-source order.
IDENTITY_LOOKUP = {
    'Cisco-IOS-XR-ip-domain-oper:Host-address-base':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_domain_oper', 'HostAddressBase'),
    'Cisco-IOS-XR-ip-domain-oper:ipv4':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_domain_oper', 'Ipv4'),
    'Cisco-IOS-XR-ip-domain-oper:ipv6':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_domain_oper', 'Ipv6'),
    'Cisco-IOS-XR-lib-mpp-oper:ipv4':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper', 'Ipv4'),
    'Cisco-IOS-XR-lib-mpp-oper:ipv6':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper', 'Ipv6'),
    'Cisco-IOS-XR-lib-mpp-oper:Mpp-af-id-base':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper', 'MppAfIdBase'),
    'Cisco-IOS-XR-tty-management-oper:Host-af-id-base':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_oper', 'HostAfIdBase'),
    'Cisco-IOS-XR-tty-management-oper:ipv4':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_oper', 'Ipv4'),
    'Cisco-IOS-XR-tty-management-oper:ipv6':
        ('ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_oper', 'Ipv6'),
}
| 85.189039
| 172
| 0.70406
| 36,184
| 214,506
| 4.031837
| 0.025785
| 0.217317
| 0.271647
| 0.099995
| 0.987271
| 0.968332
| 0.897044
| 0.829595
| 0.772024
| 0.706042
| 0
| 0.044684
| 0.074916
| 214,506
| 2,517
| 173
| 85.222884
| 0.690505
| 0
| 0
| 0
| 0
| 0.363854
| 0.830521
| 0.468768
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.000796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
160065900ffcd6fbbc3301c6f5f43e7f60b596a7
| 153
|
py
|
Python
|
orlov/libs/picture/__init__.py
|
coppelia517/orlov
|
d7ed6c061432b99ab2b75e0262db293e444fe6be
|
[
"MIT"
] | null | null | null |
orlov/libs/picture/__init__.py
|
coppelia517/orlov
|
d7ed6c061432b99ab2b75e0262db293e444fe6be
|
[
"MIT"
] | null | null | null |
orlov/libs/picture/__init__.py
|
coppelia517/orlov
|
d7ed6c061432b99ab2b75e0262db293e444fe6be
|
[
"MIT"
] | null | null | null |
""" Orlov is Multi-Platform Automation Testing Framework. """
from orlov.libs.picture.module import Picture
from orlov.libs.picture.module import Ocr
| 38.25
| 62
| 0.784314
| 21
| 153
| 5.714286
| 0.619048
| 0.15
| 0.216667
| 0.333333
| 0.533333
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124183
| 153
| 3
| 63
| 51
| 0.895522
| 0.346405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1610649f974be350d96b84fd4449157cc026a1ee
| 7,761
|
py
|
Python
|
tests/analysis/test_analyzer.py
|
hoominkani/dagda
|
8818ab96807c9755955b66ac6db6847ee4001c81
|
[
"Apache-2.0"
] | 947
|
2016-11-25T10:48:30.000Z
|
2022-03-30T20:09:00.000Z
|
tests/analysis/test_analyzer.py
|
hoominkani/dagda
|
8818ab96807c9755955b66ac6db6847ee4001c81
|
[
"Apache-2.0"
] | 96
|
2016-11-28T10:58:50.000Z
|
2021-10-19T18:32:12.000Z
|
tests/analysis/test_analyzer.py
|
hoominkani/dagda
|
8818ab96807c9755955b66ac6db6847ee4001c81
|
[
"Apache-2.0"
] | 148
|
2016-11-28T12:33:46.000Z
|
2022-03-02T01:57:21.000Z
|
#
# Licensed to Dagda under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Dagda licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from unittest.mock import Mock
import sys, os
sys.path.insert(1, str(os.path.dirname(os.path.abspath(__file__))) + '/../../dagda')
from analysis.analyzer import Analyzer
# -- Test suite
class AnalyzerTestCase(unittest.TestCase):
    """Checks Analyzer report generation against the mocked drivers below."""

    def _check_os_counts(self, report, total, vuln, ok):
        # Common assertions over the OS-package summary counters; the
        # details list always has one entry per analysed package.
        self.assertEqual(report['total_os_packages'], total)
        self.assertEqual(report['vuln_os_packages'], vuln)
        self.assertEqual(report['ok_os_packages'], ok)
        self.assertEqual(len(report['os_packages_details']), total)

    def _check_java_package_detail(self, detail):
        # The analysed OS package is always java 1.5 with 6 mocked vulns.
        self.assertEqual(detail['product'], 'java')
        self.assertEqual(detail['version'], '1.5')
        self.assertEqual(len(detail['vulnerabilities']), 6)

    def test_generate_os_report_with_empty_dep(self):
        report = EmptyVulnArrayAnalyzer().generate_os_report('test_image_name', [])
        self._check_os_counts(report, 0, 0, 0)

    def test_generate_os_report_with_NOT_empty_dep(self):
        report = NotEmptyVulnArrayAnalyzer().generate_os_report(
            'test_image_name', [{'product': 'java', 'version': '1.5'}])
        self._check_os_counts(report, 1, 1, 0)
        self._check_java_package_detail(report['os_packages_details'][0])

    def test_generate_os_report_with_NOT_empty_dep_with_fp(self):
        # When the DB flags the findings as false positives, the package
        # is counted as OK instead of vulnerable.
        report = NotEmptyVulnArrayWithFalsePositivesAnalyzer().generate_os_report(
            'test_image_name', [{'product': 'java', 'version': '1.5'}])
        self._check_os_counts(report, 1, 0, 1)
        self._check_java_package_detail(report['os_packages_details'][0])

    def test_generate_dependencies_report_with_empty_dep(self):
        report = EmptyVulnArrayAnalyzer().generate_dependencies_report('test_image_name', [])
        for language in ('java', 'python', 'nodejs', 'js', 'ruby', 'php'):
            self.assertEqual(len(report['dependencies_details'][language]), 0)
        self.assertEqual(report['vuln_dependencies'], 0)

    def _check_dependency_detail(self, detail, product, version, path):
        # One parsed dependency entry: product, version, file path, vulns.
        self.assertEqual(detail['product'], product)
        self.assertEqual(detail['version'], version)
        self.assertEqual(detail['product_file_path'], path)
        self.assertEqual(len(detail['vulnerabilities']), 6)

    def _check_java_python_dependencies(self, report):
        # Both parsed dependencies must be present; other languages empty.
        details = report['dependencies_details']
        self.assertEqual(len(details['java']), 1)
        self._check_dependency_detail(details['java'][0], 'java', '1.5', '/tmp/java.1.5.jar')
        self.assertEqual(len(details['python']), 1)
        self._check_dependency_detail(details['python'][0], 'python', '2.7', '/tmp/python.2.7.py')
        for language in ('nodejs', 'js', 'ruby', 'php'):
            self.assertEqual(len(details[language]), 0)

    def test_generate_dependencies_report_with_NOT_empty_dep(self):
        report = NotEmptyVulnArrayAnalyzer().generate_dependencies_report(
            'test_image_name',
            ['java#java#1.5#/tmp/java.1.5.jar', 'python#python#2.7#/tmp/python.2.7.py'])
        self._check_java_python_dependencies(report)
        self.assertEqual(report['vuln_dependencies'], 2)

    def test_generate_dependencies_report_with_NOT_empty_dep_with_fp(self):
        # False positives: details still listed, but nothing counted vulnerable.
        report = NotEmptyVulnArrayWithFalsePositivesAnalyzer().generate_dependencies_report(
            'test_image_name',
            ['java#java#1.5#/tmp/java.1.5.jar', 'python#python#2.7#/tmp/python.2.7.py'])
        self._check_java_python_dependencies(report)
        self.assertEqual(report['vuln_dependencies'], 0)
# -- Mock classes
class EmptyVulnArrayAnalyzer(Analyzer):
    """Analyzer stub whose DB driver reports no vulnerabilities at all."""

    def __init__(self):
        # Bypass the real Analyzer setup; wire in local mocks instead.
        self.is_remote = False
        self.dockerDriver = Mock()
        self.mongoDbDriver = Mock()
        self.mongoDbDriver.get_vulnerabilities.return_value = []
class NotEmptyVulnArrayAnalyzer(Analyzer):
    """Analyzer stub returning six mocked vulnerabilities, none marked FP."""

    def __init__(self):
        # Bypass the real Analyzer setup; wire in local mocks instead.
        self.is_remote = False
        self.dockerDriver = Mock()
        self.mongoDbDriver = Mock()
        mocked_vulns = ['CVE-2002-2001', 'CVE-2002-2002', 'BID-1', 'BID-2',
                        'EXPLOIT_DB_ID-3', 'EXPLOIT_DB_ID-4']
        self.mongoDbDriver.get_vulnerabilities.return_value = mocked_vulns
        self.mongoDbDriver.is_fp.return_value = False
class NotEmptyVulnArrayWithFalsePositivesAnalyzer(Analyzer):
    """Analyzer stub returning six mocked vulnerabilities, all marked FP."""

    def __init__(self):
        # Bypass the real Analyzer setup; wire in local mocks instead.
        self.is_remote = False
        self.dockerDriver = Mock()
        self.mongoDbDriver = Mock()
        mocked_vulns = ['CVE-2002-2001', 'CVE-2002-2002', 'BID-1', 'BID-2',
                        'EXPLOIT_DB_ID-3', 'EXPLOIT_DB_ID-4']
        self.mongoDbDriver.get_vulnerabilities.return_value = mocked_vulns
        self.mongoDbDriver.is_fp.return_value = True
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 53.895833
| 149
| 0.687283
| 923
| 7,761
| 5.568797
| 0.157096
| 0.160506
| 0.16537
| 0.12607
| 0.822568
| 0.822374
| 0.809339
| 0.809339
| 0.779961
| 0.765759
| 0
| 0.022352
| 0.158356
| 7,761
| 143
| 150
| 54.272727
| 0.764544
| 0.09625
| 0
| 0.718447
| 0
| 0
| 0.282282
| 0.019162
| 0
| 0
| 0
| 0
| 0.533981
| 1
| 0.087379
| false
| 0
| 0.038835
| 0
| 0.165049
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
161196ce9075c8bed0dddd7b34d08a546c320eff
| 10,914
|
py
|
Python
|
tests/induction_machine/test_vector_machine.py
|
jbschroder/pymgrit
|
5c866c633dea3ebccf812550e0303f47771c4faf
|
[
"MIT"
] | null | null | null |
tests/induction_machine/test_vector_machine.py
|
jbschroder/pymgrit
|
5c866c633dea3ebccf812550e0303f47771c4faf
|
[
"MIT"
] | null | null | null |
tests/induction_machine/test_vector_machine.py
|
jbschroder/pymgrit
|
5c866c633dea3ebccf812550e0303f47771c4faf
|
[
"MIT"
] | null | null | null |
"""
Tests vector_heat_1d_2pts
"""
import numpy as np
from pymgrit.induction_machine.vector_machine import VectorMachine
def test_vector_machine_constructor():
    """
    Test constructor
    """
    vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    # Sizes are stored and each u_* array starts out as zeros of that size.
    np.testing.assert_equal(vm.u_front_size, 4)
    np.testing.assert_equal(vm.u_middle_size, 5)
    np.testing.assert_equal(vm.u_back_size, 6)
    np.testing.assert_equal(vm.u_front, np.zeros(4))
    np.testing.assert_equal(vm.u_middle, np.zeros(5))
    np.testing.assert_equal(vm.u_back, np.zeros(6))
    # All scalar circuit quantities start out at zero as well.
    for name in ('jl', 'ua', 'ub', 'uc', 'ia', 'ib', 'ic', 'tr'):
        np.testing.assert_equal(getattr(vm, name), 0)
def test_vector_machine_add():
    """
    Test __add__
    """
    def make(front, middle, back, scalars):
        # Build a machine with constant u_* vectors and given scalar fields.
        vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
        vm.u_front = front * np.ones(4)
        vm.u_middle = middle * np.ones(5)
        vm.u_back = back * np.ones(6)
        for name, value in scalars.items():
            setattr(vm, name, value)
        return vm

    scalars_1 = {'ua': 1, 'ub': 2, 'uc': 3, 'ia': 4, 'ib': 5, 'ic': 6, 'tr': 7, 'jl': 8}
    # The second operand's scalars are each offset by 10.
    scalars_2 = {name: value + 10 for name, value in scalars_1.items()}
    result = make(1, 1, 1, scalars_1) + make(2, 3, 4, scalars_2)
    np.testing.assert_equal(result.u_front, 3 * np.ones(4))
    np.testing.assert_equal(result.u_middle, 4 * np.ones(5))
    np.testing.assert_equal(result.u_back, 5 * np.ones(6))
    # Scalars add componentwise: x + (x + 10) == 2 * x + 10.
    for name, value in scalars_1.items():
        np.testing.assert_equal(getattr(result, name), 2 * value + 10)
def test_vector_machine_sub():
    """
    Test __sub__
    """
    # Fixed: removed a stray duplicated "Test __add__" docstring that was
    # left behind by copy-paste from the addition test above.
    vector_machine_1 = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    vector_machine_1.u_front = np.ones(4)
    vector_machine_1.u_middle = np.ones(5)
    vector_machine_1.u_back = np.ones(6)
    vector_machine_1.ua = 1
    vector_machine_1.ub = 2
    vector_machine_1.uc = 3
    vector_machine_1.ia = 4
    vector_machine_1.ib = 5
    vector_machine_1.ic = 6
    vector_machine_1.tr = 7
    vector_machine_1.jl = 8
    vector_machine_2 = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    vector_machine_2.u_front = 2 * np.ones(4)
    vector_machine_2.u_middle = 3 * np.ones(5)
    vector_machine_2.u_back = 4 * np.ones(6)
    vector_machine_2.ua = 11
    vector_machine_2.ub = 12
    vector_machine_2.uc = 13
    vector_machine_2.ia = 14
    vector_machine_2.ib = 15
    vector_machine_2.ic = 16
    vector_machine_2.tr = 17
    vector_machine_2.jl = 18
    vector_machine_res = vector_machine_2 - vector_machine_1
    # Each u_* entry of machine 2 exceeds machine 1's by 1/2/3 and every
    # scalar by exactly 10, so the difference is fully determined.
    np.testing.assert_equal(vector_machine_res.u_front, np.ones(4))
    np.testing.assert_equal(vector_machine_res.u_middle, 2 * np.ones(5))
    np.testing.assert_equal(vector_machine_res.u_back, 3 * np.ones(6))
    for name in ('jl', 'ua', 'ub', 'uc', 'ia', 'ib', 'ic', 'tr'):
        np.testing.assert_equal(getattr(vector_machine_res, name), 10)
def test_vector_machine_norm():
    """
    Test norm()
    """
    vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    vm.u_front = 2 * np.ones(4)
    vm.u_middle = 3 * np.ones(5)
    vm.u_back = 4 * np.ones(6)
    # Scalar fields are set to nonzero values on purpose: the expected
    # norm below is computed from the u_* vectors only.
    for name, value in zip(('ua', 'ub', 'uc', 'ia', 'ib', 'ic', 'tr', 'jl'),
                           (11, 12, 13, 14, 15, 16, 17, 18)):
        setattr(vm, name, value)
    expected = np.linalg.norm(np.array([2] * 4 + [3] * 5 + [4] * 6))
    np.testing.assert_equal(expected, vm.norm())
def test_vector_machine_clone_zero():
    """
    Test clone_zero()
    """
    vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    clone = vm.clone_zero()
    np.testing.assert_equal(True, isinstance(clone, VectorMachine))
    # NOTE(review): the checks below inspect the source object rather than
    # the clone — presumably the clone should be checked instead; confirm.
    np.testing.assert_equal(vm.u_front_size, 4)
    np.testing.assert_equal(vm.u_middle_size, 5)
    np.testing.assert_equal(vm.u_back_size, 6)
    np.testing.assert_equal(vm.u_front, np.zeros(4))
    np.testing.assert_equal(vm.u_middle, np.zeros(5))
    np.testing.assert_equal(vm.u_back, np.zeros(6))
    for name in ('jl', 'ua', 'ub', 'uc', 'ia', 'ib', 'ic', 'tr'):
        np.testing.assert_equal(getattr(vm, name), 0)
def test_vector_vector_machine_cole_rand():
    """
    Test clone_rand()
    """
    vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    clone = vm.clone_rand()
    np.testing.assert_equal(True, isinstance(clone, VectorMachine))
    # NOTE(review): as in the clone_zero test, the checks below inspect the
    # untouched source object, not the (random) clone; confirm intent.
    np.testing.assert_equal(vm.u_front_size, 4)
    np.testing.assert_equal(vm.u_middle_size, 5)
    np.testing.assert_equal(vm.u_back_size, 6)
    np.testing.assert_equal(vm.u_front, np.zeros(4))
    np.testing.assert_equal(vm.u_middle, np.zeros(5))
    np.testing.assert_equal(vm.u_back, np.zeros(6))
    for name in ('jl', 'ua', 'ub', 'uc', 'ia', 'ib', 'ic', 'tr'):
        np.testing.assert_equal(getattr(vm, name), 0)
def test_vector_machine_set_values():
    """
    Test the set_values()
    """
    vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    # Flat array: four 1s, five 2s, six 3s — one run per u_* segment.
    packed = np.array([1] * 4 + [2] * 5 + [3] * 6)
    vm.set_values(values=packed, jl=1, ia=2, ib=3, ic=4, ua=5, ub=6, uc=7, tr=8)
    np.testing.assert_equal(vm.u_front_size, 4)
    np.testing.assert_equal(vm.u_middle_size, 5)
    np.testing.assert_equal(vm.u_back_size, 6)
    np.testing.assert_equal(vm.u_front, np.ones(4))
    np.testing.assert_equal(vm.u_middle, 2 * np.ones(5))
    np.testing.assert_equal(vm.u_back, 3 * np.ones(6))
    for name, value in (('jl', 1), ('ia', 2), ('ib', 3), ('ic', 4),
                        ('ua', 5), ('ub', 6), ('uc', 7), ('tr', 8)):
        np.testing.assert_equal(getattr(vm, name), value)
def test_vector_vector_machine_get_values():
    """
    Test get_values()
    """
    vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    # A freshly constructed machine flattens to 4 + 5 + 6 = 15 zeros.
    np.testing.assert_equal(vm.get_values(), np.zeros(15))
def test_vector_vector_machine_pack():
    """
    Test pack() and that the packed representation is picklable.
    """
    import pickle

    vector_machine = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    vector_machine.set_values(values=np.array([1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3]),
                              jl=1,
                              ia=2,
                              ib=3,
                              ic=4,
                              ua=5,
                              ub=6,
                              uc=7,
                              tr=8)
    np.testing.assert_equal(vector_machine.pack(),
                            [np.array([1, 1, 1, 1]), np.array([2, 2, 2, 2, 2]), np.array([3, 3, 3, 3, 3, 3]), 1, 2, 3,
                             4, 5, 6, 7, 8])
    # Bug fix: the original set pickle_test = True unconditionally AFTER the
    # try/except, so the assertion below could never fail even when pickling
    # raised. Set the flag to True only on success.
    try:
        pickle.dumps(vector_machine.pack())
        pickle_test = True
    except pickle.PicklingError:
        pickle_test = False
    np.testing.assert_equal(pickle_test, True)
def test_vector_vector_machine_unpack():
    """
    Test unpack()
    """
    vm = VectorMachine(u_front_size=4, u_middle_size=5, u_back_size=6)
    # Unpack the list form produced by pack(): three arrays then 8 scalars.
    vm.unpack(
        values=[np.array([1, 1, 1, 1]), np.array([2, 2, 2, 2, 2]),
                np.array([3, 3, 3, 3, 3, 3]), 1, 2, 3, 4, 5, 6, 7, 8])
    np.testing.assert_equal(vm.u_front_size, 4)
    np.testing.assert_equal(vm.u_middle_size, 5)
    np.testing.assert_equal(vm.u_back_size, 6)
    np.testing.assert_equal(vm.u_front, np.ones(4))
    np.testing.assert_equal(vm.u_middle, 2 * np.ones(5))
    np.testing.assert_equal(vm.u_back, 3 * np.ones(6))
    for name, value in (('jl', 1), ('ia', 2), ('ib', 3), ('ic', 4),
                        ('ua', 5), ('ub', 6), ('uc', 7), ('tr', 8)):
        np.testing.assert_equal(getattr(vm, name), value)
| 37.376712
| 118
| 0.687466
| 1,733
| 10,914
| 4
| 0.04674
| 0.346942
| 0.21206
| 0.282747
| 0.903203
| 0.885747
| 0.884737
| 0.883583
| 0.792556
| 0.780006
| 0
| 0.047037
| 0.197453
| 10,914
| 291
| 119
| 37.505155
| 0.744377
| 0.017592
| 0
| 0.701422
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.464455
| 1
| 0.047393
| false
| 0
| 0.014218
| 0
| 0.061611
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
16a2199115a6c646af670f35f03dc2115566c96a
| 1,532
|
py
|
Python
|
tests/test_trimf.py
|
amirrr/pyfuzzy
|
97e88f7b014e9791fb0a3d07d0727867d27ea9d3
|
[
"Apache-2.0"
] | 9
|
2019-04-11T07:03:04.000Z
|
2021-05-12T13:01:53.000Z
|
tests/test_trimf.py
|
amirrr/pyfuzzy
|
97e88f7b014e9791fb0a3d07d0727867d27ea9d3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_trimf.py
|
amirrr/pyfuzzy
|
97e88f7b014e9791fb0a3d07d0727867d27ea9d3
|
[
"Apache-2.0"
] | 13
|
2019-04-07T19:19:03.000Z
|
2019-08-20T11:53:23.000Z
|
import unittest
from pyfuzzy.mf import trimf
class TriMFTestCase(unittest.TestCase):
    """Membership values and argument validation for the triangular MF."""

    def test_Trimf1_membership_function(self):
        degree = trimf.trimf(-1, [3.0, 6.0, 8.0])
        self.assertEqual(degree, 0.0)

    def test_Trimf2_membership_function(self):
        degree = trimf.trimf(0, [3.0, 6.0, 8.0])
        self.assertEqual(degree, 0.0)

    def test_Trimf3_membership_function(self):
        degree = trimf.trimf(2, [3.0, 6.0, 8.0])
        self.assertEqual(degree, 0.0)

    def test_Trimf4_membership_function(self):
        degree = trimf.trimf(2, [3.0, 6.0, 10.0])
        self.assertEqual(degree, 0.0)

    def test_Trimf5_membership_function(self):
        degree = trimf.trimf(2, [3.0, 6.0, 100.0])
        self.assertEqual(degree, 0.0)

    def test_Trimf6_membership_function(self):
        # At the triangle's peak the membership degree is exactly 1.
        degree = trimf.trimf(6, [3.0, 6.0, 8.0])
        self.assertEqual(degree, 1)

    def test_Trimf7_membership_function(self):
        degree = trimf.trimf(1001, [3.0, 5.0, 1000.0])
        self.assertEqual(degree, 0)

    def test_Trimf8_membership_function(self):
        with self.assertRaises(TypeError):
            trimf.trimf(0, [1, 0.0, 6.1])

    def test_Trimf9_membership_function(self):
        with self.assertRaises(TypeError):
            trimf.trimf(0, [1])

    def test_Trimf10_membership_function(self):
        with self.assertRaises(TypeError):
            trimf.trimf(0, [1, 2.5])

    def test_Trimf11_membership_function(self):
        with self.assertRaises(TypeError):
            trimf.trimf(0, [1, 30.5, 500])

    def test_Trimf12_membership_function(self):
        with self.assertRaises(TypeError):
            trimf.trimf(0, [1, 2.5, 300, 900])
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 34.044444
| 80
| 0.679504
| 230
| 1,532
| 4.334783
| 0.208696
| 0.084253
| 0.264794
| 0.312939
| 0.742227
| 0.722167
| 0.716148
| 0.527583
| 0.527583
| 0.527583
| 0
| 0.090623
| 0.171671
| 1,532
| 44
| 81
| 34.818182
| 0.695035
| 0
| 0
| 0
| 0
| 0
| 0.005222
| 0
| 0
| 0
| 0
| 0
| 0.413793
| 1
| 0.413793
| false
| 0
| 0.068966
| 0
| 0.517241
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
16aed43fa3feb5d2f68182721fb810e5a600a9f7
| 4,676
|
py
|
Python
|
tests/defines.py
|
nickzhuang0613/lvgl
|
b7e6f6763d6fa967bacaca5a60ee33321f73c8a5
|
[
"MIT"
] | 15
|
2021-06-15T08:01:41.000Z
|
2022-02-18T11:07:31.000Z
|
tests/defines.py
|
nickzhuang0613/lvgl
|
b7e6f6763d6fa967bacaca5a60ee33321f73c8a5
|
[
"MIT"
] | null | null | null |
tests/defines.py
|
nickzhuang0613/lvgl
|
b7e6f6763d6fa967bacaca5a60ee33321f73c8a5
|
[
"MIT"
] | 5
|
2021-07-02T00:48:33.000Z
|
2022-02-10T02:23:21.000Z
|
# Smallest footprint: 1-bit colour, 64 KiB heap, complex drawing disabled.
minimal_monochrome = {
    "LV_COLOR_DEPTH": 1,
    "LV_MEM_SIZE": 64 * 1024,
    "LV_DPI_DEF": 40,
    "LV_DRAW_COMPLEX": 0,
    "LV_USE_METER": 0,
    "LV_USE_LOG": 1,
    "LV_USE_ASSERT_NULL": 0,
    "LV_USE_ASSERT_MALLOC": 0,
    "LV_USE_ASSERT_MEM_INTEGRITY": 0,
    "LV_USE_ASSERT_OBJ": 0,
    "LV_USE_ASSERT_STYLE": 0,
    "LV_USE_USER_DATA": 0,
    "LV_FONT_UNSCII_8": 1,
    "LV_USE_BIDI": 0,
    "LV_USE_ARABIC_PERSIAN_CHARS": 0,
    "LV_BUILD_EXAMPLES": 1,
    "LV_FONT_DEFAULT": "\\\"&lv_font_montserrat_14\\\"",
}
# 8-bit colour with complex drawing and sub-pixel font rendering enabled.
normal_8bit = {
    "LV_COLOR_DEPTH": 8,
    "LV_MEM_SIZE": 64 * 1024,
    "LV_DPI_DEF": 40,
    "LV_DRAW_COMPLEX": 1,
    "LV_USE_LOG": 1,
    "LV_USE_ASSERT_NULL": 0,
    "LV_USE_ASSERT_MALLOC": 0,
    "LV_USE_ASSERT_MEM_INTEGRITY": 0,
    "LV_USE_ASSERT_OBJ": 0,
    "LV_USE_ASSERT_STYLE": 0,
    "LV_USE_USER_DATA": 0,
    "LV_FONT_UNSCII_8": 1,
    "LV_USE_FONT_SUBPX": 1,
    "LV_USE_BIDI": 0,
    "LV_USE_ARABIC_PERSIAN_CHARS": 0,
    "LV_BUILD_EXAMPLES": 1,
    "LV_FONT_DEFAULT": "\\\"&lv_font_montserrat_14\\\"",
}
# Minimal 16-bit configuration using the custom (libc) memory allocator.
minimal_16bit = {
    "LV_COLOR_DEPTH": 16,
    "LV_MEM_CUSTOM": 1,
    "LV_DPI_DEF": 40,
    "LV_DRAW_COMPLEX": 0,
    "LV_USE_METER": 0,
    "LV_USE_LOG": 1,
    "LV_USE_ASSERT_NULL": 0,
    "LV_USE_ASSERT_MALLOC": 0,
    "LV_USE_ASSERT_MEM_INTEGRITY": 0,
    "LV_USE_ASSERT_OBJ": 0,
    "LV_USE_ASSERT_STYLE": 0,
    "LV_USE_USER_DATA": 0,
    "LV_FONT_UNSCII_8": 1,
    "LV_USE_BIDI": 0,
    "LV_USE_ARABIC_PERSIAN_CHARS": 0,
    "LV_BUILD_EXAMPLES": 1,
    "LV_FONT_DEFAULT": "\\\"&lv_font_montserrat_14\\\"",
}
# 16-bit colour with byte-swapped pixel format (LV_COLOR_16_SWAP).
normal_16bit_swap = {
    "LV_COLOR_DEPTH": 16,
    "LV_COLOR_16_SWAP": 1,
    "LV_MEM_SIZE": 64 * 1024,
    "LV_DPI_DEF": 40,
    "LV_DRAW_COMPLEX": 1,
    "LV_USE_LOG": 1,
    "LV_USE_ASSERT_NULL": 0,
    "LV_USE_ASSERT_MALLOC": 0,
    "LV_USE_ASSERT_MEM_INTEGRITY": 0,
    "LV_USE_ASSERT_OBJ": 0,
    "LV_USE_ASSERT_STYLE": 0,
    "LV_USE_USER_DATA": 0,
    "LV_FONT_UNSCII_8": 1,
    "LV_USE_FONT_SUBPX": 1,
    "LV_USE_BIDI": 0,
    "LV_USE_ARABIC_PERSIAN_CHARS": 0,
    "LV_BUILD_EXAMPLES": 1,
    "LV_FONT_DEFAULT": "\\\"&lv_font_montserrat_14\\\"",
}
# Full-feature 32-bit configuration: every font, monitor and assert enabled.
# Fixed: the original dict listed "LV_USE_PERF_MONITOR" twice (same value);
# the duplicate key has been removed.
full_32bit = {
    "LV_COLOR_DEPTH": 32,
    "LV_MEM_SIZE": 8 * 1024 * 1024,
    "LV_DPI_DEF": 160,
    "LV_DRAW_COMPLEX": 1,
    "LV_SHADOW_CACHE_SIZE": 1,
    "LV_IMG_CACHE_DEF_SIZE": 32,
    "LV_USE_LOG": 1,
    "LV_USE_LOG_LEVEL": "LV_LOG_LEVEL_TRACE",
    "LV_LOG_PRINTF": 1,
    "LV_USE_FONT_SUBPX": 1,
    "LV_FONT_SUBPX_BGR": 1,
    "LV_USE_PERF_MONITOR": 1,
    "LV_USE_ASSERT_NULL": 1,
    "LV_USE_ASSERT_MALLOC": 1,
    "LV_USE_ASSERT_MEM_INTEGRITY": 1,
    "LV_USE_ASSERT_OBJ": 1,
    "LV_USE_ASSERT_STYLE": 1,
    "LV_USE_USER_DATA": 1,
    "LV_USE_LARGE_COORD": 1,
    "LV_FONT_MONTSERRAT_8": 1,
    "LV_FONT_MONTSERRAT_10": 1,
    "LV_FONT_MONTSERRAT_12": 1,
    "LV_FONT_MONTSERRAT_14": 1,
    "LV_FONT_MONTSERRAT_16": 1,
    "LV_FONT_MONTSERRAT_18": 1,
    "LV_FONT_MONTSERRAT_20": 1,
    "LV_FONT_MONTSERRAT_22": 1,
    "LV_FONT_MONTSERRAT_24": 1,
    "LV_FONT_MONTSERRAT_26": 1,
    "LV_FONT_MONTSERRAT_28": 1,
    "LV_FONT_MONTSERRAT_30": 1,
    "LV_FONT_MONTSERRAT_32": 1,
    "LV_FONT_MONTSERRAT_34": 1,
    "LV_FONT_MONTSERRAT_36": 1,
    "LV_FONT_MONTSERRAT_38": 1,
    "LV_FONT_MONTSERRAT_40": 1,
    "LV_FONT_MONTSERRAT_42": 1,
    "LV_FONT_MONTSERRAT_44": 1,
    "LV_FONT_MONTSERRAT_46": 1,
    "LV_FONT_MONTSERRAT_48": 1,
    "LV_FONT_MONTSERRAT_12_SUBPX": 1,
    "LV_FONT_MONTSERRAT_28_COMPRESSED": 1,
    "LV_FONT_DEJAVU_16_PERSIAN_HEBREW": 1,
    "LV_FONT_SIMSUN_16_CJK": 1,
    "LV_FONT_UNSCII_8": 1,
    "LV_FONT_UNSCII_16": 1,
    "LV_FONT_FMT_TXT_LARGE": 1,
    "LV_USE_FONT_COMPRESSED": 1,
    "LV_USE_BIDI": 1,
    "LV_USE_ARABIC_PERSIAN_CHARS": 1,
    "LV_USE_MEM_MONITOR": 1,
    "LV_LABEL_TEXT_SELECTION": 1,
    "LV_BUILD_EXAMPLES": 1,
    "LV_FONT_DEFAULT": "\\\"&lv_font_montserrat_24\\\"",
}
# Configuration used for the test builds: 32-bit, asserts off, subset of fonts.
test = {
    "LV_COLOR_DEPTH": 32,
    "LV_MEM_SIZE": 2 * 1024 * 1024,
    "LV_SHADOW_CACHE_SIZE": 10 * 1024,
    "LV_IMG_CACHE_DEF_SIZE": 32,
    "LV_USE_LOG": 1,
    "LV_LOG_PRINTF": 1,
    "LV_USE_FONT_SUBPX": 1,
    "LV_FONT_SUBPX_BGR": 1,
    "LV_USE_ASSERT_NULL": 0,
    "LV_USE_ASSERT_MALLOC": 0,
    "LV_USE_ASSERT_MEM_INTEGRITY": 0,
    "LV_USE_ASSERT_OBJ": 0,
    "LV_USE_ASSERT_STYLE": 0,
    "LV_USE_USER_DATA": 1,
    "LV_USE_LARGE_COORD": 1,
    "LV_FONT_MONTSERRAT_14": 1,
    "LV_FONT_MONTSERRAT_16": 1,
    "LV_FONT_MONTSERRAT_18": 1,
    "LV_FONT_MONTSERRAT_24": 1,
    "LV_FONT_MONTSERRAT_48": 1,
    "LV_FONT_MONTSERRAT_12_SUBPX": 1,
    "LV_FONT_MONTSERRAT_28_COMPRESSED": 1,
    "LV_FONT_DEJAVU_16_PERSIAN_HEBREW": 1,
    "LV_FONT_SIMSUN_16_CJK": 1,
    "LV_FONT_UNSCII_8": 1,
    "LV_FONT_UNSCII_16": 1,
    "LV_FONT_FMT_TXT_LARGE": 1,
    "LV_USE_FONT_COMPRESSED": 1,
    "LV_USE_BIDI": 1,
    "LV_USE_ARABIC_PERSIAN_CHARS": 1,
    "LV_LABEL_TEXT_SELECTION": 1,
    "LV_BUILD_EXAMPLES": 1,
    "LV_FONT_DEFAULT": "\\\"&lv_font_montserrat_14\\\"",
}
| 20.155172
| 53
| 0.697819
| 802
| 4,676
| 3.477556
| 0.110973
| 0.097885
| 0.120473
| 0.182861
| 0.831481
| 0.788813
| 0.787738
| 0.771244
| 0.757978
| 0.744353
| 0
| 0.074279
| 0.147776
| 4,676
| 231
| 54
| 20.242424
| 0.625596
| 0
| 0
| 0.733728
| 0
| 0
| 0.616898
| 0.266738
| 0
| 0
| 0
| 0
| 0.177515
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
16cd7f57044fb6872e13648537a71642276382c2
| 82,516
|
py
|
Python
|
driver/basic_predict/inf_inwindow_slice_2d.py
|
qgking/DASC_COVID19
|
3300516b1d0e9896e2fb2ffda8527e0e1a1fcf2c
|
[
"MIT"
] | 4
|
2021-04-21T05:09:49.000Z
|
2022-01-17T13:02:45.000Z
|
driver/basic_predict/inf_inwindow_slice_2d.py
|
qgking/DASC_COVID19
|
3300516b1d0e9896e2fb2ffda8527e0e1a1fcf2c
|
[
"MIT"
] | null | null | null |
driver/basic_predict/inf_inwindow_slice_2d.py
|
qgking/DASC_COVID19
|
3300516b1d0e9896e2fb2ffda8527e0e1a1fcf2c
|
[
"MIT"
] | 1
|
2021-07-08T02:20:43.000Z
|
2021-07-08T02:20:43.000Z
|
# -*- coding: utf-8 -*-
# @Time : 20/5/23 21:31
# @Author : qgking
# @Email : qgking@tju.edu.cn
# @Software: PyCharm
# @Desc : inf_inwindow_slice_2d.py
import matplotlib
import sys
sys.path.extend(["../../", "../", "./"])
from common.base_utls import *
from common.data_utils import *
import torch
from skimage.transform import resize
import torch.nn.functional as F
from torch.cuda import empty_cache
def impaintshow(img, seg, preds, output_dir, fname):
    """Takes raw image img, seg in range 0-2, list of predictions in range 0-2"""
    # Overlays the class-1 prediction (green) and the prediction/ground-truth
    # disagreement (red) on the raw slice, then saves the figure as a PNG in
    # output_dir. NOTE: pyplot is a stateful API, so the call order below
    # matters and must not be rearranged.
    img = np.squeeze(img)
    seg = np.squeeze(seg)
    preds = np.squeeze(preds)
    fig = plt.figure()
    ALPHA = 0.8  # transparency of the overlay layers
    n_plots = 1  # NOTE(review): unused local — presumably leftover; confirm
    plt.set_cmap('gray')
    pre = preds == 1
    # Voxels where the prediction and the ground truth disagree on class 1.
    diffinf = np.logical_xor(seg == 1, pre)
    title = ""
    plt.title(title)
    plt.imshow(img)
    # plt.hold(True)
    # Liver prediction
    plt.imshow(np.ma.masked_where(pre == 0, pre), cmap="Greens", vmin=0.1, vmax=1.2, alpha=ALPHA)
    # plt.hold(True)
    # Lesion prediction
    plt.imshow(np.ma.masked_where(diffinf == 0, diffinf), cmap="Reds", vmin=0.1, vmax=1.2, alpha=ALPHA)
    # plt.hold(True)
    plt.axis('off')
    # Shrink the canvas and strip ticks/margins so the saved PNG is just the image.
    fig.set_size_inches(img.shape[0] / 100.0 / 3.0, img.shape[0] / 100.0 / 3.0)
    plt.gca().xaxis.set_major_locator(plt.NullLocator())
    plt.gca().yaxis.set_major_locator(plt.NullLocator())
    plt.subplots_adjust(top=1, bottom=0, left=0, right=1, hspace=0, wspace=0)
    plt.margins(0, 0)
    plt.savefig(join(output_dir, fname + '.png'), transparent=True, dpi=300, pad_inches=0, bbox_inches='tight')
    plt.close()
# crop x and y
def predict_inf_inwindow_slide_2d(seg_help, model, covid_test_data, thres=0.7):
    """Run sliding-window 2D inference over each test volume and score it.

    For every file in covid_test_data the lung VOI is cropped, the model is
    applied over overlapping (patch_x, patch_y) windows slice by slice, the
    softmax scores are averaged over overlaps, thresholded at `thres`,
    dilated once, and compared against the infection ground truth. Returns
    the mean DICESCORE across all volumes. Side effects: writes preview
    images into <tmp_dir>/infer, zips that directory and then removes it.
    """
    seg_help.model.eval()
    model.eval()
    # Window extents come from the configured patch size; img_deps is the
    # number of neighbouring slices fed to the 2D model as channels.
    img_cols = seg_help.config.patch_x
    img_rows = seg_help.config.patch_y
    img_deps = seg_help.config.patch_z
    # Keys only — used below to pick which metrics to accumulate.
    segmentation_metrics = {
        'Jaccard': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    output_dir = join(seg_help.config.tmp_dir, 'infer')
    mkdir_if_not_exist([output_dir])
    for file_path in covid_test_data:
        # print(file_path)
        file_name = basename(file_path)
        print(file_name)
        scans = np.load(file_path)
        img = scans[0]
        current_test = img.copy()
        # The two datasets store lung mask and infection mask in swapped
        # channels; nrow controls the preview-image grid width.
        if 'MosMedData' in file_path:
            lung = scans[2]
            infection = scans[1]
            nrow = 8
        elif 'COVID-19-CT' in file_path:
            lung = scans[1]
            infection = scans[2]
            nrow = 16
        # Bounding box of the lung mask, padded by 3 voxels on each side.
        minx, maxx, miny, maxy, minz, maxz = min_max_voi(lung, superior=3, inferior=3)
        x = current_test.shape[0]
        y = current_test.shape[1]
        z = current_test.shape[2]
        # Accumulators over the cropped VOI: summed softmax scores and the
        # number of windows that covered each voxel.
        score = np.zeros((seg_help.config.classes, maxx - minx, maxy - miny, maxz - minz), dtype='float32')
        score_num = np.zeros((seg_help.config.classes, maxx - minx, maxy - miny, maxz - minz), dtype='int16')
        current_test_cut = current_test[minx:maxx, miny:maxy, minz:maxz]
        # normalization
        # current_test_cut = (current_test_cut - seg_help.mean) / seg_help.std
        current_inf_cut = infection[minx:maxx, miny:maxy, minz:maxz]
        num = 0
        flo = int(np.floor(img_deps / 2))
        # Choose x/y strides so the windows tile the VOI with overlap of at
        # least half a window.
        over = math.ceil(score.shape[1] / img_cols) + 1
        xstep = math.ceil((over * img_cols - score.shape[1]) / (over - 1))
        xstep = max(img_cols // 2, xstep)
        over = math.ceil(score.shape[2] / img_rows) + 1
        ystep = math.ceil((over * img_rows - score.shape[2]) / (over - 1))
        ystep = max(img_rows // 2, ystep)
        x_slices = np.arange(0, score.shape[1], xstep)
        y_slices = np.arange(0, score.shape[2], ystep)
        # print('x step %d, y step %d' % (xstep, ystep))
        # print('total slices %d' % (len(x_slices) * len(x_slices) * score.shape[-1]))
        for i in range(len(x_slices)):
            deep = x_slices[i]
            # Clamp the window start so the window never runs off the VOI.
            cols = deep if deep + img_cols < score.shape[1] else score.shape[1] - img_cols
            for j in range(len(y_slices)):
                height = y_slices[j]
                rows = height if height + img_rows < score.shape[2] else score.shape[2] - img_rows
                # Walk the slice axis, keeping flo slices of context each side.
                for c in range(flo, score.shape[-1] - flo, 1):
                    cropp_img = current_test_cut[cols:cols + img_cols, rows:rows + img_rows,
                                c - flo: c + img_deps - flo]
                    cropp_infection = current_inf_cut[cols:cols + img_cols, rows:rows + img_rows,
                                      c - flo: c + img_deps - flo]
                    assert cropp_img.shape == (img_cols, img_rows, img_deps)
                    num += 1
                    # (H, W, D) -> (1, D, H, W): slices become channels.
                    box_test = torch.from_numpy(np.transpose(cropp_img, (2, 0, 1)))
                    box_test = torch.unsqueeze(box_test, dim=0)
                    box_test = box_test.to(seg_help.equipment).float()
                    cropp_infection = torch.from_numpy(np.transpose(cropp_infection, (2, 0, 1)))
                    cropp_infection = torch.unsqueeze(cropp_infection, dim=0)
                    cropp_infection = cropp_infection.to(seg_help.equipment).float()
                    with torch.no_grad():
                        patch_test_mask, _, _ = model(box_test)
                        # Some models return a tuple/list; take the main head.
                        if isinstance(patch_test_mask, (tuple, list)):
                            patch_test_mask = patch_test_mask[0]
                        patch_test_mask = torch.softmax(patch_test_mask, dim=1)
                    # Accumulate scores and coverage counts for averaging.
                    score[:, cols:cols + img_cols, rows:rows + img_rows,
                    c] += patch_test_mask.squeeze().detach().cpu().numpy()
                    score_num[:, cols:cols + img_cols, rows:rows + img_rows,
                    c] += 1
                    # visual_batch(box_test, output_dir, file_name[:-4] + '_' + str(num) + "_images",
                    #              channel=1,
                    #              nrow=nrow)
                    # visual_batch(cropp_infection, output_dir, file_name[:-4] + '_' + str(num) + "_label",
                    #              channel=1, nrow=nrow)
                    # prob_max = patch_test_mask[:, 1, :, :]
                    # image_save = prob_max.unsqueeze(1).contiguous()
                    # visual_batch(image_save, output_dir, file_name[:-4] + '_' + str(num) + "_predict",
                    #              channel=1,
                    #              nrow=nrow)
        score_ = score.copy()
        # Avoid division by zero for voxels no window covered.
        score_num = np.where(score_num == 0, 1, score_num)
        score_ = (score_ / (score_num)).copy()
        # Probability map of the last (foreground) class.
        predict = score_[seg_help.config.classes - 1]
        score_final = np.zeros((x, y, z), dtype='float32')
        score_final[minx:maxx, miny:maxy, minz:maxz] = predict
        # Threshold, then dilate once to close small gaps in the mask.
        fusion_final_predict = np.where(score_final > thres, 1, 0)
        fine_pred = ndimage.binary_dilation(fusion_final_predict, iterations=1).astype(fusion_final_predict.dtype)
        score_final_torch = torch.from_numpy(score_final[minx:maxx, miny:maxy, minz:maxz]).unsqueeze(0).unsqueeze(
            0).contiguous()
        fine_pred_torch = torch.from_numpy(fine_pred[minx:maxx, miny:maxy, minz:maxz]).unsqueeze(0).unsqueeze(
            0).contiguous()
        imm = img[minx: maxx, miny: maxy, minz: maxz]
        img_torch = torch.from_numpy(imm).unsqueeze(0).unsqueeze(0).contiguous()
        infection_torch = torch.from_numpy(infection[minx:maxx, miny:maxy, minz:maxz]).unsqueeze(0).unsqueeze(
            0).contiguous()
        # Save preview grids of the input, label, prediction and score map.
        visual_batch(img_torch, output_dir, file_name[:-4] + "_images", channel=1, nrow=nrow)
        visual_batch(infection_torch, output_dir, file_name[:-4] + "_label", channel=1, nrow=nrow)
        visual_batch(fine_pred_torch, output_dir, file_name[:-4] + "_predict", channel=1, nrow=nrow)
        visual_batch(score_final_torch, output_dir, file_name[:-4] + "_predict_score", channel=1,
                     nrow=nrow)
        # Per-volume metrics against the full (uncropped) infection mask.
        scores = compute_all_metric_for_single_seg(infection, fine_pred)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
    # Average each metric across volumes and report.
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += ('\n' + m + ': {val:.9f} '.format(val=lesion_segmentation_metrics[m]))
    print(info)
    # Archive the preview images and clean up the working directory.
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total resize x and y. not crop x and y
def predict_inf_inwindow_2d(seg_help, model, covid_test_data, thres=0.7):
    """Sliding z-window 2D inference on lung-cropped, x/y-resized volumes.

    Each test volume is cropped to the lung VOI in x/y, resized to
    (patch_x, patch_y) while keeping the original z extent, and a window of
    ``patch_z`` consecutive slices centred on every z position inside the
    VOI is fed through ``seg_help.model``.  The accumulated softmax scores
    are upsampled back to the crop resolution, thresholded at ``thres`` and
    compared with the ground-truth infection mask.

    Args:
        seg_help: project helper exposing the model, config and device.
        model: companion network; only switched to eval mode here.
        covid_test_data: iterable of paths to stacked ``.npy`` test volumes.
        thres: probability threshold for the binary prediction.

    Returns:
        Mean DICESCORE over all evaluated volumes.
    """
    seg_help.model.eval()
    model.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    resize_z = seg_help.config.patch_z
    # The keys enumerate which metrics are aggregated; the values are unused.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    output_dir = join(seg_help.config.tmp_dir, 'infer')
    mkdir_if_not_exist([output_dir])
    for file_path in covid_test_data:
        file_name = basename(file_path)
        print(file_name)
        scans = np.load(file_path)
        img = scans[0]
        # Channel layout of the stacked .npy file differs per dataset.
        if 'MosMedData' in file_path:
            lung = scans[2]
            infection = scans[1]
            nrow = 8
        elif 'COVID-19-CT' in file_path:
            lung = scans[1]
            infection = scans[2]
            nrow = 16
        else:
            # Previously this fell through with unbound locals (NameError).
            raise ValueError('unrecognized dataset in path: %s' % file_path)
        minx, maxx, miny, maxy, minz, maxz = min_max_voi(lung, superior=3, inferior=3)
        print((minx, maxx, miny, maxy, minz, maxz))
        cropped_im = img[minx: maxx, miny: maxy, :]
        cropped_if = infection[minx: maxx, miny: maxy, :]
        flo = int(np.floor(resize_z / 2))
        score = np.zeros((seg_help.config.classes, resize_x, resize_y, img.shape[-1]), dtype='float32')
        resized_img = resize(cropped_im, (resize_x, resize_y, img.shape[-1]), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        # Clamp the z range so the slice window never leaves the volume.
        minz = max(flo, minz)
        maxz = min(cropped_if.shape[-1] - flo, maxz)
        for c in range(minz, maxz, 1):
            # Window of resize_z consecutive slices centred (floor) on c;
            # the slices become the network's input channels.
            cropp_img = resized_img[:, :, c - flo: c + resize_z - flo].copy()
            cropp_img = np.transpose(cropp_img, (2, 0, 1))
            box_test = torch.from_numpy(np.expand_dims(cropp_img, 0))
            box_test = box_test.to(seg_help.equipment).float()
            with torch.no_grad():
                logits, _, _ = seg_help.model(box_test)
            if isinstance(logits, (tuple, list)):
                logits = logits[0]
            patch_test_mask = torch.softmax(logits, dim=1)
            score[:, :, :, c] += patch_test_mask.squeeze().detach().cpu().numpy()
            del box_test
        # Foreground probability map at the resized resolution, upsampled
        # back to the crop resolution with trilinear interpolation.
        predict = score[1]
        sc = np.zeros((1, 1, predict.shape[0], predict.shape[1], predict.shape[2]), dtype='float32')
        sc[0, 0, :, :, :] = predict
        up_predict = F.interpolate(torch.from_numpy(sc),
                                   size=(cropped_im.shape[0], cropped_im.shape[1], cropped_im.shape[2]),
                                   mode='trilinear',
                                   align_corners=True)
        up_predict = torch.squeeze(up_predict)
        prob_crop = torch.clamp(up_predict, 0, 1).cpu().numpy()
        # BUGFIX: this volume used dtype='int16', which truncated the [0, 1]
        # probabilities to integers, so `score_final > thres` could only be
        # true where the probability was exactly 1.0.
        score_final = np.zeros((img.shape[0], img.shape[1], img.shape[2]), dtype='float32')
        score_final[minx: maxx, miny: maxy, :] = prob_crop
        fine_pred = np.where(score_final > thres, 1, 0)
        score_final_torch = torch.from_numpy(score_final[minx: maxx, miny: maxy, minz: maxz]).unsqueeze(0).unsqueeze(
            0).contiguous()
        fine_pred_torch = torch.from_numpy(fine_pred[minx: maxx, miny: maxy, minz: maxz]).unsqueeze(0).unsqueeze(
            0).contiguous()
        imm = img[minx: maxx, miny: maxy, minz: maxz]
        img_torch = torch.from_numpy(imm).unsqueeze(0).unsqueeze(0).contiguous()
        infection_torch = torch.from_numpy(infection[minx: maxx, miny: maxy, minz: maxz]).unsqueeze(0).unsqueeze(
            0).contiguous()
        visual_batch(img_torch, output_dir, file_name[:-4] + "_images", channel=1, nrow=nrow)
        visual_batch(infection_torch, output_dir, file_name[:-4] + "_label", channel=1, nrow=nrow)
        visual_batch(fine_pred_torch, output_dir, file_name[:-4] + "_predict", channel=1, nrow=nrow)
        visual_batch(score_final_torch, output_dir, file_name[:-4] + "_predict_score", channel=1,
                     nrow=nrow)
        # Metrics are computed on the full original-resolution volumes.
        scores = compute_all_metric_for_single_seg(infection, fine_pred)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += ('\n' + m + ': {val:.9f} '.format(val=lesion_segmentation_metrics[m]))
    print(info)
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total resize x and y. not crop x and y; slices processed as a sequence
def predict_inf_inwindow_2d_seq(seg_help, model, covid_test_data, thres=0.7):
    """Slice-sequence 2D inference over lung-cropped, resized volumes.

    The lung VOI of each test volume is cropped and resized to
    (patch_x, patch_y) per slice; all axial slices are pushed through
    ``model`` in mini-batches.  Per-slice foreground probabilities are
    thresholded at ``thres``, dilated once, upsampled back to the original
    crop size and scored against the ground-truth infection mask.  CAM and
    prediction grids are written to a temporary dir, zipped, then removed.

    Returns the mean DICESCORE over all test volumes.
    """
    seg_help.model.eval()
    model.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Keys enumerate the metrics to aggregate; the zero values are unused.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    output_dir = join(seg_help.config.tmp_dir, 'infer')
    mkdir_if_not_exist([output_dir])
    for file_path in covid_test_data:
        # print(file_path)
        file_name = basename(file_path)
        scans = np.load(file_path)
        img = scans[0]
        # Channel layout of the stacked .npy file differs per dataset.
        # NOTE(review): lung/infection/nrow stay unbound when the path
        # matches neither dataset name — callers must guarantee one of them.
        if 'MosMedData' in file_path:
            lung = scans[2]
            infection = scans[1]
            nrow = 8
        elif 'COVID-19-CT' in file_path:
            lung = scans[1]
            infection = scans[2]
            nrow = 16
        minx, maxx, miny, maxy, minz, maxz = min_max_voi(lung, superior=3, inferior=3)
        cropped_im = img[minx: maxx, miny: maxy, minz: maxz]
        cropped_if = infection[minx: maxx, miny: maxy, minz: maxz]
        # Cubic resize for the image, nearest-neighbour for the label mask.
        resized_img = resize(cropped_im, (resize_x, resize_y, maxz - minz), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y, maxz - minz), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img)
        su_label = torch.from_numpy(resized_infection)
        # Move z to the front: (slices, H, W).
        su_data = su_data.permute(2, 0, 1).squeeze(0)
        su_label = su_label.permute(2, 0, 1).squeeze(0)
        total_slices = su_data.size(0)
        patches_img = []
        cam_total = []
        seg_total = []
        for s in range(total_slices):
            cropp_img = su_data[s, :, :].unsqueeze(0)
            cropp_img = cropp_img.unsqueeze(0)
            patches_img.append(cropp_img)
            # Flush the accumulated batch through the model.
            # NOTE(review): this also fires at s == 0 (a batch of one),
            # so the first "batch" is a single slice.
            if s % seg_help.config.test_batch_size == 0 or s == total_slices - 1:
                patches_img = torch.cat(patches_img, dim=0)
                patches_img = patches_img.to(seg_help.equipment).float()
                with torch.no_grad():
                    bb, cc, xx, yy = patches_img.size()
                    patches_img = patches_img.view(bb * cc, 1, xx, yy)
                    out, feature, cam = model(patches_img)
                    cam_total.append(cam)
                    seg_total.append(out)
                del patches_img
                patches_img = []
        cam_total = torch.cat(cam_total, dim=0)
        seg_total = torch.cat(seg_total, dim=0)
        # NOTE(review): presumably u2net returns a list of side outputs and
        # the first one is the main head — confirm against the model API.
        if 'u2net' in seg_help.config.model:
            prob = torch.softmax(seg_total[0], dim=1)
        else:
            prob = torch.softmax(seg_total, dim=1)
        prob_max = prob[:, seg_help.config.classes - 1, :, :]
        # Back to a (1, 1, H, W, slices) volume and upsample to crop size.
        prob_max_up = prob_max.permute(1, 2, 0).unsqueeze(0).unsqueeze(0)
        prob_max_up = F.interpolate(prob_max_up,
                                    size=(cropped_im.shape[0], cropped_im.shape[1], maxz - minz),
                                    mode='trilinear',
                                    align_corners=True)
        prob_max_ = torch.squeeze(prob_max).cpu().numpy()
        fine_pred = np.where(prob_max_ > thres, 1, 0)
        prob_max_up = torch.squeeze(prob_max_up).cpu().numpy()
        fine_pred_up = np.where(prob_max_up > thres, 1, 0)
        # One dilation pass smooths the binary masks.
        fine_pred_final = ndimage.binary_dilation(fine_pred, iterations=1).astype(fine_pred.dtype)
        fine_pred_up_final = ndimage.binary_dilation(fine_pred_up, iterations=1).astype(fine_pred_up.dtype)
        # CAM overlay: normalize the foreground CAM, upsample, grid, blend.
        su_cam_g = cam_total.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
        su_cam_g = seg_help.max_norm_cam(su_cam_g)
        su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[1:], mode='bilinear', align_corners=True)
        su_cam_g = make_grid(su_cam_g, nrow=nrow, padding=2, pad_value=1)
        su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
        su_data_g = su_data.unsqueeze(1).detach()
        su_data_g = make_grid(su_data_g, nrow=nrow, padding=2, pad_value=1)
        su_data_g = su_data_g.unsqueeze(0)
        _, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
        visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                  join(output_dir, file_name[:-4] + "_cam"))
        image_save = prob_max.unsqueeze(1).contiguous()
        visual_batch(image_save, output_dir, file_name[:-4] + "_prob", channel=1, nrow=nrow)
        visual_batch(su_data, output_dir, file_name[:-4] + "_images", channel=1, nrow=nrow)
        visual_batch(su_label, output_dir, file_name[:-4] + "_label", channel=1, nrow=nrow)
        fine_pred_torch = torch.from_numpy(fine_pred_final).unsqueeze(1).contiguous()
        visual_batch(fine_pred_torch, output_dir, file_name[:-4] + "_predict", channel=1, nrow=nrow)
        # Metrics use the prediction pasted back at original resolution.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1], img.shape[2]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy, minz: maxz] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        # print(lesion_segmentation_scores[m])
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += ('\n' + m + ': {val:.9f} '.format(val=lesion_segmentation_metrics[m]))
    print(info)
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq(seg_help, model, covid_test_data, thres=0.7, epoch=0):
    """Per-slice 2D inference on pre-split (image, lung, infection) lists.

    Each 2D slice is cropped to its lung bounding box (padded by 10 px),
    resized to (patch_x, patch_y) and run through ``model``.  Predictions
    are thresholded at ``thres``, dilated once, upsampled back to the crop
    size and scored against the infection mask at original resolution.
    Slices with an empty infection mask are skipped.

    Returns the mean DICESCORE over all evaluated slices.
    """
    seg_help.model.eval()
    model.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Keys enumerate the metrics to aggregate; the zero values are unused.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    # covid_test_data is a (images, lungs, infections) triple of slice lists.
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    for iii in range(len(img_list_)):
        # print(file_path)
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        # Slices without any labelled infection are not scored.
        if np.sum(infection) == 0:
            continue
        nrow = 8
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        # Cubic resize for the image, nearest-neighbour for the label mask.
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img)
        su_label = torch.from_numpy(resized_infection)
        su_data = su_data.unsqueeze(0).unsqueeze(0)
        su_label = su_label.unsqueeze(0).unsqueeze(0)
        cropp_img = su_data
        # NOTE(review): the next line is a no-op self-assignment.
        cropp_img = cropp_img
        patches_img = cropp_img.to(seg_help.equipment).float()
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            out, feature, cam = model(patches_img)
        # NOTE(review): presumably u2net returns a list of side outputs and
        # the first one is the main head — confirm against the model API.
        if 'u2net' in seg_help.config.model:
            prob = torch.softmax(out[0], dim=1)
        else:
            prob = torch.softmax(out, dim=1)
        prob_max = prob[:, seg_help.config.classes - 1, :, :]
        prob_max_up = prob_max.unsqueeze(1)
        prob_max_up = F.interpolate(prob_max_up,
                                    size=(cropped_im.shape[0], cropped_im.shape[1]),
                                    mode='bilinear',
                                    align_corners=True)
        prob_max_ = torch.squeeze(prob_max).cpu().numpy()
        fine_pred = np.where(prob_max_ > thres, 1, 0)
        prob_max_up = torch.squeeze(prob_max_up).cpu().numpy()
        fine_pred_up = np.where(prob_max_up > thres, 1, 0)
        # One dilation pass smooths the binary masks; fine_pred_final is
        # only used by the commented-out visualization below.
        fine_pred_final = ndimage.binary_dilation(fine_pred, iterations=1).astype(fine_pred.dtype)
        fine_pred_up_final = ndimage.binary_dilation(fine_pred_up, iterations=1).astype(fine_pred_up.dtype)
        # CAM overlay of the foreground activation on the input grid.
        su_cam_g = cam.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
        su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[2:], mode='bilinear', align_corners=True)
        su_cam_g = make_grid(su_cam_g, nrow=nrow, pad_value=1)
        su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
        su_data_g = su_data.detach().clone()
        su_data_g = make_grid(su_data_g, nrow=nrow, pad_value=1)
        su_data_g = su_data_g.unsqueeze(0)
        su_cam_g = seg_help.max_norm_cam(su_cam_g)
        heatmap, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
        visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                  join(output_dir,
                       "test_" + str(iii) + '_cam'))
        image_save = prob_max.unsqueeze(0).contiguous()
        visual_batch(image_save, output_dir, "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
        visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
        visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
        # fine_pred_torch = torch.from_numpy(fine_pred_final).unsqueeze(0).contiguous()
        # visual_batch(fine_pred_torch, output_dir, str(iii) + "_test_predict", channel=1, nrow=nrow)
        # Metrics use the prediction pasted back at original resolution.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        # print(lesion_segmentation_scores[m])
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += ('\n' + m + ': {val:.9f} '.format(val=lesion_segmentation_metrics[m]))
    # print(info)
    print('Test Dice : {val:.9f} Thres {thre:.2f} '.format(val=lesion_segmentation_metrics['DICESCORE'], thre=thres))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq_weight(seg_help, model, covid_test_data, thres=0.7, epoch=0):
    """Per-slice 2D inference that sums all prediction heads before softmax.

    Like ``predict_inf_inwindow_2d_out_seq`` but ``model`` is expected to
    return ``(out1, out2, cam)`` where ``out1`` is an iterable of logits
    (presumably deep-supervision heads — confirm against the model API);
    the heads are summed and the combined logits are softmaxed.  The CAM
    overlay is only rendered when ``cam`` is not ``None``.

    Returns the mean DICESCORE over all evaluated slices.
    """
    seg_help.model.eval()
    model.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Keys enumerate the metrics to aggregate; the zero values are unused.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    # covid_test_data is a (images, lungs, infections) triple of slice lists.
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    for iii in range(len(img_list_)):
        # print(file_path)
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        # Slices without any labelled infection are not scored.
        if np.sum(infection) == 0:
            continue
        nrow = 8
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        # Cubic resize for the image, nearest-neighbour for the label mask.
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img)
        su_label = torch.from_numpy(resized_infection)
        su_data = su_data.unsqueeze(0).unsqueeze(0)
        su_label = su_label.unsqueeze(0).unsqueeze(0)
        cropp_img = su_data
        # NOTE(review): the next line is a no-op self-assignment.
        cropp_img = cropp_img
        patches_img = cropp_img.to(seg_help.equipment).float()
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            out1, out2, cam = model(patches_img)
        # Sum all heads in out1; the zero tensor is lazily allocated on
        # the first iteration.
        output = None
        for pred in out1:
            if output is None:
                output = torch.zeros(pred.size()).to(seg_help.equipment)
            output += pred
        prob = torch.softmax(output, dim=1)
        prob_max = prob[:, seg_help.config.classes - 1, :, :]
        prob_max_up = prob_max.unsqueeze(1)
        prob_max_up = F.interpolate(prob_max_up,
                                    size=(cropped_im.shape[0], cropped_im.shape[1]),
                                    mode='bilinear',
                                    align_corners=True)
        prob_max_ = torch.squeeze(prob_max).cpu().numpy()
        fine_pred = np.where(prob_max_ > thres, 1, 0)
        prob_max_up = torch.squeeze(prob_max_up).cpu().numpy()
        fine_pred_up = np.where(prob_max_up > thres, 1, 0)
        # One dilation pass smooths the binary masks; fine_pred_final is
        # only used by the commented-out visualization below.
        fine_pred_final = ndimage.binary_dilation(fine_pred, iterations=1).astype(fine_pred.dtype)
        fine_pred_up_final = ndimage.binary_dilation(fine_pred_up, iterations=1).astype(fine_pred_up.dtype)
        # CAM overlay (only when the model actually produced a CAM).
        if cam is not None:
            su_cam_g = cam.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
            su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[2:], mode='bilinear', align_corners=True)
            su_cam_g = make_grid(su_cam_g, nrow=nrow, pad_value=1)
            su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
            su_data_g = su_data.detach().clone()
            su_data_g = make_grid(su_data_g, nrow=nrow, pad_value=1)
            su_data_g = su_data_g.unsqueeze(0)
            su_cam_g = seg_help.max_norm_cam(su_cam_g)
            heatmap, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
            visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                      join(output_dir,
                           "test_" + str(iii) + '_cam'))
            image_save = prob_max.unsqueeze(0).contiguous()
            visual_batch(image_save, output_dir, "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
            visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
            visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
        # fine_pred_torch = torch.from_numpy(fine_pred_final).unsqueeze(0).contiguous()
        # visual_batch(fine_pred_torch, output_dir, str(iii) + "_test_predict", channel=1, nrow=nrow)
        # Metrics use the prediction pasted back at original resolution.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        # print(lesion_segmentation_scores[m])
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += ('\n' + m + ': {val:.9f} '.format(val=lesion_segmentation_metrics[m]))
    # print(info)
    print('Test Dice : {val:.9f} '.format(val=lesion_segmentation_metrics['DICESCORE']))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq_weight_cam(seg_help, model, model_cam, covid_test_data, thres=0.7, epoch=0):
    """Per-slice 2D inference with CAM guidance and flip test-time augmentation.

    ``model_cam.forward_cam`` produces a CAM prior that is fed into
    ``model`` alongside the image.  Three passes are ensembled: identity,
    horizontal flip (``fliplr``) and vertical flip (``flipvr``); each
    pass sums its prediction heads, the flipped outputs are un-flipped,
    and the three logit sums are added before softmax.

    Returns the mean DICESCORE over all evaluated slices.
    """
    seg_help.model.eval()
    model.eval()
    model_cam.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Keys enumerate the metrics to aggregate; the zero values are unused.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    # covid_test_data is a (images, lungs, infections) triple of slice lists.
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    for iii in range(len(img_list_)):
        # print(file_path)
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        # Slices without any labelled infection are not scored.
        if np.sum(infection) == 0:
            continue
        nrow = 8
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        # Cubic resize for the image, nearest-neighbour for the label mask.
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img)
        su_label = torch.from_numpy(resized_infection)
        su_data = su_data.unsqueeze(0).unsqueeze(0)
        su_label = su_label.unsqueeze(0).unsqueeze(0)
        cropp_img = su_data
        # NOTE(review): the next line is a no-op self-assignment.
        cropp_img = cropp_img
        patches_img = cropp_img.to(seg_help.equipment).float()
        # NOTE(review): only this first forward pass runs under no_grad();
        # the two flipped passes below build autograd graphs — confirm
        # whether that is intended (it costs memory, not correctness).
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            cam_p = model_cam.forward_cam(patches_img)
            out1, out2, cam = model(patches_img, cam_p)
        # Pass 1 (identity): sum all heads and save each head's prob map.
        output = None
        xxx = 0
        for pred in out1:
            if output is None:
                output = torch.zeros(pred.size()).to(seg_help.equipment)
            output += pred
            xxx += 1
            pred = torch.softmax(pred, dim=1)
            pred_max = pred[:, seg_help.config.classes - 1, :, :]
            image_save = pred_max.unsqueeze(0).contiguous()
            visual_batch(image_save, output_dir, "test_" + str(iii) + "_prob_" + str(xxx), channel=1, nrow=nrow)
        output_0 = output
        # Pass 2: horizontal flip, un-flip the summed heads.
        cam_p = model_cam.forward_cam(seg_help.fliplr(patches_img))
        out1, out2, cam = model(seg_help.fliplr(patches_img), cam_p)
        output = None
        for pred in out1:
            if output is None:
                output = torch.zeros(pred.size()).to(seg_help.equipment)
            output += seg_help.fliplr(pred)
        output_1 = output
        # Pass 3: vertical flip, un-flip the summed heads.
        cam_p = model_cam.forward_cam(seg_help.flipvr(patches_img))
        out1, out2, cam = model(seg_help.flipvr(patches_img), cam_p)
        output = None
        for pred in out1:
            if output is None:
                output = torch.zeros(pred.size()).to(seg_help.equipment)
            output += seg_help.flipvr(pred)
        output_2 = output
        # Ensemble the three views by summing logits before softmax.
        prob = torch.softmax(output_0 + output_1 + output_2, dim=1)
        # prob = torch.softmax(output_0, dim=1)
        prob_max = prob[:, seg_help.config.classes - 1, :, :]
        prob_max_up = prob_max.unsqueeze(1)
        prob_max_up = F.interpolate(prob_max_up,
                                    size=(cropped_im.shape[0], cropped_im.shape[1]),
                                    mode='bilinear',
                                    align_corners=True)
        prob_max_ = torch.squeeze(prob_max).cpu().numpy()
        fine_pred = np.where(prob_max_ > thres, 1, 0)
        prob_max_up = torch.squeeze(prob_max_up).cpu().numpy()
        fine_pred_up = np.where(prob_max_up > thres, 1, 0)
        # NOTE(review): identical to fine_pred_up above (no dilation here,
        # unlike the sibling functions — see the commented lines below).
        fine_pred_up_final = np.where(prob_max_up > thres, 1, 0)
        # fine_pred_final = ndimage.binary_dilation(fine_pred, iterations=1).astype(fine_pred.dtype)
        # fine_pred_up_final = ndimage.binary_dilation(fine_pred_up, iterations=1).astype(fine_pred_up.dtype)
        # CAM overlay of the last pass (only when a CAM was produced).
        if cam is not None:
            su_cam_g = cam.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
            su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[2:], mode='bilinear', align_corners=True)
            su_cam_g = make_grid(su_cam_g, nrow=nrow, pad_value=1)
            su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
            su_data_g = su_data.detach().clone()
            su_data_g = make_grid(su_data_g, nrow=nrow, pad_value=1)
            su_data_g = su_data_g.unsqueeze(0)
            su_cam_g = seg_help.max_norm_cam(su_cam_g)
            heatmap, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
            visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                      join(output_dir,
                           "test_" + str(iii) + '_cam'))
            image_save = prob_max.unsqueeze(0).contiguous()
            visual_batch(image_save, output_dir, "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
            visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
            visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
        # fine_pred_torch = torch.from_numpy(fine_pred_final).unsqueeze(0).contiguous()
        # visual_batch(fine_pred_torch, output_dir, str(iii) + "_test_predict", channel=1, nrow=nrow)
        # Metrics use the prediction pasted back at original resolution.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        # if np.sum(fine_pred_orgi_res)==0:
        #     continue
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        # print(lesion_segmentation_scores[m])
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += (m + ': {val:.9f} \t '.format(val=lesion_segmentation_metrics[m]))
    print(info)
    print('Test Dice : {val:.9f} Thres {thre:.2f} '.format(val=lesion_segmentation_metrics['DICESCORE'], thre=thres))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq_weight_cam_50(seg_help, model, model_cam, covid_test_data, thres=0.7, epoch=0):
    """Flip-TTA CAM-guided 2D inference restricted to the Semi-Inf-Net subset.

    Identical pipeline to ``predict_inf_inwindow_2d_out_seq_weight_cam``
    but it only evaluates slices that have a matching ``<index>.png`` in
    the hard-coded ``../../log/3DCOVIDCT/Semi-Inf-Net/`` directory, so the
    reported Dice is comparable with that baseline's subset.

    Returns the mean DICESCORE over the evaluated subset.
    """
    seg_help.model.eval()
    model.eval()
    model_cam.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Keys enumerate the metrics to aggregate; the zero values are unused.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    # covid_test_data is a (images, lungs, infections) triple of slice lists.
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d_40s' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    # NOTE(review): hard-coded relative path to the Semi-Inf-Net baseline
    # predictions — breaks when run from a different working directory.
    semi_inf = os.listdir('../../log/3DCOVIDCT/Semi-Inf-Net/')
    for iii in range(len(img_list_)):
        # print(file_path)
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        # Skip unlabelled slices and slices outside the baseline subset.
        if np.sum(infection) == 0:
            continue
        if str(iii) + '.png' not in semi_inf:
            continue
        # NOTE(review): img_semi is loaded, binarized and resized but never
        # used afterwards — only the file's existence matters here.
        img_semi = Image.open('../../log/3DCOVIDCT/Semi-Inf-Net/' + str(iii) + '.png')
        img_semi = np.array(img_semi)
        img_semi = np.where(img_semi > 128, 1, 0)
        img_semi = resize(img_semi, (resize_x, resize_y), order=0, mode='edge',
                          cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        nrow = 8
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=0, inferior=0)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        # Cubic resize for the image, nearest-neighbour for the label mask.
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img)
        su_label = torch.from_numpy(resized_infection)
        su_data = su_data.unsqueeze(0).unsqueeze(0)
        su_label = su_label.unsqueeze(0).unsqueeze(0)
        cropp_img = su_data
        # NOTE(review): the next line is a no-op self-assignment.
        cropp_img = cropp_img
        patches_img = cropp_img.to(seg_help.equipment).float()
        # NOTE(review): only this first forward pass runs under no_grad();
        # the flipped passes below build autograd graphs.
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            cam_p = model_cam.forward_cam(patches_img)
            out1, out2, cam = model(patches_img, cam_p)
        # Pass 1 (identity): sum all heads.
        xxx = 0
        output = None
        for pred in out1:
            if output is None:
                output = torch.zeros(pred.size()).to(seg_help.equipment)
            output += pred
            xxx += 1
            pred = torch.softmax(pred, dim=1)
            pred_max = pred[:, seg_help.config.classes - 1, :, :]
            image_save = pred_max.unsqueeze(0).contiguous()
            # visual_batch(image_save, output_dir, "test_" + str(iii) + "_prob_" + str(xxx), channel=1, nrow=nrow)
        output_0 = output
        # Pass 2: horizontal flip, un-flip the summed heads.
        cam_p = model_cam.forward_cam(seg_help.fliplr(patches_img))
        out1, out2, cam = model(seg_help.fliplr(patches_img), cam_p)
        output = None
        for pred in out1:
            if output is None:
                output = torch.zeros(pred.size()).to(seg_help.equipment)
            output += seg_help.fliplr(pred)
        output_1 = output
        # Pass 3: vertical flip, un-flip the summed heads.
        cam_p = model_cam.forward_cam(seg_help.flipvr(patches_img))
        out1, out2, cam = model(seg_help.flipvr(patches_img), cam_p)
        output = None
        for pred in out1:
            if output is None:
                output = torch.zeros(pred.size()).to(seg_help.equipment)
            output += seg_help.flipvr(pred)
        output_2 = output
        # Ensemble the three views by summing logits before softmax.
        prob = torch.softmax(output_0 + output_1 + output_2, dim=1)
        # prob = torch.softmax(output_0, dim=1)
        prob_max = prob[:, seg_help.config.classes - 1, :, :]
        prob_max_up = prob_max.unsqueeze(1)
        prob_max_up = F.interpolate(prob_max_up,
                                    size=(cropped_im.shape[0], cropped_im.shape[1]),
                                    mode='bilinear',
                                    align_corners=True)
        prob_max_ = torch.squeeze(prob_max).cpu().numpy()
        fine_pred = np.where(prob_max_ > thres, 1, 0)
        prob_max_up = torch.squeeze(prob_max_up).cpu().numpy()
        fine_pred_up = np.where(prob_max_up > thres, 1, 0)
        # NOTE(review): identical to fine_pred_up above (no dilation here).
        fine_pred_up_final = np.where(prob_max_up > thres, 1, 0)
        # fine_pred_final = ndimage.binary_dilation(fine_pred, iterations=1).astype(fine_pred.dtype)
        # fine_pred_up_final = ndimage.binary_dilation(fine_pred_up, iterations=1).astype(fine_pred_up.dtype)
        # CAM overlay of the last pass (only when a CAM was produced).
        if cam is not None:
            su_cam_g = cam.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
            su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[2:], mode='bilinear', align_corners=True)
            su_cam_g = make_grid(su_cam_g, nrow=nrow, pad_value=1)
            su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
            su_data_g = su_data.detach().clone()
            su_data_g = make_grid(su_data_g, nrow=nrow, pad_value=1)
            su_data_g = su_data_g.unsqueeze(0)
            su_cam_g = seg_help.max_norm_cam(su_cam_g)
            heatmap, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
            visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                      join(output_dir,
                           "test_" + str(iii) + '_cam'))
            image_save = prob_max.unsqueeze(0).contiguous()
            visual_batch(image_save, output_dir, "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
            visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
            visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
            impaintshow(su_data.cpu().numpy(), su_label.cpu().numpy(), fine_pred, output_dir,
                        "test_" + str(iii) + "_overlay")
        # fine_pred_torch = torch.from_numpy(fine_pred_final).unsqueeze(0).contiguous()
        # visual_batch(fine_pred_torch, output_dir, str(iii) + "_test_predict", channel=1, nrow=nrow)
        # Metrics use the prediction pasted back at original resolution.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        # print(lesion_segmentation_scores[m])
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += (m + ': {val:.9f} \t '.format(val=lesion_segmentation_metrics[m]))
    print(info)
    print('50 Slice Test Dice : {val:.9f} Thres {thre:.2f} '.format(val=lesion_segmentation_metrics['DICESCORE'],
                                                                    thre=thres))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq: CAM visualization only (no metrics)
def predict_inf_inwindow_2d_out_seq_cam(seg_help, model, covid_test_data, thres=0.7, epoch=0):
    """Render per-slice CAM overlays and label grids for the 2D test set.

    Unlike its ``predict_inf_inwindow_2d_out_seq*`` siblings this function
    only produces visualizations (CAM overlay + ground-truth grid), then
    zips and removes the output directory; no metrics are computed and it
    returns ``None``.

    Args:
        seg_help: project helper exposing the model, config and device.
        model: network whose third output is the class-activation map.
        covid_test_data: (images, lungs, infections) triple of slice lists.
        thres: unused; kept for signature compatibility with the siblings.
        epoch: tag appended to the output directory name.
    """
    seg_help.model.eval()
    model.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    for iii in range(len(img_list_)):
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        nrow = 8
        # Crop to the lung bounding box (padded by 10 px) and resize;
        # cubic for the image, nearest-neighbour for the label mask.
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img).unsqueeze(0).unsqueeze(0)
        su_label = torch.from_numpy(resized_infection).unsqueeze(0).unsqueeze(0)
        patches_img = su_data.to(seg_help.equipment).float()
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            _, _, cam = model(patches_img)
        # Foreground CAM: upsample to input size, grid, normalize, blend
        # with the input grid and save the overlay.
        su_cam_g = cam.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
        su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[2:], mode='bilinear', align_corners=True)
        su_cam_g = make_grid(su_cam_g, nrow=nrow, pad_value=1)
        su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
        su_data_g = su_data.detach().clone()
        su_data_g = make_grid(su_data_g, nrow=nrow, pad_value=1)
        su_data_g = su_data_g.unsqueeze(0)
        su_cam_g = seg_help.max_norm_cam(su_cam_g)
        _, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
        visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                  join(output_dir,
                       "test_" + str(iii) + '_cam'))
        visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq_weight_cam_infer(seg_help, model, model_cam, covid_test_data, thres=0.7, epoch=0):
    """Slice-wise 2D infection segmentation with CAM guidance and flip TTA.

    Every slice with a non-empty infection mask is cropped to the lung VOI,
    resized to (patch_x, patch_y) and segmented three times: as-is, after a
    left/right flip and after a vertical flip (flips via ``seg_help``).  The
    un-flipped logit sums of the three passes are combined by softmax,
    thresholded at ``thres`` and scored against the full-resolution ground
    truth.  Visualisations are written to a temp directory that is zipped and
    removed afterwards.

    Args:
        seg_help: helper carrying ``config``, device (``equipment``) and the
            ``fliplr``/``flipvr``/CAM utilities.
        model: segmentation net exposing ``forward_seg(img, cam) -> (out1, out2, cam)``.
        model_cam: CAM net exposing ``forward_cam(img)``.
        covid_test_data: triple (image slices, lung masks, infection masks).
        thres: foreground probability threshold.
        epoch: tag embedded in the output directory name.

    Returns:
        Mean DICESCORE over all evaluated slices.
    """
    seg_help.model.eval()
    model.eval()
    model_cam.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Only the keys matter: they select which metrics are accumulated.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    nrow = 8

    def _sum_preds(preds, unflip=None):
        # Sum the per-scale predictions; undo a flip on each one when given.
        total = None
        for p in preds:
            if total is None:
                total = torch.zeros(p.size()).to(seg_help.equipment)
            total += p if unflip is None else unflip(p)
        return total

    for iii in range(len(img_list_)):
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        if np.sum(infection) == 0:
            continue  # nothing to score on this slice
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img).unsqueeze(0).unsqueeze(0)
        su_label = torch.from_numpy(resized_infection).unsqueeze(0).unsqueeze(0)
        patches_img = su_data.to(seg_help.equipment).float()
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            # Pass 1: original orientation; also dump each per-scale probability map.
            cam_p = model_cam.forward_cam(patches_img)
            out1, out2, cam = model.forward_seg(patches_img, cam_p)
            output = None
            xxx = 0
            for pred in out1:
                if output is None:
                    output = torch.zeros(pred.size()).to(seg_help.equipment)
                output += pred
                xxx += 1
                pred_prob = torch.softmax(pred, dim=1)
                pred_max = pred_prob[:, seg_help.config.classes - 1, :, :]
                visual_batch(pred_max.unsqueeze(0).contiguous(), output_dir,
                             "test_" + str(iii) + "_prob_" + str(xxx), channel=1, nrow=nrow)
            output_0 = output
            # Pass 2: left/right flip, predictions un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.fliplr(patches_img))
            out1, out2, cam = model.forward_seg(seg_help.fliplr(patches_img), cam_p)
            output_1 = _sum_preds(out1, seg_help.fliplr)
            # Pass 3: vertical flip, predictions un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.flipvr(patches_img))
            out1, out2, cam = model.forward_seg(seg_help.flipvr(patches_img), cam_p)
            output_2 = _sum_preds(out1, seg_help.flipvr)
            prob = torch.softmax(output_0 + output_1 + output_2, dim=1)
            prob_max = prob[:, seg_help.config.classes - 1, :, :]
            # Upsample probabilities back to the cropped-slice resolution.
            prob_max_up = F.interpolate(prob_max.unsqueeze(1),
                                        size=(cropped_im.shape[0], cropped_im.shape[1]),
                                        mode='bilinear',
                                        align_corners=True)
        fine_pred_up_final = np.where(torch.squeeze(prob_max_up).cpu().numpy() > thres, 1, 0)
        if cam is not None:
            # Overlay the CAM of the last pass on the input image grid and save it.
            su_cam_g = cam.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
            su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[2:], mode='bilinear', align_corners=True)
            su_cam_g = make_grid(su_cam_g, nrow=nrow, pad_value=1)
            su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
            su_data_g = make_grid(su_data.detach().clone(), nrow=nrow, pad_value=1).unsqueeze(0)
            su_cam_g = seg_help.max_norm_cam(su_cam_g)
            heatmap, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
            visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                      join(output_dir, "test_" + str(iii) + '_cam'))
        visual_batch(prob_max.unsqueeze(0).contiguous(), output_dir,
                     "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
        visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
        visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
        # Paste the thresholded prediction back into a full-resolution canvas.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += (m + ': {val:.9f} \t '.format(val=lesion_segmentation_metrics[m]))
    print('Test Dice : {val:.9f} Thres {thre:.2f} '.format(val=lesion_segmentation_metrics['DICESCORE'], thre=thres))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq_weight_cam_50_infer(seg_help, model, model_cam, covid_test_data, thres=0.7,
                                                        epoch=0):
    """CAM-guided flip-TTA inference restricted to the Semi-Inf-Net slice subset.

    Identical pipeline to ``predict_inf_inwindow_2d_out_seq_weight_cam_infer``
    except that only slices whose index appears as ``<iii>.png`` in the
    Semi-Inf-Net log directory are evaluated, and results go to a ``_40s``
    suffixed directory.

    Returns:
        Mean DICESCORE over the evaluated slice subset.
    """
    seg_help.model.eval()
    model.eval()
    model_cam.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Only the keys matter: they select which metrics are accumulated.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d_40s' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    # Slice whitelist: only indices with a matching png in this listing are scored.
    semi_inf = os.listdir('../../log/3DCOVIDCT/Semi-Inf-Net/')
    nrow = 8

    def _sum_preds(preds, unflip=None):
        # Sum the per-scale predictions; undo a flip on each one when given.
        total = None
        for p in preds:
            if total is None:
                total = torch.zeros(p.size()).to(seg_help.equipment)
            total += p if unflip is None else unflip(p)
        return total

    for iii in range(len(img_list_)):
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        if np.sum(infection) == 0:
            continue  # nothing to score on this slice
        if str(iii) + '.png' not in semi_inf:
            continue  # slice not in the Semi-Inf-Net comparison subset
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img).unsqueeze(0).unsqueeze(0)
        su_label = torch.from_numpy(resized_infection).unsqueeze(0).unsqueeze(0)
        patches_img = su_data.to(seg_help.equipment).float()
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            # Pass 1: original orientation; also dump each per-scale probability map.
            cam_p = model_cam.forward_cam(patches_img)
            out1, out2, cam = model.forward_seg(patches_img, cam_p)
            output = None
            xxx = 0
            for pred in out1:
                if output is None:
                    output = torch.zeros(pred.size()).to(seg_help.equipment)
                output += pred
                xxx += 1
                pred_prob = torch.softmax(pred, dim=1)
                pred_max = pred_prob[:, seg_help.config.classes - 1, :, :]
                visual_batch(pred_max.unsqueeze(0).contiguous(), output_dir,
                             "test_" + str(iii) + "_prob_" + str(xxx), channel=1, nrow=nrow)
            output_0 = output
            # Pass 2: left/right flip, predictions un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.fliplr(patches_img))
            out1, out2, cam = model.forward_seg(seg_help.fliplr(patches_img), cam_p)
            output_1 = _sum_preds(out1, seg_help.fliplr)
            # Pass 3: vertical flip, predictions un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.flipvr(patches_img))
            out1, out2, cam = model.forward_seg(seg_help.flipvr(patches_img), cam_p)
            output_2 = _sum_preds(out1, seg_help.flipvr)
            prob = torch.softmax(output_0 + output_1 + output_2, dim=1)
            prob_max = prob[:, seg_help.config.classes - 1, :, :]
            # Upsample probabilities back to the cropped-slice resolution.
            prob_max_up = F.interpolate(prob_max.unsqueeze(1),
                                        size=(cropped_im.shape[0], cropped_im.shape[1]),
                                        mode='bilinear',
                                        align_corners=True)
        fine_pred_up_final = np.where(torch.squeeze(prob_max_up).cpu().numpy() > thres, 1, 0)
        if cam is not None:
            # Overlay the CAM of the last pass on the input image grid and save it.
            su_cam_g = cam.detach()[:, seg_help.config.classes - 1, :, :].unsqueeze(1)
            su_cam_g = F.interpolate(su_cam_g, size=su_data.size()[2:], mode='bilinear', align_corners=True)
            su_cam_g = make_grid(su_cam_g, nrow=nrow, pad_value=1)
            su_cam_g = su_cam_g[0, :, :].unsqueeze(0).unsqueeze(0)
            su_data_g = make_grid(su_data.detach().clone(), nrow=nrow, pad_value=1).unsqueeze(0)
            su_cam_g = seg_help.max_norm_cam(su_cam_g)
            heatmap, result_pp = seg_help.visualize_cam(su_cam_g.cpu(), su_data_g.cpu())
            visualize(np.clip(np.transpose(result_pp.detach().cpu().numpy(), (1, 2, 0)), 0, 1),
                      join(output_dir, "test_" + str(iii) + '_cam'))
        visual_batch(prob_max.unsqueeze(0).contiguous(), output_dir,
                     "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
        visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
        visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
        # Paste the thresholded prediction back into a full-resolution canvas.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += (m + ': {val:.9f} \t '.format(val=lesion_segmentation_metrics[m]))
    print('50 Slice Test Dice : {val:.9f} Thres {thre:.2f} '.format(val=lesion_segmentation_metrics['DICESCORE'],
                                                                    thre=thres))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq_weight_cam_infer_stu(seg_help, model, model_seg, model_cam, covid_test_data,
                                                         thres=0.7,
                                                         epoch=0):
    """Two-network ensemble inference with CAM guidance and flip TTA.

    Like ``predict_inf_inwindow_2d_out_seq_weight_cam_infer`` but for each of
    the three orientations (identity, left/right flip, vertical flip) the
    per-scale logits of BOTH ``model_seg`` and ``model`` are summed before the
    final softmax.  Here ``forward_seg`` returns the prediction list directly.

    Returns:
        Mean DICESCORE over all evaluated slices.
    """
    seg_help.model.eval()
    model.eval()
    model_cam.eval()
    model_seg.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Only the keys matter: they select which metrics are accumulated.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    nrow = 8

    def _sum_preds(preds, unflip=None):
        # Sum the per-scale predictions; undo a flip on each one when given.
        total = None
        for p in preds:
            if total is None:
                total = torch.zeros(p.size()).to(seg_help.equipment)
            total += p if unflip is None else unflip(p)
        return total

    for iii in range(len(img_list_)):
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        if np.sum(infection) == 0:
            continue  # nothing to score on this slice
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img).unsqueeze(0).unsqueeze(0)
        su_label = torch.from_numpy(resized_infection).unsqueeze(0).unsqueeze(0)
        patches_img = su_data.to(seg_help.equipment).float()
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            # Orientation 1: original; dump each of model's per-scale probability maps.
            cam_p = model_cam.forward_cam(patches_img)
            output_o = _sum_preds(model_seg.forward_seg(patches_img, cam_p))
            out1 = model.forward_seg(patches_img, cam_p)
            output = None
            xxx = 0
            for pred in out1:
                if output is None:
                    output = torch.zeros(pred.size()).to(seg_help.equipment)
                output += pred
                xxx += 1
                pred_prob = torch.softmax(pred, dim=1)
                pred_max = pred_prob[:, seg_help.config.classes - 1, :, :]
                visual_batch(pred_max.unsqueeze(0).contiguous(), output_dir,
                             "test_" + str(iii) + "_prob_" + str(xxx), channel=1, nrow=nrow)
            output_0 = output_o + output
            # Orientation 2: left/right flip, both networks, un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.fliplr(patches_img))
            output_1 = (_sum_preds(model_seg.forward_seg(seg_help.fliplr(patches_img), cam_p), seg_help.fliplr)
                        + _sum_preds(model.forward_seg(seg_help.fliplr(patches_img), cam_p), seg_help.fliplr))
            # Orientation 3: vertical flip, both networks, un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.flipvr(patches_img))
            output_2 = (_sum_preds(model_seg.forward_seg(seg_help.flipvr(patches_img), cam_p), seg_help.flipvr)
                        + _sum_preds(model.forward_seg(seg_help.flipvr(patches_img), cam_p), seg_help.flipvr))
            prob = torch.softmax(output_0 + output_1 + output_2, dim=1)
            prob_max = prob[:, seg_help.config.classes - 1, :, :]
            # Upsample probabilities back to the cropped-slice resolution.
            prob_max_up = F.interpolate(prob_max.unsqueeze(1),
                                        size=(cropped_im.shape[0], cropped_im.shape[1]),
                                        mode='bilinear',
                                        align_corners=True)
        fine_pred_up_final = np.where(torch.squeeze(prob_max_up).cpu().numpy() > thres, 1, 0)
        visual_batch(prob_max.unsqueeze(0).contiguous(), output_dir,
                     "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
        visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
        visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
        # Paste the thresholded prediction back into a full-resolution canvas.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += (m + ': {val:.9f} \t '.format(val=lesion_segmentation_metrics[m]))
    print('Test Dice : {val:.9f} Thres {thre:.2f} '.format(val=lesion_segmentation_metrics['DICESCORE'], thre=thres))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
# total 2d slice without seq
def predict_inf_inwindow_2d_out_seq_weight_cam_50_infer_stu(seg_help, model, model_seg, model_cam, covid_test_data,
                                                            thres=0.7,
                                                            epoch=0):
    """Two-network ensemble inference on the Semi-Inf-Net slice subset.

    Identical pipeline to ``predict_inf_inwindow_2d_out_seq_weight_cam_infer_stu``
    except that only slices whose index appears as ``<iii>.png`` in the
    Semi-Inf-Net log directory are evaluated, and results go to a ``_40s``
    suffixed directory.

    Returns:
        Mean DICESCORE over the evaluated slice subset.
    """
    seg_help.model.eval()
    model.eval()
    model_cam.eval()
    model_seg.eval()
    resize_x = seg_help.config.patch_x
    resize_y = seg_help.config.patch_y
    # Only the keys matter: they select which metrics are accumulated.
    segmentation_metrics = {
        'Jaccard': 0, 'HD': 0,
        'F1': 0, 'ACCURACY': 0, 'SENSITIVITY': 0, 'SPECIFICITY': 0,
        'DICESCORE': 0}
    lesion_segmentation_scores = {}
    img_list_ = covid_test_data[0]
    lung_list = covid_test_data[1]
    inf_list = covid_test_data[2]
    output_dir = join(seg_help.config.tmp_dir, 'infer_thres%.2f_%d_40s' % (thres, epoch))
    mkdir_if_not_exist([output_dir])
    # Slice whitelist: only indices with a matching png in this listing are scored.
    semi_inf = os.listdir('../../log/3DCOVIDCT/Semi-Inf-Net/')
    nrow = 8

    def _sum_preds(preds, unflip=None):
        # Sum the per-scale predictions; undo a flip on each one when given.
        total = None
        for p in preds:
            if total is None:
                total = torch.zeros(p.size()).to(seg_help.equipment)
            total += p if unflip is None else unflip(p)
        return total

    for iii in range(len(img_list_)):
        img = img_list_[iii].copy()
        lung = lung_list[iii].copy()
        infection = inf_list[iii].copy()
        if np.sum(infection) == 0:
            continue  # nothing to score on this slice
        if str(iii) + '.png' not in semi_inf:
            continue  # slice not in the Semi-Inf-Net comparison subset
        minx, maxx, miny, maxy = min_max_voi_2d(lung, superior=10, inferior=10)
        cropped_im = img[minx: maxx, miny: maxy]
        cropped_if = infection[minx: maxx, miny: maxy]
        resized_img = resize(cropped_im, (resize_x, resize_y), order=3, mode='constant',
                             cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        resized_infection = resize(cropped_if, (resize_x, resize_y), order=0, mode='edge',
                                   cval=0, clip=True, preserve_range=True, anti_aliasing=False)
        su_data = torch.from_numpy(resized_img).unsqueeze(0).unsqueeze(0)
        su_label = torch.from_numpy(resized_infection).unsqueeze(0).unsqueeze(0)
        patches_img = su_data.to(seg_help.equipment).float()
        with torch.no_grad():
            bb, cc, xx, yy = patches_img.size()
            patches_img = patches_img.view(bb * cc, 1, xx, yy)
            # Orientation 1: original; dump each of model's per-scale probability maps.
            cam_p = model_cam.forward_cam(patches_img)
            output_o = _sum_preds(model_seg.forward_seg(patches_img, cam_p))
            out1 = model.forward_seg(patches_img, cam_p)
            output = None
            xxx = 0
            for pred in out1:
                if output is None:
                    output = torch.zeros(pred.size()).to(seg_help.equipment)
                output += pred
                xxx += 1
                pred_prob = torch.softmax(pred, dim=1)
                pred_max = pred_prob[:, seg_help.config.classes - 1, :, :]
                visual_batch(pred_max.unsqueeze(0).contiguous(), output_dir,
                             "test_" + str(iii) + "_prob_" + str(xxx), channel=1, nrow=nrow)
            output_0 = output_o + output
            # Orientation 2: left/right flip, both networks, un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.fliplr(patches_img))
            output_1 = (_sum_preds(model_seg.forward_seg(seg_help.fliplr(patches_img), cam_p), seg_help.fliplr)
                        + _sum_preds(model.forward_seg(seg_help.fliplr(patches_img), cam_p), seg_help.fliplr))
            # Orientation 3: vertical flip, both networks, un-flipped before summing.
            cam_p = model_cam.forward_cam(seg_help.flipvr(patches_img))
            output_2 = (_sum_preds(model_seg.forward_seg(seg_help.flipvr(patches_img), cam_p), seg_help.flipvr)
                        + _sum_preds(model.forward_seg(seg_help.flipvr(patches_img), cam_p), seg_help.flipvr))
            prob = torch.softmax(output_0 + output_1 + output_2, dim=1)
            prob_max = prob[:, seg_help.config.classes - 1, :, :]
            # Upsample probabilities back to the cropped-slice resolution.
            prob_max_up = F.interpolate(prob_max.unsqueeze(1),
                                        size=(cropped_im.shape[0], cropped_im.shape[1]),
                                        mode='bilinear',
                                        align_corners=True)
        fine_pred_up_final = np.where(torch.squeeze(prob_max_up).cpu().numpy() > thres, 1, 0)
        visual_batch(prob_max.unsqueeze(0).contiguous(), output_dir,
                     "test_" + str(iii) + "_prob", channel=1, nrow=nrow)
        visual_batch(su_data, output_dir, "test_" + str(iii) + "_images", channel=1, nrow=nrow)
        visual_batch(su_label, output_dir, "test_" + str(iii) + "_label", channel=1, nrow=nrow)
        # Paste the thresholded prediction back into a full-resolution canvas.
        fine_pred_orgi_res = np.zeros((img.shape[0], img.shape[1]), dtype='float32')
        fine_pred_orgi_res[minx: maxx, miny: maxy] = fine_pred_up_final
        scores = compute_all_metric_for_single_seg(infection, fine_pred_orgi_res)
        for metric in segmentation_metrics:
            if metric not in lesion_segmentation_scores:
                lesion_segmentation_scores[metric] = []
            lesion_segmentation_scores[metric].extend(scores[metric])
        empty_cache()
    lesion_segmentation_metrics = {}
    info = ''
    for m in lesion_segmentation_scores:
        lesion_segmentation_metrics[m] = np.mean(lesion_segmentation_scores[m])
        info += (m + ': {val:.9f} \t '.format(val=lesion_segmentation_metrics[m]))
    print('50 Slice Test Dice : {val:.9f} Thres {thre:.2f} '.format(val=lesion_segmentation_metrics['DICESCORE'],
                                                                    thre=thres))
    zipDir(output_dir, output_dir + '.zip')
    shutil.rmtree(output_dir)
    return lesion_segmentation_metrics['DICESCORE']
| 50.25335
| 119
| 0.600635
| 11,122
| 82,516
| 4.157256
| 0.032458
| 0.033458
| 0.012977
| 0.020417
| 0.940567
| 0.928391
| 0.920453
| 0.907953
| 0.900037
| 0.891364
| 0
| 0.019914
| 0.277025
| 82,516
| 1,641
| 120
| 50.283973
| 0.755134
| 0.111978
| 0
| 0.859209
| 0
| 0
| 0.036501
| 0.00271
| 0
| 0
| 0
| 0
| 0.000761
| 1
| 0.009893
| false
| 0
| 0.006088
| 0
| 0.024353
| 0.012938
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16e8799cbff075910d3ab57c939934ea38302a9b
| 6,873
|
py
|
Python
|
tests/integration/Test_Adjectives.py
|
blagae/whitakers_words
|
d3b7b91053f727770c3e56e807344ee7d02b3d79
|
[
"MIT"
] | 14
|
2020-07-28T04:47:49.000Z
|
2022-03-25T05:57:28.000Z
|
tests/integration/Test_Adjectives.py
|
blagae/whitakers_words
|
d3b7b91053f727770c3e56e807344ee7d02b3d79
|
[
"MIT"
] | 3
|
2020-07-28T04:49:24.000Z
|
2022-02-22T23:02:25.000Z
|
tests/integration/Test_Adjectives.py
|
blagae/whitakers_words
|
d3b7b91053f727770c3e56e807344ee7d02b3d79
|
[
"MIT"
] | 3
|
2020-08-02T00:49:19.000Z
|
2022-02-19T00:02:38.000Z
|
import unittest
from whitakers_words.enums import Case, Degree, Gender, Number, WordType
from whitakers_words.parser import Parser
class AdjectiveTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.par = Parser()
def test_saevissimae(self):
result = self.par.parse("saevissimae")
self.assertEqual(len(result.forms), 1)
self.assertEqual(len(result.forms[0].analyses), 1)
for analysis in result.forms[0].analyses.values():
self.assertEqual(analysis.lexeme.roots[0], 'saev')
self.assertEqual(analysis.lexeme.wordType, WordType.ADJ)
self.assertEqual(len(analysis.inflections), 4)
# common properties and features
for inflection in analysis.inflections:
self.assertEqual(inflection.stem, 'saevissi')
self.assertEqual(inflection.affix, 'mae')
self.assertEqual(inflection.wordType, WordType.ADJ)
self.assertTrue(inflection.has_feature(Degree.SUPER))
self.assertTrue(inflection.has_feature(Gender.F))
other_features = [[x.features['Case'], x.features['Number']] for x in analysis.inflections]
self.assertTrue([Case.GEN, Number.S] in other_features)
self.assertTrue([Case.DAT, Number.S] in other_features)
self.assertTrue([Case.VOC, Number.P] in other_features)
self.assertTrue([Case.NOM, Number.P] in other_features)
def test_bonorum(self):
result = self.par.parse("bonorum")
self.assertEqual(len(result.forms), 1)
self.assertEqual(len(result.forms[0].analyses), 3)
for analysis in result.forms[0].analyses.values():
if analysis.lexeme.wordType == WordType.ADJ:
self.assertEqual(analysis.lexeme.roots[0], 'bon')
self.assertEqual(analysis.lexeme.wordType, WordType.ADJ)
self.assertEqual(len(analysis.inflections), 2)
# common properties and features
for inflection in analysis.inflections:
self.assertEqual(inflection.stem, 'bon')
self.assertEqual(inflection.affix, 'orum')
self.assertEqual(inflection.wordType, WordType.ADJ)
self.assertTrue(inflection.has_feature(Degree.POS))
self.assertTrue(inflection.has_feature(Case.GEN))
self.assertTrue(inflection.has_feature(Number.P))
other_features = [x.features['Gender'] for x in analysis.inflections]
self.assertTrue(Gender.M in other_features)
self.assertTrue(Gender.N in other_features)
def test_felicium(self):
result = self.par.parse("felicium")
self.assertEqual(len(result.forms), 1)
self.assertEqual(len(result.forms[0].analyses), 1)
for analysis in result.forms[0].analyses.values():
self.assertEqual(analysis.lexeme.roots[0], 'felix')
self.assertEqual(analysis.lexeme.wordType, WordType.ADJ)
self.assertEqual(len(analysis.inflections), 1)
# common properties and features
for inflection in analysis.inflections:
self.assertEqual(inflection.stem, 'felic')
self.assertEqual(inflection.affix, 'ium')
self.assertEqual(inflection.wordType, WordType.ADJ)
self.assertTrue(inflection.has_feature(Degree.POS))
self.assertTrue(inflection.has_feature(Case.GEN))
self.assertTrue(inflection.has_feature(Number.P))
self.assertTrue(inflection.has_feature(Gender.X))
def test_melius(self):
result = self.par.parse("melius")
self.assertEqual(len(result.forms), 1)
self.assertEqual(len(result.forms[0].analyses), 2) # see Test_Adverbs.test_melius
analysis = result.forms[0].analyses[6825]
self.assertEqual(analysis.lexeme.roots[0], 'bon')
self.assertEqual(analysis.lexeme.wordType, WordType.ADJ)
self.assertEqual(len(analysis.inflections), 3)
# common properties and features
for inflection in analysis.inflections:
self.assertEqual(inflection.stem, 'meli')
self.assertEqual(inflection.affix, 'us')
self.assertEqual(inflection.wordType, WordType.ADJ)
self.assertTrue(inflection.has_feature(Degree.COMP))
self.assertTrue(inflection.has_feature(Gender.N))
self.assertTrue(inflection.has_feature(Number.S))
other_features = [x.features['Case'] for x in analysis.inflections]
self.assertTrue(Case.VOC in other_features)
self.assertTrue(Case.ACC in other_features)
self.assertTrue(Case.NOM in other_features)
def test_anceps(self):
result = self.par.parse("anceps")
self.assertEqual(len(result.forms), 1)
self.assertEqual(len(result.forms[0].analyses), 1)
for analysis in result.forms[0].analyses.values():
self.assertEqual(analysis.lexeme.roots[0], 'anceps')
self.assertEqual(analysis.lexeme.wordType, WordType.ADJ)
self.assertEqual(len(analysis.inflections), 3)
for inflection in analysis.inflections:
self.assertEqual(inflection.stem, 'anceps')
self.assertEqual(inflection.affix, '')
self.assertEqual(inflection.wordType, WordType.ADJ)
self.assertTrue(inflection.has_feature(Degree.POS))
self.assertTrue(inflection.has_feature(Number.S))
other_features = [x.features['Case'] for x in analysis.inflections]
self.assertTrue(Case.VOC in other_features)
self.assertTrue(Case.NOM in other_features)
def test_acer(self):
    """Of the two analyses of "acer", the adjective reading carries masculine
    singular positive-degree NOM/VOC inflections."""
    parsed = self.par.parse("acer")
    self.assertEqual(len(parsed.forms), 1)
    self.assertEqual(len(parsed.forms[0].analyses), 2)
    for analysis in parsed.forms[0].analyses.values():
        if analysis.lexeme.wordType != WordType.ADJ:
            continue  # only the adjective reading is checked here
        self.assertEqual(analysis.lexeme.roots[0], 'acer')
        self.assertEqual(len(analysis.inflections), 2)
        # shared stem/affix/type plus common degree/number/gender features
        for infl in analysis.inflections:
            self.assertEqual(infl.stem, 'acer')
            self.assertEqual(infl.affix, '')
            self.assertEqual(infl.wordType, WordType.ADJ)
            self.assertTrue(infl.has_feature(Degree.POS))
            self.assertTrue(infl.has_feature(Number.S))
            self.assertTrue(infl.has_feature(Gender.M))
        # the two inflections are distinguished only by case
        cases = [infl.features['Case'] for infl in analysis.inflections]
        self.assertIn(Case.VOC, cases)
        self.assertIn(Case.NOM, cases)
| 48.062937
| 103
| 0.636549
| 754
| 6,873
| 5.742706
| 0.114058
| 0.162818
| 0.074827
| 0.106005
| 0.86097
| 0.804388
| 0.75843
| 0.741109
| 0.712702
| 0.683603
| 0
| 0.007799
| 0.253747
| 6,873
| 142
| 104
| 48.401408
| 0.83642
| 0.022116
| 0
| 0.547826
| 0
| 0
| 0.020402
| 0
| 0
| 0
| 0
| 0
| 0.669565
| 1
| 0.06087
| false
| 0
| 0.026087
| 0
| 0.095652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcb40d65b9639f5490fdfc83bd8655c050cb4e6f
| 2,912
|
py
|
Python
|
mmdet/datasets/icevision_all_classes.py
|
jingwoo4710/mmdetection-icevision
|
da82741b29fdd1eb77b4e7483ff2a515d43d1760
|
[
"Apache-2.0"
] | 4
|
2020-03-13T00:12:44.000Z
|
2021-06-25T07:54:17.000Z
|
mmdet/datasets/icevision_all_classes.py
|
jingwoo4710/mmdetection-icevision
|
da82741b29fdd1eb77b4e7483ff2a515d43d1760
|
[
"Apache-2.0"
] | 4
|
2020-03-13T00:24:15.000Z
|
2022-03-12T00:19:03.000Z
|
mmdet/datasets/icevision_all_classes.py
|
jingwoo4710/mmdetection-icevision
|
da82741b29fdd1eb77b4e7483ff2a515d43d1760
|
[
"Apache-2.0"
] | 1
|
2021-03-07T06:24:08.000Z
|
2021-03-07T06:24:08.000Z
|
from .coco import CocoDataset
from .registry import DATASETS
@DATASETS.register_module
class IceVisionAllClasses(CocoDataset):
    """COCO-format dataset for the IceVision traffic-sign benchmark.

    CLASSES enumerates every annotated sign code (GOST-style numbering such
    as '2.1', '5.19.1'); a class's index in this tuple is its label id.
    Earlier revisions used plain numeric ids ('1'..'82'), a two-sign subset,
    and a 126-class list; this 133-class tuple supersedes them.
    """
    CLASSES = ('1.1', '1.11.1', '1.11.2', '1.12.1', '1.12.2', '1.13', '1.15', '1.16', '1.17', '1.20.1', '1.20.2', '1.20.3', '1.22', '1.23', '1.25', '1.3.1', '1.31', '1.33', '1.34.1', '1.34.2', '1.34.3', '1.8', '2.1', '2.2', '2.3.1', '2.3.2', '2.4', '2.5', '2.6', '3.1', '3.10', '3.11', '3.13', '3.18.1', '3.18.2', '3.19', '3.2', '3.20', '3.24', '3.25', '3.27', '3.28', '3.3', '3.31', '3.32', '3.4', '3.5', '3.9', '4.1.1', '4.1.2', '4.1.3', '4.1.4', '4.1.5', '4.1.6', '4.2.1', '4.2.2', '4.2.3', '4.3', '4.4.1', '4.4.2', '4.5.1', '4.5.2', '5.14', '5.15.1', '5.15.2', '5.15.3', '5.15.4', '5.15.5', '5.15.6', '5.15.7', '5.16', '5.19.1', '5.19.2', '5.20', '5.21', '5.23.1', '5.24.1', '5.3', '5.31', '5.32', '5.4', '5.5', '5.6', '5.7.1', '5.7.2', '6.10.1', '6.10.2', '6.11', '6.12', '6.13', '6.16', '6.18.3', '6.3.1', '6.4', '6.6', '6.7', '6.8.1', '6.9.1', '6.9.2', '7.19', '7.2', '7.3', '7.5', '7.7', '8', '8.1.1', '8.1.4', '8.11', '8.13', '8.14', '8.17', '8.2.1', '8.2.2', '8.2.3', '8.2.4', '8.2.5', '8.2.6', '8.21.1', '8.22.1', '8.22.2', '8.22.3', '8.23', '8.24', '8.3.1', '8.3.2', '8.3.3', '8.4.1', '8.4.3', '8.5.2', '8.5.4', '8.6.1', '8.6.5', '8.7', '8.8')
| 264.727273
| 1,155
| 0.373626
| 756
| 2,912
| 1.437831
| 0.124339
| 0.029439
| 0.01104
| 0.014719
| 0.701932
| 0.682613
| 0.682613
| 0.682613
| 0.682613
| 0.682613
| 0
| 0.37327
| 0.131525
| 2,912
| 11
| 1,155
| 264.727273
| 0.056544
| 0.552541
| 0
| 0
| 0
| 0
| 0.465027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4c73aa982d17c9b1bfdcd87d601b3ed6148b939d
| 165
|
py
|
Python
|
test_Calculator.py
|
Ayush6459/Simple_Calculator_package
|
8c7260ed8e91def3bf0964798b716e1305433872
|
[
"MIT"
] | null | null | null |
test_Calculator.py
|
Ayush6459/Simple_Calculator_package
|
8c7260ed8e91def3bf0964798b716e1305433872
|
[
"MIT"
] | null | null | null |
test_Calculator.py
|
Ayush6459/Simple_Calculator_package
|
8c7260ed8e91def3bf0964798b716e1305433872
|
[
"MIT"
] | null | null | null |
from Calculator import add_num, sub_num
def test_calculator_to_add():
    """add_num must return the arithmetic sum of its two operands."""
    total = add_num(5, 6)
    assert total == 11
def test_calculator_to_sub():
    """sub_num must return the difference of its two operands."""
    difference = sub_num(6, 5)
    assert difference == 1
| 16.5
| 39
| 0.709091
| 29
| 165
| 3.689655
| 0.482759
| 0.11215
| 0.317757
| 0.35514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052239
| 0.187879
| 165
| 9
| 40
| 18.333333
| 0.746269
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
d5c02169a2adf1f4711090f75d884d1fb4d8d337
| 8,824
|
py
|
Python
|
test/test_lm_finetuning.py
|
skiran252/FARM
|
8460d78910a20d19a5da12de6e9bff11f68332a7
|
[
"Apache-2.0"
] | 1
|
2021-02-02T11:42:00.000Z
|
2021-02-02T11:42:00.000Z
|
test/test_lm_finetuning.py
|
skiran252/FARM
|
8460d78910a20d19a5da12de6e9bff11f68332a7
|
[
"Apache-2.0"
] | null | null | null |
test/test_lm_finetuning.py
|
skiran252/FARM
|
8460d78910a20d19a5da12de6e9bff11f68332a7
|
[
"Apache-2.0"
] | null | null | null |
import logging
from pathlib import Path
import numpy as np
import torch
from farm.data_handler.data_silo import DataSilo
from farm.data_handler.processor import BertStyleLMProcessor
from farm.experiment import initialize_optimizer
from farm.modeling.adaptive_model import AdaptiveModel
from farm.modeling.language_model import LanguageModel
from farm.modeling.prediction_head import BertLMHead, NextSentenceHead
from farm.modeling.tokenization import Tokenizer
from farm.train import Trainer
from farm.utils import set_all_seeds, initialize_device_settings
from farm.infer import Inferencer
def test_lm_finetuning(caplog):
    """End-to-end LM-finetuning smoke test.

    Trains BERT with both the masked-LM and next-sentence heads for one epoch
    on a tiny sample, saves the model, reloads it for inference and
    sanity-checks the extracted embeddings.

    Args:
        caplog: pytest log-capture fixture (silenced to CRITICAL).
    """
    caplog.set_level(logging.CRITICAL)
    set_all_seeds(seed=42)
    device, n_gpu = initialize_device_settings(use_cuda=False)
    n_epochs = 1
    batch_size = 1
    evaluate_every = 2
    lang_model = "bert-base-cased"

    tokenizer = Tokenizer.load(
        pretrained_model_name_or_path=lang_model, do_lower_case=False
    )
    processor = BertStyleLMProcessor(
        data_dir=Path("samples/lm_finetuning"),
        train_filename="train-sample.txt",
        test_filename="test-sample.txt",
        dev_filename=None,
        tokenizer=tokenizer,
        max_seq_len=12,
        next_sent_pred=True
    )
    data_silo = DataSilo(processor=processor, batch_size=batch_size, max_processes=1)

    language_model = LanguageModel.load(lang_model)
    lm_prediction_head = BertLMHead.load(lang_model)
    next_sentence_head = NextSentenceHead.load(lang_model)
    model = AdaptiveModel(
        language_model=language_model,
        prediction_heads=[lm_prediction_head, next_sentence_head],
        embeds_dropout_prob=0.1,
        lm_output_types=["per_token", "per_sequence"],
        device=device,
    )

    model, optimizer, lr_schedule = initialize_optimizer(
        model=model,
        learning_rate=2e-5,
        n_batches=len(data_silo.loaders["train"]),
        n_epochs=1,
        device=device,
        schedule_opts={'name': 'CosineWarmup', 'warmup_proportion': 0.1})
    # BUGFIX: lr_schedule was created above but never handed to the Trainer,
    # so the learning-rate schedule was silently unused (the sibling tests in
    # this file do pass it).
    trainer = Trainer(
        model=model,
        optimizer=optimizer,
        data_silo=data_silo,
        epochs=n_epochs,
        n_gpu=n_gpu,
        lr_schedule=lr_schedule,
        evaluate_every=evaluate_every,
        device=device,
    )
    trainer.train()

    # LM embeddings and weight of decoder in head are shared and should therefore be equal
    assert torch.all(
        torch.eq(model.language_model.model.embeddings.word_embeddings.weight, model.prediction_heads[0].decoder.weight))

    save_dir = Path("testsave/lm_finetuning")
    model.save(save_dir)
    processor.save(save_dir)

    # free training objects before spinning up the inferencer
    del model
    del processor
    del optimizer
    del data_silo
    del trainer

    basic_texts = [
        {"text": "Farmer's life is great."},
        {"text": "It's nothing for big city kids though."},
    ]
    model = Inferencer.load(save_dir, task_type="embeddings", num_processes=0)
    result = model.extract_vectors(dicts=basic_texts)
    assert result[0]["context"] == ['Farmer', "'", 's', 'life', 'is', 'great', '.']
    assert result[0]["vec"].shape == (768,)
    # TODO check why results vary across runs with same seed
    assert isinstance(result[0]["vec"][0], np.float32)
def test_lm_finetuning_no_next_sentence(caplog):
    """Same finetuning round-trip as test_lm_finetuning, but with the
    next-sentence-prediction head disabled (masked-LM head only)."""
    caplog.set_level(logging.CRITICAL)
    set_all_seeds(seed=42)
    device, n_gpu = initialize_device_settings(use_cuda=False)

    epoch_count = 1
    batch_size = 1
    eval_interval = 2
    base_model = "bert-base-cased"

    tokenizer = Tokenizer.load(
        pretrained_model_name_or_path=base_model, do_lower_case=False
    )
    processor = BertStyleLMProcessor(
        data_dir=Path("samples/lm_finetuning"),
        train_filename="train-sample.txt",
        test_filename="test-sample.txt",
        dev_filename=None,
        tokenizer=tokenizer,
        max_seq_len=12,
        next_sent_pred=False
    )
    silo = DataSilo(processor=processor, batch_size=batch_size, max_processes=1)

    backbone = LanguageModel.load(base_model)
    mlm_head = BertLMHead.load(base_model)
    model = AdaptiveModel(
        language_model=backbone,
        prediction_heads=[mlm_head],
        embeds_dropout_prob=0.1,
        lm_output_types=["per_token"],
        device=device,
    )
    model, optimizer, lr_schedule = initialize_optimizer(
        model=model,
        learning_rate=2e-5,
        n_batches=len(silo.loaders["train"]),
        n_epochs=1,
        device=device,
        schedule_opts={'name': 'CosineWarmup', 'warmup_proportion': 0.1}
    )
    trainer = Trainer(
        model=model,
        optimizer=optimizer,
        data_silo=silo,
        epochs=epoch_count,
        n_gpu=n_gpu,
        lr_schedule=lr_schedule,
        evaluate_every=eval_interval,
        device=device,
    )
    trainer.train()

    # decoder weights of the LM head are tied to the input embeddings
    assert torch.all(
        torch.eq(model.language_model.model.embeddings.word_embeddings.weight,
                 model.prediction_heads[0].decoder.weight))

    save_dir = Path("testsave/lm_finetuning_no_nsp")
    model.save(save_dir)
    processor.save(save_dir)

    # release training objects before loading the inferencer
    del model
    del processor
    del optimizer
    del silo
    del trainer

    sample_dicts = [
        {"text": "Farmer's life is great."},
        {"text": "It's nothing for big city kids though."},
    ]
    model = Inferencer.load(save_dir, task_type="embeddings", num_processes=0)
    extracted = model.extract_vectors(dicts=sample_dicts)
    assert extracted[0]["context"] == ['Farmer', "'", 's', 'life', 'is', 'great', '.']
    assert extracted[0]["vec"].shape == (768,)
    # TODO check why results vary across runs with same seed
    assert isinstance(extracted[0]["vec"][0], np.float32)
def test_lm_finetuning_custom_vocab(caplog):
    """LM-finetuning smoke test with extra tokens added to the vocabulary:
    the embedding matrix and the LM head must be resized consistently."""
    caplog.set_level(logging.CRITICAL)
    set_all_seeds(seed=42)
    device, n_gpu = initialize_device_settings(use_cuda=False)
    n_epochs = 1
    batch_size = 1
    evaluate_every = 2
    lang_model = "bert-base-cased"

    tokenizer = Tokenizer.load(
        pretrained_model_name_or_path=lang_model, do_lower_case=False
    )
    # nonsense tokens guaranteed to be absent from the pretrained vocab
    tokenizer.add_tokens(["aaaaaaaaaaaaaaaa", "bbbbbbbbbbbbbbbbbbbbb", "ccccccccccccccccccccccc"])
    processor = BertStyleLMProcessor(
        data_dir=Path("samples/lm_finetuning"),
        train_filename="train-sample.txt",
        test_filename="test-sample.txt",
        dev_filename=None,
        tokenizer=tokenizer,
        max_seq_len=12,
        next_sent_pred=True
    )
    data_silo = DataSilo(processor=processor, batch_size=batch_size, max_processes=1)
    # both the backbone and the LM head must grow by the number of added tokens
    language_model = LanguageModel.load(lang_model, n_added_tokens=len(tokenizer.get_added_vocab()))
    lm_prediction_head = BertLMHead.load(lang_model, n_added_tokens=len(tokenizer.get_added_vocab()))
    next_sentence_head = NextSentenceHead.load(lang_model)
    model = AdaptiveModel(
        language_model=language_model,
        prediction_heads=[lm_prediction_head, next_sentence_head],
        embeds_dropout_prob=0.1,
        lm_output_types=["per_token", "per_sequence"],
        device=device
    )

    model, optimizer, lr_schedule = initialize_optimizer(
        model=model,
        learning_rate=2e-5,
        #optimizer_opts={'name': 'AdamW', 'lr': 2E-05},
        n_batches=len(data_silo.loaders["train"]),
        n_epochs=1,
        device=device,
        schedule_opts={'name': 'CosineWarmup', 'warmup_proportion': 0.1}
    )
    trainer = Trainer(
        model=model,
        optimizer=optimizer,
        data_silo=data_silo,
        epochs=n_epochs,
        n_gpu=n_gpu,
        lr_schedule=lr_schedule,
        evaluate_every=evaluate_every,
        device=device,
    )
    trainer.train()

    # LM embeddings and weight of decoder in head are shared and should therefore be equal
    assert torch.all(
        torch.eq(model.language_model.model.embeddings.word_embeddings.weight, model.prediction_heads[0].decoder.weight))

    save_dir = Path("testsave/lm_finetuning")
    model.save(save_dir)
    processor.save(save_dir)

    # release training objects before loading the inferencer
    del model
    del processor
    del optimizer
    del data_silo
    del trainer

    basic_texts = [
        {"text": "Farmer's life is great."},
        {"text": "It's nothing for big city kids though."},
    ]
    model = Inferencer.load(save_dir, task_type="embeddings", num_processes=0)
    result = model.extract_vectors(dicts=basic_texts)
    assert result[0]["context"] == ['Farmer', "'", 's', 'life', 'is', 'great', '.']
    assert result[0]["vec"].shape == (768,)
    # TODO check why results vary across runs with same seed
    assert isinstance(result[0]["vec"][0], np.float32)
if __name__ == "__main__":
    # BUGFIX: `caplog` is a pytest fixture and is only available inside a
    # pytest run; the original `test_lm_finetuning()` call passed no argument
    # and therefore raised TypeError when this module was run as a script.
    # Provide a minimal stand-in so the test can still be smoke-run directly.
    class _CaplogStub:
        """Tiny substitute for pytest's caplog fixture (script runs only)."""
        @staticmethod
        def set_level(level):
            logging.getLogger().setLevel(level)

    test_lm_finetuning(_CaplogStub())
| 32.441176
| 121
| 0.682344
| 1,114
| 8,824
| 5.147217
| 0.163375
| 0.022323
| 0.018137
| 0.013603
| 0.883851
| 0.883851
| 0.883851
| 0.879316
| 0.879316
| 0.879316
| 0
| 0.012487
| 0.210449
| 8,824
| 272
| 122
| 32.441176
| 0.810535
| 0.063577
| 0
| 0.79638
| 0
| 0
| 0.10212
| 0.021805
| 0
| 0
| 0
| 0.003676
| 0.054299
| 1
| 0.013575
| false
| 0
| 0.063348
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
910733173321ba3de99527363c5d730d9efa57c4
| 225
|
py
|
Python
|
Lib/site-packages/robotframework_selenium2library-3.0.0.dev1-py2.7.egg/Selenium2Library/base/context.py
|
firescrum/RobotFrameworkTools
|
3ebe459c9bd3506c60e6b207bf08235236e824ed
|
[
"bzip2-1.0.6"
] | 11
|
2017-09-30T05:47:28.000Z
|
2019-04-15T11:58:40.000Z
|
Lib/site-packages/robotframework_selenium2library-3.0.0.dev1-py2.7.egg/Selenium2Library/base/context.py
|
firescrum/RobotFrameworkTools
|
3ebe459c9bd3506c60e6b207bf08235236e824ed
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/robotframework_selenium2library-3.0.0.dev1-py2.7.egg/Selenium2Library/base/context.py
|
firescrum/RobotFrameworkTools
|
3ebe459c9bd3506c60e6b207bf08235236e824ed
|
[
"bzip2-1.0.6"
] | 7
|
2018-02-13T10:22:39.000Z
|
2019-07-04T07:39:28.000Z
|
class ContextAware(object):
    """Mixin that gives a component read access to a shared library context.

    The context object is expected to expose `_browser` (the active browser)
    and `_browsers` (all opened browsers).
    """

    def __init__(self, ctx):
        # keep a reference to the owning context; all lookups go through it
        self.ctx = ctx

    @property
    def browser(self):
        """The browser currently active in the owning context."""
        return self.ctx._browser

    @property
    def browsers(self):
        """All browsers tracked by the owning context."""
        return self.ctx._browsers
| 17.307692
| 33
| 0.617778
| 26
| 225
| 5.115385
| 0.423077
| 0.210526
| 0.210526
| 0.255639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.288889
| 225
| 12
| 34
| 18.75
| 0.83125
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.222222
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9133c4846950529ca16a859f4dae933a44ac7a69
| 17,003
|
py
|
Python
|
ffai/ai/layers.py
|
tysen2k/ffai
|
2fa1fd45a8877986fdb21e3fea5e01cbf819d3ec
|
[
"Apache-2.0"
] | null | null | null |
ffai/ai/layers.py
|
tysen2k/ffai
|
2fa1fd45a8877986fdb21e3fea5e01cbf819d3ec
|
[
"Apache-2.0"
] | null | null | null |
ffai/ai/layers.py
|
tysen2k/ffai
|
2fa1fd45a8877986fdb21e3fea5e01cbf819d3ec
|
[
"Apache-2.0"
] | null | null | null |
"""
==========================
Author: Niels Justesen
Year: 2018
==========================
This module contains the feature layers used by the gym implementation.
"""
from ffai.core.procedure import *
class FeatureLayer:
    """Abstract base class for the spatial feature planes of the gym observation."""

    def produce(self, game):
        """Return a 2D numpy array (arena height x width) for the current state."""
        raise NotImplementedError("Must be overridden by subclass")

    def name(self):
        """Return a short human-readable identifier for this layer."""
        raise NotImplementedError("Must be overridden by subclass")
class OccupiedLayer(FeatureLayer):
    """Binary plane: 1.0 on every square occupied by any player."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        board = game.state.pitch.board
        width = len(board[0])
        for y, row in enumerate(board):
            for x in range(width):
                out[y][x] = 0.0 if row[x] is None else 1.0
        return out

    def name(self):
        return "occupied"
class OwnPlayerLayer(FeatureLayer):
    """Binary plane marking squares occupied by players of the acting team."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        # the acting team is the one owning the first available action (if any)
        active_team = game.state.available_actions[0].team if len(game.state.available_actions) > 0 else None
        if active_team is None:
            return out
        for y in range(len(game.state.pitch.board)):
            for x in range(len(game.state.pitch.board[0])):
                player = game.state.pitch.board[y][x]
                # BUGFIX: the original condition ended in
                # "... .team == active_team is not None", which Python chains
                # as "(team == active_team) and (active_team is not None)".
                # The trailing "is not None" was a confusing no-op because
                # active_team is already known to be non-None here.
                out[y][x] = 1.0 if player is not None and player.team == active_team else 0.0
        return out

    def name(self):
        return "own players"
class OppPlayerLayer(FeatureLayer):
    """Binary plane marking squares occupied by players of the opposing team."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        # the acting team is the one owning the first available action (if any)
        active_team = game.state.available_actions[0].team if len(game.state.available_actions) > 0 else None
        if active_team is None:
            return out
        for y in range(len(game.state.pitch.board)):
            for x in range(len(game.state.pitch.board[0])):
                player = game.state.pitch.board[y][x]
                # BUGFIX: the original condition ended in
                # "... .team != active_team is not None", a comparison chain
                # whose trailing "is not None" was a confusing no-op —
                # active_team is already known to be non-None here.
                out[y][x] = 1.0 if player is not None and player.team != active_team else 0.0
        return out

    def name(self):
        return "opp players"
class OwnTackleZoneLayer(FeatureLayer):
    """Tackle-zone coverage of the acting team (0.125 added per covering zone)."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # only players on the pitch with an active tackle zone contribute
        zoned = (p for p in active_team.players
                 if p.position is not None and p.has_tackle_zone())
        for player in zoned:
            for square in game.get_adjacent_squares(player.position):
                out[square.y][square.x] += 0.125
        return out

    def name(self):
        return "own tackle zones"
class OppTackleZoneLayer(FeatureLayer):
    """Tackle-zone coverage of the opposing team (0.125 added per covering zone)."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # only opponents on the pitch with an active tackle zone contribute
        zoned = (p for p in game.get_opp_team(active_team).players
                 if p.position is not None and p.has_tackle_zone())
        for player in zoned:
            for square in game.get_adjacent_squares(player.position):
                out[square.y][square.x] += 0.125
        return out

    def name(self):
        return "opp tackle zones"
class UsedLayer(FeatureLayer):
    """Binary plane: 1.0 on players that have already been used this turn."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    value = 1.0 if player.state.used else 0.0
                    out[player.position.y][player.position.x] = value
        return out

    def name(self):
        return "used players"
class UpLayer(FeatureLayer):
    """Binary plane: 1.0 on players that are standing (state.up)."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    value = 1.0 if player.state.up else 0.0
                    out[player.position.y][player.position.x] = value
        return out

    def name(self):
        return "standing players"
class StunnedLayer(FeatureLayer):
    """Binary plane: 1.0 on players that are currently stunned."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    value = 1.0 if player.state.stunned else 0.0
                    out[player.position.y][player.position.x] = value
        return out

    def name(self):
        return "stunned players"
class ActivePlayerLayer(FeatureLayer):
    """One-hot plane for the square of the currently active player."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        player = game.state.active_player
        if player is not None and player.position is not None:
            out[player.position.y][player.position.x] = 1.0
        return out

    def name(self):
        return "active players"
class TargetPlayerLayer(FeatureLayer):
    """One-hot plane for the target of the ongoing block/pass/handoff/foul."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        # procedure type -> attribute that holds the targeted player
        targeting = ((Block, 'defender'), (PassAction, 'catcher'),
                     (Handoff, 'catcher'), (Foul, 'defender'))
        target = None
        found = False
        # walk the procedure stack top-down; stop at the first targeting proc
        for i in reversed(range(game.state.stack.size())):
            proc = game.state.stack.items[i]
            for proc_type, attr in targeting:
                if isinstance(proc, proc_type):
                    target = getattr(proc, attr)
                    found = True
                    break
            if found:
                break
        if target is not None and target.position is not None:
            out[target.position.y][target.position.x] = 1.0
        return out

    def name(self):
        return "target player"
class AvailablePlayerLayer(FeatureLayer):
    """Binary plane over players selectable by any currently available action."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        if len(actions) == 0 or actions[0].team is None:
            return out
        for choice in actions:
            for player in choice.players:
                if player.position is not None:
                    out[player.position.y][player.position.x] = 1.0
        return out

    def name(self):
        return "available players"
class AvailablePositionLayer(FeatureLayer):
    """Binary plane over squares targetable by any currently available action
    (explicit positions as well as selectable players' squares)."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        if len(actions) == 0 or actions[0].team is None:
            return out
        for choice in actions:
            for position in choice.positions:
                if position is not None:
                    out[position.y][position.x] = 1.0
            for player in choice.players:
                if player.position is not None:
                    out[player.position.y][player.position.x] = 1.0
        return out

    def name(self):
        return "available positions"
class RollProbabilityLayer(FeatureLayer):
    """Per-square success probability: the product over all required agility
    rolls of the chance to meet each target on a d6."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        for choice in actions:
            for idx, position in enumerate(choice.positions):
                if position is None or idx >= len(choice.agi_rolls):
                    continue
                # chance of succeeding every required roll
                chance = 1.0
                for roll in choice.agi_rolls[idx]:
                    chance = chance * ((1 + (6 - roll)) / 6)
                out[position.y][position.x] = chance
        return out

    def name(self):
        return "roll probabilities"
class BlockDiceLayer(FeatureLayer):
    """Per-square block-dice count, mapped via (count + 3) / 6 into [0, 1]."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        for choice in actions:
            for idx, position in enumerate(choice.positions):
                if position is not None and idx < len(choice.block_rolls):
                    out[position.y][position.x] = (choice.block_rolls[idx] + 3) / 6.0
        return out

    def name(self):
        return "block dice"
class MALayer(FeatureLayer):
    """Movement-allowance stat (MA * 0.1) for every player on the pitch."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    out[player.position.y][player.position.x] = player.get_ma() * 0.1
        return out

    def name(self):
        # NOTE: misspelling kept — consumers may key on this exact string
        return "movement allowence"
class STLayer(FeatureLayer):
    """Strength stat (ST * 0.1) for every player on the pitch."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    out[player.position.y][player.position.x] = player.get_st() * 0.1
        return out

    def name(self):
        return "strength"
class AGLayer(FeatureLayer):
    """Agility stat (AG * 0.1) for every player on the pitch."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    out[player.position.y][player.position.x] = player.get_ag() * 0.1
        return out

    def name(self):
        return "agility"
class AVLayer(FeatureLayer):
    """Armor-value stat (AV * 0.1) for every player on the pitch."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    out[player.position.y][player.position.x] = player.get_av() * 0.1
        return out

    def name(self):
        return "armor value"
class SkillLayer(FeatureLayer):
    """Binary plane: players (both teams) possessing one specific skill."""

    def __init__(self, skill):
        # the skill whose presence this layer renders
        self.skill = skill

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    flag = 1 if player.has_skill(self.skill) else 0.0
                    out[player.position.y][player.position.x] = flag
        return out

    def name(self):
        # e.g. Skill SURE_HANDS -> "sure hands"
        return self.skill.name.replace("_", " ").lower()
class MovemenLeftLayer(FeatureLayer):
    """Remaining movement ((MA - moves taken) * 0.1) for every player.

    NOTE: the missing 't' in the class name ("Movemen") is kept for backward
    compatibility with existing imports.
    """

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        if active_team is None:
            return out
        # both teams are rendered into the same plane
        for roster in (active_team.players, game.get_opp_team(active_team).players):
            for player in roster:
                if player.position is not None:
                    remaining = (player.get_ma() - player.state.moves) * 0.1
                    out[player.position.y][player.position.x] = remaining
        return out

    def name(self):
        return "movement left"
class BallLayer(FeatureLayer):
    """Binary plane marking every square that contains a ball."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        on_pitch = (b for b in game.state.pitch.balls if b.position is not None)
        for ball in on_pitch:
            out[ball.position.y][ball.position.x] = 1.0
        return out

    def name(self):
        return "balls"
class OwnHalfLayer(FeatureLayer):
    """Binary mask of the acting team's half of the arena."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        is_home = active_team == game.state.home_team
        tiles = TwoPlayerArena.home_tiles if is_home else TwoPlayerArena.away_tiles
        board = game.arena.board
        for y in range(len(board)):
            for x in range(len(board[0])):
                out[y][x] = 1.0 if board[y][x] in tiles else 0.0
        return out

    def name(self):
        return "own half"
class OwnTouchdownLayer(FeatureLayer):
    """Binary mask of the acting team's own touchdown zone."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        is_home = active_team == game.state.home_team
        tile = Tile.HOME_TOUCHDOWN if is_home else Tile.AWAY_TOUCHDOWN
        board = game.arena.board
        for y in range(len(board)):
            for x in range(len(board[0])):
                out[y][x] = 1.0 if board[y][x] == tile else 0.0
        return out

    def name(self):
        return "own touchdown"
class OppTouchdownLayer(FeatureLayer):
    """Binary mask of the opposing team's touchdown zone."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        actions = game.state.available_actions
        active_team = actions[0].team if len(actions) > 0 else None
        is_home = active_team == game.state.home_team
        tile = Tile.AWAY_TOUCHDOWN if is_home else Tile.HOME_TOUCHDOWN
        board = game.arena.board
        for y in range(len(board)):
            for x in range(len(board[0])):
                out[y][x] = 1.0 if board[y][x] == tile else 0.0
        return out

    def name(self):
        return "opp touchdown"
class CrowdLayer(FeatureLayer):
    """Binary mask of the crowd squares surrounding the pitch."""

    def produce(self, game):
        out = np.zeros((game.arena.height, game.arena.width))
        board = game.arena.board
        width = len(board[0])
        for y, row in enumerate(board):
            for x in range(width):
                out[y][x] = 1.0 if row[x] == Tile.CROWD else 0.0
        return out

    def name(self):
        # NOTE(review): returns "opp crowd" although the layer is
        # team-agnostic — looks like a copy-paste remnant; string kept.
        return "opp crowd"
| 36.176596
| 109
| 0.614597
| 2,324
| 17,003
| 4.416093
| 0.069277
| 0.091396
| 0.07717
| 0.107181
| 0.861834
| 0.84108
| 0.836013
| 0.818279
| 0.810679
| 0.79392
| 0
| 0.012817
| 0.279598
| 17,003
| 469
| 110
| 36.253731
| 0.825047
| 0.011292
| 0
| 0.753541
| 0
| 0
| 0.021603
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.150142
| false
| 0.002833
| 0.002833
| 0.070822
| 0.419263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
914bd587f35c2783a64682ceb11ca3e033648204
| 14,885
|
py
|
Python
|
deepcell/layers/normalization.py
|
jackstellwagen/deepcell-tf
|
d9326b8aceb2f25637e0d3934646da8f6a9f9539
|
[
"Apache-2.0"
] | null | null | null |
deepcell/layers/normalization.py
|
jackstellwagen/deepcell-tf
|
d9326b8aceb2f25637e0d3934646da8f6a9f9539
|
[
"Apache-2.0"
] | null | null | null |
deepcell/layers/normalization.py
|
jackstellwagen/deepcell-tf
|
d9326b8aceb2f25637e0d3934646da8f6a9f9539
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016-2019 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/deepcell-tf/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Layers to noramlize input images for 2D and 3D images"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import activations
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.layers import Layer, InputSpec
try: # tf v1.9 moves conv_utils from _impl to keras.utils
from tensorflow.python.keras.utils import conv_utils
except ImportError:
from tensorflow.python.keras._impl.keras.utils import conv_utils
class ImageNormalization2D(Layer):
    """Image normalization layer for rank-4 (2D image) inputs.

    Depending on ``norm_method``, inputs are normalized with a local
    mean/std window (``'std'``), a global max plus local mean
    (``'max'``), per-image whole-image statistics (``'whole_image'``),
    or passed through unchanged (``None``).  The convolution kernel is
    a fixed, non-trainable averaging filter of spatial size
    ``filter_size`` x ``filter_size``.
    """

    def __init__(self,
                 norm_method='std',
                 filter_size=61,
                 data_format=None,
                 activation=None,
                 use_bias=False,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        # Reject unsupported normalization modes up front.
        self.valid_modes = {'std', 'max', None, 'whole_image'}
        if norm_method not in self.valid_modes:
            raise ValueError('Invalid `norm_method`: "{}". '
                             'Use one of {}.'.format(
                                 norm_method, self.valid_modes))
        # The layer has no learnable parameters; default to frozen.
        if 'trainable' not in kwargs:
            kwargs['trainable'] = False
        super(ImageNormalization2D, self).__init__(
            activity_regularizer=regularizers.get(activity_regularizer),
            **kwargs)
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(ndim=4)  # hardcoded for 2D data
        self.filter_size = filter_size
        self.norm_method = norm_method
        self.data_format = conv_utils.normalize_data_format(data_format)

        if self.data_format == 'channels_first':
            self.channel_axis = 1
        else:
            self.channel_axis = 3  # hardcoded for 2D data

        # Normalize string modes to lowercase (None stays None).
        if isinstance(self.norm_method, str):
            self.norm_method = self.norm_method.lower()

    def build(self, input_shape):
        """Create the fixed averaging kernel (and optional bias)."""
        input_shape = tensor_shape.TensorShape(input_shape)
        if len(input_shape) != 4:
            raise ValueError('Inputs should have rank 4, '
                             'received input shape: %s' % input_shape)
        if self.data_format == 'channels_first':
            channel_axis = 1
        else:
            channel_axis = -1
        if input_shape.dims[channel_axis].value is None:
            raise ValueError('The channel dimension of the inputs '
                             'should be defined. Found `None`.')
        input_dim = int(input_shape[channel_axis])
        self.input_spec = InputSpec(ndim=4, axes={channel_axis: input_dim})
        # Depthwise kernel layout: (H, W, in_channels, channel_multiplier=1).
        kernel_shape = (self.filter_size, self.filter_size, input_dim, 1)

        self.kernel = self.add_weight(
            name='kernel',
            shape=kernel_shape,
            initializer=self.kernel_initializer,
            regularizer=self.kernel_regularizer,
            constraint=self.kernel_constraint,
            trainable=False,
            dtype=self.dtype)

        # Overwrite the initializer's values with a uniform averaging
        # filter (all entries equal, summing to 1).  This runs before
        # the bias is created, so set_weights sees exactly one weight.
        W = np.ones(kernel_shape)
        W = W / W.size
        self.set_weights([W])

        if self.use_bias:
            # NOTE(review): the bias is created but never applied in
            # call() — presumably reserved for future use; confirm.
            self.bias = self.add_weight(
                name='bias',
                shape=(self.filter_size, self.filter_size),
                initializer=self.bias_initializer,
                regularizer=self.bias_regularizer,
                constraint=self.bias_constraint,
                trainable=False,
                dtype=self.dtype)
        else:
            self.bias = None
        self.built = True

    def compute_output_shape(self, input_shape):
        # Normalization does not change the input shape.
        input_shape = tensor_shape.TensorShape(input_shape).as_list()
        return tensor_shape.TensorShape(input_shape)

    def _average_filter(self, inputs):
        """Apply the averaging kernel as a depthwise convolution."""
        # The convolution is run in NHWC, so channels_first data is
        # transposed around it.
        if self.data_format == 'channels_first':
            inputs = K.permute_dimensions(inputs, pattern=[0, 2, 3, 1])

        outputs = tf.nn.depthwise_conv2d(inputs, self.kernel, [1, 1, 1, 1],
                                         padding='SAME', data_format='NHWC')

        if self.data_format == 'channels_first':
            outputs = K.permute_dimensions(outputs, pattern=[0, 3, 1, 2])
        return outputs

    def _window_std_filter(self, inputs, epsilon=K.epsilon()):
        """Local standard deviation via sqrt(E[x^2] - E[x]^2) per window."""
        c1 = self._average_filter(inputs)
        c2 = self._average_filter(K.square(inputs))
        # epsilon keeps the later divisor strictly positive.
        output = K.sqrt(c2 - c1 * c1) + epsilon
        return output

    def call(self, inputs):
        if not self.norm_method:
            # None: pass through unchanged.
            outputs = inputs

        elif self.norm_method == 'whole_image':
            # Spatial axes depend on the data format.
            axes = [2, 3] if self.channel_axis == 1 else [1, 2]
            outputs = inputs - K.mean(inputs, axis=axes, keepdims=True)
            outputs = outputs / K.std(inputs, axis=axes, keepdims=True)

        elif self.norm_method == 'std':
            # Subtract local mean, divide by local std of the residual.
            outputs = inputs - self._average_filter(inputs)
            outputs = outputs / self._window_std_filter(outputs)

        elif self.norm_method == 'max':
            # Scale by the global max, then remove the local mean.
            outputs = inputs / K.max(inputs)
            outputs = outputs - self._average_filter(outputs)

        else:
            # Unreachable after __init__ validation; defensive guard.
            raise NotImplementedError('"{}" is not a valid norm_method'.format(
                self.norm_method))

        return outputs

    def get_config(self):
        """Return the layer configuration for serialization."""
        config = {
            'norm_method': self.norm_method,
            'filter_size': self.filter_size,
            'data_format': self.data_format,
            'activation': activations.serialize(self.activation),
            'use_bias': self.use_bias,
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'bias_regularizer': regularizers.serialize(self.bias_regularizer),
            'activity_regularizer': regularizers.serialize(self.activity_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
            'bias_constraint': constraints.serialize(self.bias_constraint)
        }
        base_config = super(ImageNormalization2D, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class ImageNormalization3D(Layer):
    """Image normalization layer for rank-5 (3D image) inputs.

    Same normalization modes as ImageNormalization2D (``'std'``,
    ``'max'``, ``'whole_image'``, ``None``), but the averaging filter
    is a full 3D convolution of shape
    (depth, filter_size, filter_size), with the depth taken from the
    input shape at build time.
    """

    def __init__(self,
                 norm_method='std',
                 filter_size=61,
                 data_format=None,
                 activation=None,
                 use_bias=False,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        # Reject unsupported normalization modes up front.
        self.valid_modes = {'std', 'max', None, 'whole_image'}
        if norm_method not in self.valid_modes:
            raise ValueError('Invalid `norm_method`: "{}". '
                             'Use one of {}.'.format(
                                 norm_method, self.valid_modes))
        # The layer has no learnable parameters; default to frozen.
        if 'trainable' not in kwargs:
            kwargs['trainable'] = False
        super(ImageNormalization3D, self).__init__(
            activity_regularizer=regularizers.get(activity_regularizer),
            **kwargs)
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(ndim=5)  # hardcoded for 3D data
        self.filter_size = filter_size
        self.norm_method = norm_method
        self.data_format = conv_utils.normalize_data_format(data_format)

        if self.data_format == 'channels_first':
            self.channel_axis = 1
        else:
            self.channel_axis = 4  # hardcoded for 3D data

        # Normalize string modes to lowercase (None stays None).
        if isinstance(self.norm_method, str):
            self.norm_method = self.norm_method.lower()

    def build(self, input_shape):
        """Create the fixed 3D averaging kernel (and optional bias)."""
        input_shape = tensor_shape.TensorShape(input_shape)
        if len(input_shape) != 5:
            raise ValueError('Inputs should have rank 5, '
                             'received input shape: %s' % input_shape)
        if self.data_format == 'channels_first':
            channel_axis = 1
        else:
            channel_axis = -1
        if input_shape.dims[channel_axis].value is None:
            raise ValueError('The channel dimension of the inputs '
                             'should be defined, found None: %s' % input_shape)
        input_dim = int(input_shape[channel_axis])
        self.input_spec = InputSpec(ndim=5, axes={channel_axis: input_dim})

        # The kernel's depth matches the input's depth dimension, so
        # the build-time shape must be fully defined on that axis.
        if self.data_format == 'channels_first':
            depth = int(input_shape[2])
        else:
            depth = int(input_shape[1])

        # conv3d filter layout: (D, H, W, in_channels, out_channels=1).
        kernel_shape = (depth, self.filter_size, self.filter_size, input_dim, 1)

        self.kernel = self.add_weight(
            'kernel',
            shape=kernel_shape,
            initializer=self.kernel_initializer,
            regularizer=self.kernel_regularizer,
            constraint=self.kernel_constraint,
            trainable=False,
            dtype=self.dtype)

        # Overwrite the initializer's values with a uniform averaging
        # filter (all entries equal, summing to 1).  This runs before
        # the bias is created, so set_weights sees exactly one weight.
        W = np.ones(kernel_shape)
        W = W / W.size
        self.set_weights([W])

        if self.use_bias:
            # NOTE(review): the bias is created but never applied in
            # call() — presumably reserved for future use; confirm.
            self.bias = self.add_weight(
                'bias',
                shape=(depth, self.filter_size, self.filter_size),
                initializer=self.bias_initializer,
                regularizer=self.bias_regularizer,
                constraint=self.bias_constraint,
                trainable=False,
                dtype=self.dtype)
        else:
            self.bias = None
        self.built = True

    def compute_output_shape(self, input_shape):
        # Normalization does not change the input shape.
        input_shape = tensor_shape.TensorShape(input_shape).as_list()
        return tensor_shape.TensorShape(input_shape)

    def _average_filter(self, inputs):
        """Apply the averaging kernel as a 3D convolution."""
        # The convolution is run in NDHWC, so channels_first data is
        # transposed around it.
        if self.data_format == 'channels_first':
            inputs = K.permute_dimensions(inputs, pattern=[0, 2, 3, 4, 1])

        # TODO: conv3d vs depthwise_conv2d?
        outputs = tf.nn.conv3d(inputs, self.kernel, [1, 1, 1, 1, 1],
                               padding='SAME', data_format='NDHWC')

        if self.data_format == 'channels_first':
            outputs = K.permute_dimensions(outputs, pattern=[0, 4, 1, 2, 3])
        return outputs

    def _window_std_filter(self, inputs, epsilon=K.epsilon()):
        """Local standard deviation via sqrt(E[x^2] - E[x]^2) per window."""
        c1 = self._average_filter(inputs)
        c2 = self._average_filter(K.square(inputs))
        # epsilon keeps the later divisor strictly positive.
        output = K.sqrt(c2 - c1 * c1) + epsilon
        return output

    def call(self, inputs):
        if not self.norm_method:
            # None: pass through unchanged.
            outputs = inputs

        elif self.norm_method == 'whole_image':
            # NOTE(review): only the H/W axes are reduced here — the
            # depth axis is excluded, i.e. per-frame statistics.
            # Confirm that this is intentional.
            axes = [3, 4] if self.channel_axis == 1 else [2, 3]
            outputs = inputs - K.mean(inputs, axis=axes, keepdims=True)
            outputs = outputs / K.std(inputs, axis=axes, keepdims=True)

        elif self.norm_method == 'std':
            # Subtract local mean, divide by local std of the residual.
            outputs = inputs - self._average_filter(inputs)
            outputs = outputs / self._window_std_filter(outputs)

        elif self.norm_method == 'max':
            # Scale by the global max, then remove the local mean.
            outputs = inputs / K.max(inputs)
            outputs = outputs - self._average_filter(outputs)

        else:
            # Unreachable after __init__ validation; defensive guard.
            raise NotImplementedError('"{}" is not a valid norm_method'.format(self.norm_method))

        return outputs

    def get_config(self):
        """Return the layer configuration for serialization."""
        config = {
            'norm_method': self.norm_method,
            'filter_size': self.filter_size,
            'data_format': self.data_format,
            'activation': activations.serialize(self.activation),
            'use_bias': self.use_bias,
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'bias_regularizer': regularizers.serialize(self.bias_regularizer),
            'activity_regularizer': regularizers.serialize(self.activity_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
            'bias_constraint': constraints.serialize(self.bias_constraint)
        }
        base_config = super(ImageNormalization3D, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
| 42.286932
| 97
| 0.625663
| 1,675
| 14,885
| 5.350448
| 0.161791
| 0.037938
| 0.034367
| 0.016068
| 0.810199
| 0.782191
| 0.765789
| 0.760991
| 0.75318
| 0.75318
| 0
| 0.010151
| 0.278603
| 14,885
| 351
| 98
| 42.407407
| 0.824455
| 0.095868
| 0
| 0.790036
| 0
| 0
| 0.077485
| 0
| 0
| 0
| 0
| 0.002849
| 0
| 1
| 0.049822
| false
| 0
| 0.053381
| 0
| 0.145907
| 0.003559
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e68362efe635861621ec36509ee6cbfb76ebbd08
| 15,197
|
py
|
Python
|
ds5-scripts/aosp_6_0/arm/DoCall.py
|
rewhy/happer
|
3b48894e2d91f150f1aee0ce75291b9ca2a29bbe
|
[
"Apache-2.0"
] | 32
|
2021-04-08T05:39:51.000Z
|
2022-03-31T03:49:35.000Z
|
ds5-scripts/aosp_6_0/arm/DoCall.py
|
rewhy/happer
|
3b48894e2d91f150f1aee0ce75291b9ca2a29bbe
|
[
"Apache-2.0"
] | 2
|
2021-04-14T08:31:30.000Z
|
2021-08-29T19:12:09.000Z
|
ds5-scripts/aosp_6_0/arm/DoCall.py
|
rewhy/happer
|
3b48894e2d91f150f1aee0ce75291b9ca2a29bbe
|
[
"Apache-2.0"
] | 3
|
2021-06-08T08:52:56.000Z
|
2021-06-23T17:28:51.000Z
|
# DoCall.py logs the caller and callee method signatures when the "DoCall" method is invoked in 32-bit mode.
import gc
import sys
import time
from arm_ds.debugger_v1 import Debugger
from arm_ds.debugger_v1 import DebugException
import config
import memory
import mmu
from DexParser import header_item, string_id_item, type_id_item, proto_id_item, method_id_item
# Obtain the debugger's current execution context once at import time;
# all register reads below go through this shared handle.
debugger = Debugger()
execution_state = debugger.getCurrentExecutionContext()
def retrieve_string_value(string_ptr):
    """Dereference a std::string object at *string_ptr* and return its
    character data (read via the target's memory)."""
    # Length field is read to mirror the string layout but not used
    # further; the reference field points at the backing char array.
    length_val = memory.readMemory32(string_ptr + config.offset_string_length)
    data_ptr = memory.readMemory32(string_ptr + config.offset_string_reference)
    return memory.retrieve_char_array(data_ptr)
def cleanup():
    """Drop the cached MMU page table (if present) and force a GC pass."""
    if mmu.page_table is not None:
        del mmu.page_table
    gc.collect()
def DoCall():
# -1- for caller
# get the "shadow_frame" parameter
shadow_frame_ptr = int(execution_state.getRegisterService().getValue("R2"))
if config.debug:
print "[DoCall] shadow_frame = %0#10x" % shadow_frame_ptr
# retrieve the "method_" field of ShadowFrame structure
shadow_frame_method_ptr = memory.readMemory32(shadow_frame_ptr + config.offset_ShadowFrame_method_)
# get the pointer that refers to ArtMethod
art_method_ptr = shadow_frame_method_ptr
# read the "declaring_class_" field of ArtMethod
art_method_declaring_class_ptr = art_method_ptr + config.offset_ArtMethod_declaring_class_
# read the "root_" field of GcRoot
art_method_declaring_class_root_ptr = art_method_declaring_class_ptr + config.offset_GcRoot_root_
# read the "refeence_" field of CompressesReference
art_method_declaring_class_root_reference_ptr = art_method_declaring_class_root_ptr + config.offset_CompressesReference_reference_
art_method_declaring_class_root_reference_val = memory.readMemory32(art_method_declaring_class_root_reference_ptr)
class_ptr = art_method_declaring_class_root_reference_val
# read the "dex_cache_" field of Class
class_dex_cache_ptr = class_ptr + config.offset_Class_dex_cache_
# read the "reference_" field of HeapReference
class_dex_cache_reference_ptr = class_dex_cache_ptr + config.offset_HeapReference_reference_
class_dex_cache_reference_val = memory.readMemory32(class_dex_cache_reference_ptr)
dex_cache_ptr = class_dex_cache_reference_val
# read the "dex_file_" field of DexCache
dex_cache_dex_file_ptr = dex_cache_ptr + config.offset_DexCache_dex_file_
dex_cache_dex_file_val = memory.readMemory32(dex_cache_dex_file_ptr)
dex_file_ptr = dex_cache_dex_file_val
# read the "begin_" field of DexFile
dex_file_begin_ptr = dex_file_ptr + config.offset_DexFile_begin_
dex_file_begin_val = memory.readMemory32(dex_file_begin_ptr)
if config.debug:
print "[DoCall] ArtMethod::declaring_class_::dex_cache_::dex_file_::begin_ = %0#10x" % dex_file_begin_val
# read the "size_" field of DexFile
dex_file_size_ptr = dex_file_ptr + config.offset_DexFile_size_
dex_file_size_val = memory.readMemory32(dex_file_size_ptr)
if config.debug:
print "[DoCall] ArtMethod::declaring_class_::dex_cache_::dex_file_::size_ = %#x" % dex_file_size_val
# read the "location_" field of DexFile
dex_file_location_ptr = dex_file_ptr + config.offset_DexFile_location_
# retrieve the value of std::string
dex_file_location_string_val = retrieve_string_value(dex_file_location_ptr)
if config.debug:
print "[DoCall] ArtMethod::declaring_class_::dex_cache_::dex_file_::location_ = %s" % dex_file_location_string_val
# # we only focus on the Java invocation appeared in the target package
# if not config.package_filter(dex_file_location_string_val):
# # continue the execution of the target application
# execution_state.getExecutionService().resume()
# cleanup()
# return
# read the "access_flags_" field of ArtMethod
art_method_access_flags_ptr = art_method_ptr + config.offset_ArtMethod_access_flags_
art_method_access_flags_value = memory.readMemory32(art_method_access_flags_ptr)
if config.debug:
print "[DoCall] ArtMethod::access_flags_ = %#x (%s)" % (art_method_access_flags_value, config.resolve_access_flags(art_method_access_flags_value))
# read the "dex_code_item_offset_" field of ArtMethod
art_method_dex_code_item_offset_ptr = art_method_ptr + config.offset_ArtMethod_dex_code_item_offset_
art_method_dex_code_item_offset_value = memory.readMemory32(art_method_dex_code_item_offset_ptr)
if config.debug:
print "[DoCall] ArtMethod::dex_code_item_offset_ = %#x" % art_method_dex_code_item_offset_value
# read the "dex_method_index_" field of ArtMethod
art_method_dex_method_index_ptr = art_method_ptr + config.offset_ArtMethod_dex_method_index_
art_method_dex_method_index_val = memory.readMemory32(art_method_dex_method_index_ptr)
if config.debug:
print "[DoCall] ArtMethod::dex_method_index_ = %#x" % art_method_dex_method_index_val
# resolve
string_ids_off = header_item.get_string_ids_off(dex_file_begin_val)
type_ids_off = header_item.get_type_ids_off(dex_file_begin_val)
proto_ids_off = header_item.get_proto_ids_off(dex_file_begin_val)
method_ids_off = header_item.get_method_ids_off(dex_file_begin_val)
class_idx = method_id_item.get_class_idx(dex_file_begin_val, method_ids_off, art_method_dex_method_index_val)
class_descriptor_idx = type_id_item.get_descriptor_idx(dex_file_begin_val, type_ids_off, class_idx)
class_descriptor_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, class_descriptor_idx)
# if config.debug:
# print "[DoCall] class name = %s" % class_descriptor_content
name_idx = method_id_item.get_name_idx(dex_file_begin_val, method_ids_off, art_method_dex_method_index_val)
name_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, name_idx)
# if config.debug:
# print "[DoCall] method name = %s" % name_content
proto_idx = method_id_item.get_proto_idx(dex_file_begin_val, method_ids_off, art_method_dex_method_index_val)
proto_return_type_idx = proto_id_item.get_return_type_idx(dex_file_begin_val, proto_ids_off, proto_idx)
proto_return_type_descriptor_idx = type_id_item.get_descriptor_idx(dex_file_begin_val, type_ids_off, proto_return_type_idx)
proto_return_type_descriptor_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, proto_return_type_descriptor_idx)
# if config.debug:
# print "[DoCall] return type = %s" % proto_return_type_descriptor_content
parameters_content = ""
proto_parameters_list = proto_id_item.get_parameters_list(dex_file_begin_val, proto_ids_off, proto_idx)
if len(proto_parameters_list) == 0:
parameters_content = "()"
else:
for parameter_idx in range(len(proto_parameters_list)):
parameter_type_idx = proto_parameters_list[parameter_idx]
parameter_type_descriptor_idx = type_id_item.get_descriptor_idx(dex_file_begin_val, type_ids_off, parameter_type_idx)
parameter_type_descriptor_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, parameter_type_descriptor_idx)
if len(proto_parameters_list) == 1:
parameters_content = parameters_content + "(" + parameter_type_descriptor_content + ")"
else:
if parameter_idx == 0:
parameters_content = parameters_content + "(" + parameter_type_descriptor_content
elif parameter_idx == (len(proto_parameters_list) - 1):
parameters_content = parameters_content + "," + parameter_type_descriptor_content + ")"
else:
parameters_content = parameters_content + "," + parameter_type_descriptor_content
# if config.debug:
# print "[DoCall] parameters = %s" % parameters_content
caller_method_signature = "%s %s->%s %s%s" % (config.resolve_method_access_flags(art_method_access_flags_value), class_descriptor_content, proto_return_type_descriptor_content, name_content, parameters_content)
if config.debug:
print "[DoCall] caller signature = %s" % caller_method_signature
# -2- for callee
# get the "called_method" parameter
callee_ptr = int(execution_state.getRegisterService().getValue("R0"))
if config.debug:
print "[DoCall] callee = %0#10x" % callee_ptr
# get the pointer that refers to ArtMethod
art_method_ptr = callee_ptr
# read the "declaring_class_" field of ArtMethod
art_method_declaring_class_ptr = art_method_ptr + config.offset_ArtMethod_declaring_class_
# read the "root_" field of GcRoot
art_method_declaring_class_root_ptr = art_method_declaring_class_ptr + config.offset_GcRoot_root_
# read the "refeence_" field of CompressesReference
art_method_declaring_class_root_reference_ptr = art_method_declaring_class_root_ptr + config.offset_CompressesReference_reference_
art_method_declaring_class_root_reference_val = memory.readMemory32(art_method_declaring_class_root_reference_ptr)
class_ptr = art_method_declaring_class_root_reference_val
# read the "dex_cache_" field of Class
class_dex_cache_ptr = class_ptr + config.offset_Class_dex_cache_
# read the "reference_" field of HeapReference
class_dex_cache_reference_ptr = class_dex_cache_ptr + config.offset_HeapReference_reference_
class_dex_cache_reference_val = memory.readMemory32(class_dex_cache_reference_ptr)
dex_cache_ptr = class_dex_cache_reference_val
# read the "dex_file_" field of DexCache
dex_cache_dex_file_ptr = dex_cache_ptr + config.offset_DexCache_dex_file_
dex_cache_dex_file_val = memory.readMemory32(dex_cache_dex_file_ptr)
dex_file_ptr = dex_cache_dex_file_val
# read the "begin_" field of DexFile
dex_file_begin_ptr = dex_file_ptr + config.offset_DexFile_begin_
dex_file_begin_val = memory.readMemory32(dex_file_begin_ptr)
if config.debug:
print "[DoCall] ArtMethod::declaring_class_::dex_cache_::dex_file_::begin_ = %0#10x" % dex_file_begin_val
# read the "size_" field of DexFile
dex_file_size_ptr = dex_file_ptr + config.offset_DexFile_size_
dex_file_size_val = memory.readMemory32(dex_file_size_ptr)
if config.debug:
print "[DoCall] ArtMethod::declaring_class_::dex_cache_::dex_file_::size_ = %#x" % dex_file_size_val
# read the "location_" field of DexFile
dex_file_location_ptr = dex_file_ptr + config.offset_DexFile_location_
# retrieve the value of std::string
dex_file_location_string_val = retrieve_string_value(dex_file_location_ptr)
if config.debug:
print "[DoCall] ArtMethod::declaring_class_::dex_cache_::dex_file_::location_ = %s" % dex_file_location_string_val
# read the "access_flags_" field of ArtMethod
art_method_access_flags_ptr = art_method_ptr + config.offset_ArtMethod_access_flags_
art_method_access_flags_value = memory.readMemory32(art_method_access_flags_ptr)
if config.debug:
print "[DoCall] ArtMethod::access_flags_ = %#x (%s)" % (art_method_access_flags_value, config.resolve_access_flags(art_method_access_flags_value))
# read the "dex_code_item_offset_" field of ArtMethod
art_method_dex_code_item_offset_ptr = art_method_ptr + config.offset_ArtMethod_dex_code_item_offset_
art_method_dex_code_item_offset_value = memory.readMemory32(art_method_dex_code_item_offset_ptr)
if config.debug:
print "[DoCall] ArtMethod::dex_code_item_offset_ = %#x" % art_method_dex_code_item_offset_value
# read the "dex_method_index_" field of ArtMethod
art_method_dex_method_index_ptr = art_method_ptr + config.offset_ArtMethod_dex_method_index_
art_method_dex_method_index_val = memory.readMemory32(art_method_dex_method_index_ptr)
if config.debug:
print "[DoCall] ArtMethod::dex_method_index_ = %#x" % art_method_dex_method_index_val
# resolve
string_ids_off = header_item.get_string_ids_off(dex_file_begin_val)
type_ids_off = header_item.get_type_ids_off(dex_file_begin_val)
proto_ids_off = header_item.get_proto_ids_off(dex_file_begin_val)
method_ids_off = header_item.get_method_ids_off(dex_file_begin_val)
class_idx = method_id_item.get_class_idx(dex_file_begin_val, method_ids_off, art_method_dex_method_index_val)
class_descriptor_idx = type_id_item.get_descriptor_idx(dex_file_begin_val, type_ids_off, class_idx)
class_descriptor_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, class_descriptor_idx)
# if config.debug:
# print "[DoCall] class name = %s" % class_descriptor_content
name_idx = method_id_item.get_name_idx(dex_file_begin_val, method_ids_off, art_method_dex_method_index_val)
name_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, name_idx)
# if config.debug:
# print "[DoCall] method name = %s" % name_content
proto_idx = method_id_item.get_proto_idx(dex_file_begin_val, method_ids_off, art_method_dex_method_index_val)
proto_return_type_idx = proto_id_item.get_return_type_idx(dex_file_begin_val, proto_ids_off, proto_idx)
proto_return_type_descriptor_idx = type_id_item.get_descriptor_idx(dex_file_begin_val, type_ids_off, proto_return_type_idx)
proto_return_type_descriptor_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, proto_return_type_descriptor_idx)
# if config.debug:
# print "[DoCall] return type = %s" % proto_return_type_descriptor_content
parameters_content = ""
proto_parameters_list = proto_id_item.get_parameters_list(dex_file_begin_val, proto_ids_off, proto_idx)
if len(proto_parameters_list) == 0:
parameters_content = "()"
else:
for parameter_idx in range(len(proto_parameters_list)):
parameter_type_idx = proto_parameters_list[parameter_idx]
parameter_type_descriptor_idx = type_id_item.get_descriptor_idx(dex_file_begin_val, type_ids_off, parameter_type_idx)
parameter_type_descriptor_content = string_id_item.get_string_id_item_data(dex_file_begin_val, string_ids_off, parameter_type_descriptor_idx)
if len(proto_parameters_list) == 1:
parameters_content = parameters_content + "(" + parameter_type_descriptor_content + ")"
else:
if parameter_idx == 0:
parameters_content = parameters_content + "(" + parameter_type_descriptor_content
elif parameter_idx == (len(proto_parameters_list) - 1):
parameters_content = parameters_content + "," + parameter_type_descriptor_content + ")"
else:
parameters_content = parameters_content + "," + parameter_type_descriptor_content
# if config.debug:
# print "[DoCall] parameters = %s" % parameters_content
callee_method_signature = "%s %s->%s %s%s" % (config.resolve_method_access_flags(art_method_access_flags_value), class_descriptor_content, proto_return_type_descriptor_content, name_content, parameters_content)
if config.debug:
print "[DoCall] callee signature = %s" % callee_method_signature
config.log_print("[DoCall] caller signature = %s" % caller_method_signature)
config.log_print("[DoCall] callee signature = %s" % callee_method_signature)
# continue the execution of the target application
execution_state.getExecutionService().resume()
cleanup()
return
# Entry point when the debugger sources this script directly: handle
# one DoCall hit, then exit the interpreter.
if __name__ == '__main__':
    DoCall()
    sys.exit()
| 51.341216
| 212
| 0.807659
| 2,229
| 15,197
| 4.947959
| 0.063706
| 0.052679
| 0.045698
| 0.048962
| 0.914135
| 0.909783
| 0.892556
| 0.884396
| 0.868982
| 0.868982
| 0
| 0.004946
| 0.121866
| 15,197
| 295
| 213
| 51.515254
| 0.821506
| 0.156939
| 0
| 0.775281
| 0
| 0
| 0.075799
| 0.041754
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.050562
| null | null | 0.101124
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fc2211a24cec079d6fe7f694b7c29806ca2a15c1
| 10,730
|
py
|
Python
|
http_requests.py
|
Deviathan/Web_Bunny
|
48608a56d0a8b335255c10053619f8d55529fb75
|
[
"MIT"
] | 1
|
2019-08-09T12:42:00.000Z
|
2019-08-09T12:42:00.000Z
|
http_requests.py
|
Deviathan/Web_Bunny
|
48608a56d0a8b335255c10053619f8d55529fb75
|
[
"MIT"
] | null | null | null |
http_requests.py
|
Deviathan/Web_Bunny
|
48608a56d0a8b335255c10053619f8d55529fb75
|
[
"MIT"
] | null | null | null |
import requests
def http_requests(c_s,request_type,url,cua,inua):
global r, error
error = False
request_headers , request_reply , request_text, r , request_encoding = "", "", "", "", ""
#checks the url if http:// or https:// is inserted
if url.startswith('http://') or url.startswith('https://'):
url = url
#if its not it adds http:// to the prefix of the url
else:
prefix = 'http://'
url = prefix + url
#POST REQUEST
if request_type == "POST":
try:
#SESSION
if c_s == ("YES"):
#USER AGENT
if cua == ("YES"):
headers = {'user-agent': inua}
s = requests.Session()
r = s.post(url , headers=headers)
else:
s = requests.Session()
r = s.post(url)
elif c_s == ("NO"):
if cua == ("YES"):
headers = {'user-agent': inua}
r = requests.post(url , headers=headers)
else:
r = requests.post(url)
request_headers,request_reply,request_text,request_encoding=output(r)
except requests.exceptions.Timeout as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = "","","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.TooManyRedirects as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = "","","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.RequestException as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = "","","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
error = False
return r , request_headers , request_reply , request_text , request_encoding, error
#GET REQUEST
if request_type == "GET":
try:
#SESSION
if c_s == ("YES"):
#USER AGENT
if cua == ("YES"):
headers = {'user-agent': inua}
s = requests.Session()
r = s.get(url , headers=headers)
else:
s = requests.Session()
r = s.get(url)
elif c_s == ("NO"):
if cua == ("YES"):
headers = {'user-agent': inua}
r = requests.get(url , headers=headers)
else:
r = requests.get(url)
request_headers,request_reply,request_text,request_encoding=output(r)
except requests.exceptions.Timeout as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = "","","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.TooManyRedirects as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = "","","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.RequestException as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = "","","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
error = False
return r , request_headers , request_reply , request_text , request_encoding, error
#PUT REQUEST
if request_type == "PUT":
try:
#SESSION
if c_s == ("YES"):
#USER AGENT
if cua == ("YES"):
headers = {'user-agent': inua}
s = requests.Session()
r = s.put(url , headers=headers)
else:
s = requests.Session()
r = s.put(url)
elif c_s == ("NO"):
if cua == ("YES"):
headers = {'user-agent': inua}
r = requests.post(url , headers=headers)
else:
r = requests.put(url)
request_headers,request_reply,request_text,request_encoding=output(r)
except requests.exceptions.Timeout as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.TooManyRedirects as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.RequestException as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
error = False
return r , request_headers , request_reply , request_text , request_encoding, error
#DELETE REQUEST
if request_type == "DELETE":
try:
#SESSION
if c_s == ("YES"):
#USER AGENT
if cua == ("YES"):
headers = {'user-agent': inua}
s = requests.Session()
r = s.delete(url , headers=headers)
else:
s = requests.Session()
r = s.delete(url)
elif c_s == ("NO"):
if cua == ("YES"):
headers = {'user-agent': inua}
r = requests.delete(url , headers=headers)
else:
r = requests.delete(url)
request_headers,request_reply,request_text,request_encoding=output(r)
except requests.exceptions.Timeout as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.TooManyRedirects as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.RequestException as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
error = False
return r , request_headers , request_reply , request_text , request_encoding, error
#HEAD REQUEST
if request_type == "HEAD":
try:
#SESSION
if c_s == ("YES"):
#USER AGENT
if cua == ("YES"):
headers = {'user-agent': inua}
s = requests.Session()
r = s.head(url , headers=headers)
else:
s = requests.Session()
r = s.head(url)
elif c_s == ("NO"):
if cua == ("YES"):
headers = {'user-agent': inua}
r = requests.head(url , headers=headers)
else:
r = requests.head(url)
request_headers,request_reply,request_text,request_encoding=output(r)
except requests.exceptions.Timeout as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.TooManyRedirects as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.RequestException as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
error = False
return r , request_headers , request_reply , request_text , request_encoding, error
#OPTIONS REQUEST
if request_type == "OPTIONS":
try:
#SESSION
if c_s == ("YES"):
#USER AGENT
if cua == ("YES"):
headers = {'user-agent': inua}
s = requests.Session()
r = s.options(url , headers=headers)
else:
s = requests.Session()
r = s.options(url)
elif c_s == ("NO"):
if cua == ("YES"):
headers = {'user-agent': inua}
else:
r = requests.options(url)
request_headers,request_reply,request_text,request_encoding=output(r)
except requests.exceptions.Timeout as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.TooManyRedirects as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
except requests.exceptions.RequestException as err:
error = True
r,request_headers, request_reply,request_text,request_encoding = err,"","","",""
return r , request_headers , request_reply , request_text , request_encoding, error
error = False
return r , request_headers , request_reply , request_text , request_encoding, error
def output(r):
    """Unpack the interesting pieces of a completed ``requests`` response.

    Returns a 4-tuple of (request headers sent, response headers received,
    decoded response body, detected text encoding).
    """
    return (
        r.request.headers,
        r.headers,
        r.text,
        r.encoding,
    )
| 45.466102
| 95
| 0.550979
| 1,100
| 10,730
| 5.17
| 0.052727
| 0.128011
| 0.188324
| 0.233163
| 0.907684
| 0.900651
| 0.878319
| 0.868824
| 0.85968
| 0.818534
| 0
| 0
| 0.345294
| 10,730
| 236
| 96
| 45.466102
| 0.809537
| 0.026002
| 0
| 0.79602
| 0
| 0
| 0.022527
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00995
| false
| 0
| 0.004975
| 0
| 0.139303
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fc33c2e1f784d8653ee392395f463dee04a0836c
| 5,369
|
py
|
Python
|
PetExp2Lvl.py
|
tonkaew131/Best_Minion
|
875ff66ddbc2a4a309f72017cd57cef3774e49e5
|
[
"MIT"
] | null | null | null |
PetExp2Lvl.py
|
tonkaew131/Best_Minion
|
875ff66ddbc2a4a309f72017cd57cef3774e49e5
|
[
"MIT"
] | null | null | null |
PetExp2Lvl.py
|
tonkaew131/Best_Minion
|
875ff66ddbc2a4a309f72017cd57cef3774e49e5
|
[
"MIT"
] | 2
|
2020-09-17T06:53:03.000Z
|
2021-05-14T13:41:38.000Z
|
def GetPetLevel(exp, tier):
    """Return the pet level (0-100) reached with ``exp`` experience points.

    Parameters:
        exp  -- total experience the pet has accumulated.
        tier -- pet rarity: 'COMMON', 'UNCOMMON', 'RARE', 'EPIC' or
                'LEGENDARY'.

    Returns the level as an int, or None for an unknown tier (preserving
    the original fall-through behaviour).

    Fixes over the previous linear scan (which used a strict
    ``level[i] < exp < level[i + 1]`` test):
      * an exp exactly equal to an intermediate threshold no longer falls
        through the loop and wrongly reports level 100 — it now yields
        that threshold's level;
      * exp at or below the first threshold (e.g. exp == 0) returns
        level 0 instead of 100.
    """
    from bisect import bisect_right

    # Cumulative experience required to reach each level, per rarity tier.
    # Higher rarities skip the cheap early thresholds and extend further.
    thresholds = {
        'COMMON': [0, 100, 210, 330, 460, 605, 765, 940, 1130, 1340, 1570, 1820, 2095, 2395, 2725, 3085, 3485, 3925, 4415, 4955, 5555, 6215, 6945, 7745, 8625, 9585, 10635, 11785, 13045, 14425, 15935, 17585, 19385, 21345, 23475, 25785, 28285, 30985, 33905, 37065, 40485, 44185, 48185, 52535, 57285, 62485, 68185, 74485, 81485, 89285, 97985, 107685, 118485, 130485, 143785, 158485, 174685, 192485, 211985, 233285, 256485, 281685, 309085, 338885, 371285, 406485, 444685, 486085, 530885, 579285, 631485, 687685, 748085, 812885, 882285, 956485, 1035685, 1120385, 1211085, 1308285, 1412485, 1524185, 1643885, 1772085, 1909285, 2055985, 2212685, 2380385, 2560085, 2752785, 2959485, 3181185, 3418885, 3673585, 3946285, 4237985, 4549685, 4883385, 5241085, 5624785, 6036485],
        'UNCOMMON': [0, 765, 940, 1130, 1340, 1570, 1820, 2095, 2395, 2725, 3085, 3485, 3925, 4415, 4955, 5555, 6215, 6945, 7745, 8625, 9585, 10635, 11785, 13045, 14425, 15935, 17585, 19385, 21345, 23475, 25785, 28285, 30985, 33905, 37065, 40485, 44185, 48185, 52535, 57285, 62485, 68185, 74485, 81485, 89285, 97985, 107685, 118485, 130485, 143785, 158485, 174685, 192485, 211985, 233285, 256485, 281685, 309085, 338885, 371285, 406485, 444685, 486085, 530885, 579285, 631485, 687685, 748085, 812885, 882285, 956485, 1035685, 1120385, 1211085, 1308285, 1412485, 1524185, 1643885, 1772085, 1909285, 2055985, 2212685, 2380385, 2560085, 2752785, 2959485, 3181185, 3418885, 3673585, 3946285, 4237985, 4549685, 4883385, 5241085, 5624785, 6036485, 6478185, 6954885, 7471585, 8033285, 8644985],
        'RARE': [0, 1820, 2095, 2395, 2725, 3085, 3485, 3925, 4415, 4955, 5555, 6215, 6945, 7745, 8625, 9585, 10635, 11785, 13045, 14425, 15935, 17585, 19385, 21345, 23475, 25785, 28285, 30985, 33905, 37065, 40485, 44185, 48185, 52535, 57285, 62485, 68185, 74485, 81485, 89285, 97985, 107685, 118485, 130485, 143785, 158485, 174685, 192485, 211985, 233285, 256485, 281685, 309085, 338885, 371285, 406485, 444685, 486085, 530885, 579285, 631485, 687685, 748085, 812885, 882285, 956485, 1035685, 1120385, 1211085, 1308285, 1412485, 1524185, 1643885, 1772085, 1909285, 2055985, 2212685, 2380385, 2560085, 2752785, 2959485, 3181185, 3418885, 3673585, 3946285, 4237985, 4549685, 4883385, 5241085, 5624785, 6036485, 6478185, 6954885, 7471585, 8033285, 8644985, 9311685, 10038385, 10830085, 11691785, 12628485],
        'EPIC': [0, 3485, 3925, 4415, 4955, 5555, 6215, 6945, 7745, 8625, 9585, 10635, 11785, 13045, 14425, 15935, 17585, 19385, 21345, 23475, 25785, 28285, 30985, 33905, 37065, 40485, 44185, 48185, 52535, 57285, 62485, 68185, 74485, 81485, 89285, 97985, 107685, 118485, 130485, 143785, 158485, 174685, 192485, 211985, 233285, 256485, 281685, 309085, 338885, 371285, 406485, 444685, 486085, 530885, 579285, 631485, 687685, 748085, 812885, 882285, 956485, 1035685, 1120385, 1211085, 1308285, 1412485, 1524185, 1643885, 1772085, 1909285, 2055985, 2212685, 2380385, 2560085, 2752785, 2959485, 3181185, 3418885, 3673585, 3946285, 4237985, 4549685, 4883385, 5241085, 5624785, 6036485, 6478185, 6954885, 7471585, 8033285, 8644985, 9311685, 10038385, 10830085, 11691785, 12628485, 13645185, 14746885, 15938585, 17225285, 18611985],
        'LEGENDARY': [0, 5555, 6215, 6945, 7745, 8625, 9585, 10635, 11785, 13045, 14425, 15935, 17585, 19385, 21345, 23475, 25785, 28285, 30985, 33905, 37065, 40485, 44185, 48185, 52535, 57285, 62485, 68185, 74485, 81485, 89285, 97985, 107685, 118485, 130485, 143785, 158485, 174685, 192485, 211985, 233285, 256485, 281685, 309085, 338885, 371285, 406485, 444685, 486085, 530885, 579285, 631485, 687685, 748085, 812885, 882285, 956485, 1035685, 1120385, 1211085, 1308285, 1412485, 1524185, 1643885, 1772085, 1909285, 2055985, 2212685, 2380385, 2560085, 2752785, 2959485, 3181185, 3418885, 3673585, 3946285, 4237985, 4549685, 4883385, 5241085, 5624785, 6036485, 6478185, 6954885, 7471585, 8033285, 8644985, 9311685, 10038385, 10830085, 11691785, 12628485, 13645185, 14746885, 15938585, 17225285, 18611985, 20108685, 21725385, 23472085, 25358785],
    }

    level = thresholds.get(tier)
    if level is None:
        # Unknown tier: the original implicitly returned None here too.
        return None
    if exp > level[-1]:
        return 100
    # Highest index whose threshold is <= exp, clamped to the 0-100 range.
    return min(max(bisect_right(level, exp) - 1, 0), 100)
| 107.38
| 839
| 0.660086
| 669
| 5,369
| 5.297459
| 0.207773
| 0.029628
| 0.03386
| 0.042325
| 0.961907
| 0.961907
| 0.961907
| 0.961907
| 0.961907
| 0.961907
| 0
| 0.702703
| 0.214379
| 5,369
| 49
| 840
| 109.571429
| 0.137506
| 0
| 0
| 0.714286
| 0
| 0
| 0.005774
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020408
| false
| 0
| 0
| 0
| 0.326531
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
fc3afd012bce83fcfcc49016f72fdd9fe29f45d1
| 179
|
py
|
Python
|
test/system/benchmark/lib/printer.py
|
so931/poseidonos
|
2aa82f26bfbd0d0aee21cd0574779a655634f08c
|
[
"BSD-3-Clause"
] | null | null | null |
test/system/benchmark/lib/printer.py
|
so931/poseidonos
|
2aa82f26bfbd0d0aee21cd0574779a655634f08c
|
[
"BSD-3-Clause"
] | null | null | null |
test/system/benchmark/lib/printer.py
|
so931/poseidonos
|
2aa82f26bfbd0d0aee21cd0574779a655634f08c
|
[
"BSD-3-Clause"
] | null | null | null |
def red(arg):
    """Print *arg* to stdout in bold red using ANSI escape codes."""
    bold, fg, reset = "\033[1m", "\033[31m", "\033[0m"
    print(f"{bold}{fg}{arg}{reset}")
def green(arg):
    """Print *arg* to stdout in bold green using ANSI escape codes."""
    bold, fg, reset = "\033[1m", "\033[32m", "\033[0m"
    print(f"{bold}{fg}{arg}{reset}")
def yellow(arg):
    """Print *arg* to stdout in bold yellow using ANSI escape codes."""
    bold, fg, reset = "\033[1m", "\033[33m", "\033[0m"
    print(f"{bold}{fg}{arg}{reset}")
| 13.769231
| 41
| 0.597765
| 36
| 179
| 2.972222
| 0.361111
| 0.224299
| 0.252336
| 0.336449
| 0.476636
| 0.476636
| 0
| 0
| 0
| 0
| 0
| 0.256579
| 0.150838
| 179
| 12
| 42
| 14.916667
| 0.447368
| 0
| 0
| 0
| 0
| 0
| 0.457627
| 0.457627
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
fc402ad7cbcbb4b4574e3cf978c33b44976d16e2
| 79
|
py
|
Python
|
baseline/pytorch/lm/__init__.py
|
sagnik/baseline
|
8d75616e04c1cca509dbebbb6d08ad7e1a7b9f88
|
[
"Apache-2.0"
] | 241
|
2016-04-25T20:02:31.000Z
|
2019-09-03T05:44:09.000Z
|
baseline/pytorch/lm/__init__.py
|
sagnik/baseline
|
8d75616e04c1cca509dbebbb6d08ad7e1a7b9f88
|
[
"Apache-2.0"
] | 131
|
2019-10-12T10:53:17.000Z
|
2021-12-03T19:52:47.000Z
|
baseline/pytorch/lm/__init__.py
|
sagnik/baseline
|
8d75616e04c1cca509dbebbb6d08ad7e1a7b9f88
|
[
"Apache-2.0"
] | 75
|
2016-06-28T01:18:58.000Z
|
2019-08-29T06:47:22.000Z
|
from baseline.pytorch.lm.model import *
from baseline.pytorch.lm.train import *
| 39.5
| 39
| 0.810127
| 12
| 79
| 5.333333
| 0.583333
| 0.375
| 0.59375
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 2
| 40
| 39.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
fc6b320fd1a3e7c21bcb176e2ac1d2c4c9116b50
| 12,688
|
py
|
Python
|
tests/api/test_hipchat.py
|
cham11ng/boss
|
71e67cf2c4411787d319e2bd842fd93402aeaef3
|
[
"MIT"
] | 25
|
2017-10-23T09:22:06.000Z
|
2021-09-15T11:04:51.000Z
|
tests/api/test_hipchat.py
|
cham11ng/boss
|
71e67cf2c4411787d319e2bd842fd93402aeaef3
|
[
"MIT"
] | 37
|
2017-10-18T15:40:18.000Z
|
2021-12-19T12:59:29.000Z
|
tests/api/test_hipchat.py
|
cham11ng/boss
|
71e67cf2c4411787d319e2bd842fd93402aeaef3
|
[
"MIT"
] | 17
|
2017-10-19T08:39:09.000Z
|
2021-11-01T09:35:05.000Z
|
''' Tests for boss.api.hipchat module. '''
from pytest import fixture
from mock import patch
from boss.api import hipchat
from boss.core.constants.notification_types import (
DEPLOYMENT_STARTED,
DEPLOYMENT_FINISHED,
RUNNING_SCRIPT_STARTED,
RUNNING_SCRIPT_FINISHED
)
@fixture(scope='function')
def base_url():
    ''' Fixture: the fully-formatted HipChat room-notification endpoint,
    built from the configured company name, room id and auth token. '''
    return hipchat.API_BASE_URL.format(
        company_name=hipchat.config()['company_name'],
        room_id=hipchat.config()['room_id'],
        auth_token=hipchat.config()['auth_token'],
    )
def test_create_link():
    ''' hipchat.create_link() wraps a url and title in an HTML anchor. '''
    link_url = 'http://test-link-url'
    link_title = 'Test link'
    expected = '<a href="' + link_url + '">' + link_title + '</a>'
    assert hipchat.create_link(link_url, link_title) == expected
def test_create_link_supports_empty_url():
    ''' hipchat.create_link() falls back to the bare title when url is None. '''
    result = hipchat.create_link(None, 'Test')
    assert result == 'Test'
def test_notity_deploying(base_url):
    ''' send(DEPLOYMENT_STARTED) posts a green "is deploying" message
    with repository, branch and commit links. '''
    notify_params = {
        'branch_url': 'http://branch-url',
        'branch': 'temp',
        'commit': 'tttt',
        'commit_url': 'http://commit-url',
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='green',
        message='user is deploying <a href="http://repository-url">project-name</a>:<a href="http://branch-url">temp</a> (<a href="http://commit-url">tttt</a>) to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_STARTED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deployed(base_url):
    ''' send(DEPLOYMENT_FINISHED) posts a purple "finished deploying"
    message with repository, branch and commit links. '''
    notify_params = {
        'branch_url': 'http://branch-url',
        'branch': 'temp',
        'commit': 'tttt',
        'commit_url': 'http://commit-url',
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='purple',
        message='user finished deploying <a href="http://repository-url">project-name</a>:<a href="http://branch-url">temp</a> (<a href="http://commit-url">tttt</a>) to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_FINISHED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_deployment_finished_notification_with_no_repository_url(base_url):
    ''' With repository/branch/commit urls set to None, the finished
    message degrades to plain text (no anchors). '''
    notify_params = {
        'branch': 'temp',
        'commit': 'tttt',
        'branch_url': None,
        'commit_url': None,
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': None,
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='purple',
        message='user finished deploying project-name:temp (tttt) to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_FINISHED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_deployment_started_notification_with_no_repository_url(base_url):
    ''' With repository/branch/commit urls set to None, the started
    message degrades to plain text (no anchors). '''
    notify_params = {
        'branch': 'temp',
        'commit': 'tttt',
        'branch_url': None,
        'commit_url': None,
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': None,
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='green',
        message='user is deploying project-name:temp (tttt) to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_STARTED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deployment_finished_with_no_commit(base_url):
    ''' Finished message omits the "(commit)" part when no commit is given. '''
    notify_params = {
        'branch_url': 'http://branch-url',
        'branch': 'temp',
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='purple',
        message='user finished deploying <a href="http://repository-url">project-name</a>:<a href="http://branch-url">temp</a> to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_FINISHED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deployment_started_with_no_commit(base_url):
    ''' Started message omits the "(commit)" part when no commit is given. '''
    notify_params = {
        'branch_url': 'http://branch-url',
        'branch': 'temp',
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='green',
        message='user is deploying <a href="http://repository-url">project-name</a>:<a href="http://branch-url">temp</a> to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_STARTED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deploying_with_no_branch(base_url):
    '''
    Started message drops the ":branch" link entirely when no branch
    is provided, keeping only the commit link.
    '''
    notify_params = {
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'commit': 'tttt',
        'commit_url': 'http://commit-url',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='green',
        message='user is deploying <a href="http://repository-url">project-name</a> (<a href="http://commit-url">tttt</a>) to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_STARTED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deployed_with_no_branch(base_url):
    '''
    Finished message drops the ":branch" link entirely when no branch
    is provided, keeping only the commit link.
    '''
    notify_params = {
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'commit': 'tttt',
        'commit_url': 'http://commit-url',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='purple',
        message='user finished deploying <a href="http://repository-url">project-name</a> (<a href="http://commit-url">tttt</a>) to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_FINISHED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deployment_finished_with_no_commit_no_branch(base_url):
    ''' Finished message shows only the project link when both commit
    and branch are omitted. '''
    notify_params = {
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='purple',
        message='user finished deploying <a href="http://repository-url">project-name</a> to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_FINISHED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deployment_started_with_no_commit_no_branch(base_url):
    ''' Started message shows only the project link when both commit
    and branch are omitted. '''
    notify_params = {
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'user': 'user',
    }
    payload = dict(
        color='green',
        message='user is deploying <a href="http://repository-url">project-name</a> to <a href="http://public-url">stage</a> server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_STARTED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_notity_deployment_started_no_links_at_all(base_url):
    ''' With no urls at all, the started message is entirely plain text. '''
    notify_params = {
        'project_name': 'project-name',
        'server_name': 'staging',
        'user': 'user',
    }
    payload = dict(
        color='green',
        message='user is deploying project-name to staging server.',
        notify=True,
        message_format='html',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(DEPLOYMENT_STARTED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_send_running_script_started_notification(base_url):
    ''' send() posts a green "is running <project>:<script>" notification
    for RUNNING_SCRIPT_STARTED. '''
    notify_params = {
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'script': 'migration',
        'user': 'user',
    }
    payload = dict(
        color='green',
        notify=True,
        message_format='html',
        message='user is running <a href="http://repository-url">project-name</a>:migration on <a href="http://public-url">stage</a> server.',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(RUNNING_SCRIPT_STARTED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
def test_send_running_script_finished_notification(base_url):
    ''' send() posts a purple "finished running <project>:<script>"
    notification for RUNNING_SCRIPT_FINISHED. '''
    notify_params = {
        'public_url': 'http://public-url',
        'host': 'test-notify-deploying-host',
        'repository_url': 'http://repository-url',
        'project_name': 'project-name',
        'server_name': 'stage',
        'server_link': 'http://server-link',
        'script': 'migration',
        'user': 'user',
    }
    payload = dict(
        color='purple',
        notify=True,
        message_format='html',
        message='user finished running <a href="http://repository-url">project-name</a>:migration on <a href="http://public-url">stage</a> server.',
    )
    with patch('requests.post') as mock_post:
        hipchat.send(RUNNING_SCRIPT_FINISHED, **notify_params)
        mock_post.assert_called_once_with(base_url, json=payload)
| 34.857143
| 219
| 0.632409
| 1,547
| 12,688
| 4.974144
| 0.063995
| 0.05575
| 0.035088
| 0.062378
| 0.907992
| 0.899805
| 0.88499
| 0.875114
| 0.844185
| 0.813775
| 0
| 0
| 0.21674
| 12,688
| 363
| 220
| 34.953168
| 0.774301
| 0.070697
| 0
| 0.732877
| 0
| 0.041096
| 0.339847
| 0.028799
| 0
| 0
| 0
| 0
| 0.05137
| 1
| 0.054795
| false
| 0
| 0.013699
| 0.003425
| 0.071918
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fca6aadb4d11838ffce6d7325f3639a82bc4128e
| 39,949
|
py
|
Python
|
pyroms/tools.py
|
BobTorgerson/Pyroms
|
6a8d82adf8cd62a11636e9dac7b4d4b7cd365441
|
[
"BSD-3-Clause"
] | 1
|
2021-12-28T13:50:45.000Z
|
2021-12-28T13:50:45.000Z
|
pyroms/tools.py
|
BobTorgerson/Pyroms
|
6a8d82adf8cd62a11636e9dac7b4d4b7cd365441
|
[
"BSD-3-Clause"
] | null | null | null |
pyroms/tools.py
|
BobTorgerson/Pyroms
|
6a8d82adf8cd62a11636e9dac7b4d4b7cd365441
|
[
"BSD-3-Clause"
] | 2
|
2020-08-31T19:54:25.000Z
|
2021-12-28T13:50:47.000Z
|
# encoding: utf-8
import numpy as np
import _iso
import pyroms
def zslice(var, depth, grd, Cpos='rho', vert=False, mode='linear'):
    """
    zslice, lon, lat = zslice(var, depth, grd)

    optional switch:
      - Cpos='rho', 'u', 'v' or 'w'  specify the C-grid position where
                                     the variable rely
      - vert=True/False              if True, return the position of
                                     the vertices
      - mode='linear' or 'spline'    specify the type of interpolation

    return a constant-z slice at depth depth from 3D variable var
    lon and lat contain the C-grid position of the slice for plotting.
    If vert=True, lon and lat contain the position of the
    vertices (to be used with pcolor)
    """
    # Translate the interpolation mode name into the integer flag that
    # the _iso extension expects.
    if mode == 'linear':
        imode = 0
    elif mode == 'spline':
        imode = 1
    else:
        imode = 0
        # NOTE(review): raising aborts the call, so the "defaulting to
        # linear" fallback above is never actually reached; kept for
        # parity with the original code.
        raise Warning('%s not supported, defaulting to linear' % mode)
    # compute the depth on the requested Arakawa-C grid position
    # (Cpos is compared with == here; the original used 'is', which only
    # works because CPython interns short string literals)
    if Cpos == 'u':
        # average z_r at Arakawa-C u points
        z = 0.5 * (grd.vgrid.z_r[0, :, :, :-1] + grd.vgrid.z_r[0, :, :, 1:])
        if vert:
            lon = 0.5 * (grd.hgrid.lon_vert[:, :-1] + grd.hgrid.lon_vert[:, 1:])
            lat = 0.5 * (grd.hgrid.lat_vert[:, :-1] + grd.hgrid.lat_vert[:, 1:])
        else:
            lon = grd.hgrid.lon_u[:]
            lat = grd.hgrid.lat_u[:]
        mask = grd.hgrid.mask_u[:]
    elif Cpos == 'v':
        # average z_r at Arakawa-C v points
        z = 0.5 * (grd.vgrid.z_r[0, :, :-1, :] + grd.vgrid.z_r[0, :, 1:, :])
        if vert:
            lon = 0.5 * (grd.hgrid.lon_vert[:-1, :] + grd.hgrid.lon_vert[1:, :])
            lat = 0.5 * (grd.hgrid.lat_vert[:-1, :] + grd.hgrid.lat_vert[1:, :])
        else:
            lon = grd.hgrid.lon_v[:]
            lat = grd.hgrid.lat_v[:]
        mask = grd.hgrid.mask_v[:]
    elif Cpos == 'w':
        # w points share the rho horizontal location
        z = grd.vgrid.z_w[0, :]
        if vert:
            lon = grd.hgrid.lon_vert[:]
            lat = grd.hgrid.lat_vert[:]
        else:
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    elif Cpos == 'rho':
        # for temp, salt, rho
        z = grd.vgrid.z_r[0, :]
        if vert:
            lon = grd.hgrid.lon_vert[:]
            lat = grd.hgrid.lat_vert[:]
        else:
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.' % Cpos)

    assert len(z.shape) == 3, 'z must be 3D'
    assert len(var.shape) == 3, 'var must be 3D'
    assert z.shape == var.shape, 'data and prop must be the same size'

    # _iso works with a 2D field of (negative) target depths
    depth = -abs(depth)
    depth = depth * np.ones(z.shape[1:])

    zslice = _iso.zslice(z, var, depth, imode)

    # mask land
    zslice = np.ma.masked_where(mask == 0, zslice)
    # mask regions shallower than the requested depth (_iso flags 1e20)
    zslice = np.ma.masked_where(zslice == 1e20, zslice)

    return zslice, lon, lat
def sslice(var, sindex, grd, Cpos='rho', vert=False):
    """
    sslice, lon, lat = sslice(var, sindex, grd)

    optional switch:
      - Cpos='rho', 'u' or 'v'  specify the C-grid position where
                                the variable rely
      - vert=True/False         if True, return the position of
                                the vertices

    return a constant-s slice at index sindex from 3D variable var
    lon and lat contain the C-grid position of the slice for plotting.
    If vert=True, lon and lat contain the position of the
    vertices (to be used with pcolor)
    """
    # Select the horizontal coordinates and land/sea mask for the
    # requested Arakawa-C position.  (The original also computed a depth
    # array z here but never used it, so that dead work was dropped.
    # 'is' string comparisons were replaced with ==.)
    if Cpos == 'u':
        if vert:
            lon = 0.5 * (grd.hgrid.lon_vert[:, :-1] + grd.hgrid.lon_vert[:, 1:])
            lat = 0.5 * (grd.hgrid.lat_vert[:, :-1] + grd.hgrid.lat_vert[:, 1:])
        else:
            lon = grd.hgrid.lon_u[:]
            lat = grd.hgrid.lat_u[:]
        mask = grd.hgrid.mask_u[:]
    elif Cpos == 'v':
        if vert:
            lon = 0.5 * (grd.hgrid.lon_vert[:-1, :] + grd.hgrid.lon_vert[1:, :])
            lat = 0.5 * (grd.hgrid.lat_vert[:-1, :] + grd.hgrid.lat_vert[1:, :])
        else:
            lon = grd.hgrid.lon_v[:]
            lat = grd.hgrid.lat_v[:]
        mask = grd.hgrid.mask_v[:]
    elif Cpos == 'rho':
        # for temp, salt, rho, w
        if vert:
            lon = grd.hgrid.lon_vert[:]
            lat = grd.hgrid.lat_vert[:]
        else:
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.' % Cpos)

    assert len(var.shape) == 3, 'var must be 3D'

    sslice = var[sindex, :, :]

    # mask land
    sslice = np.ma.masked_where(mask == 0, sslice)

    return sslice, lon, lat
def islice(var, iindex, grd, Cpos='rho', vert=False):
    """
    islice, z, lon, lat = islice(var, iindex, grd)

    optional switch:
      - Cpos='rho', 'u', 'v' or 'w'  specify the C-grid position where
                                     the variable rely
      - vert=True/False              if True, return the position of
                                     the vertices

    return a constant-i slice at index iindex from 3D variable var
    lon, lat and z contain the C-grid position of the slice for plotting.
    If vert=True, lon, lat and z contain the position of the
    vertices (to be used with pcolor)
    """
    # compute the depth on the requested Arakawa-C grid position
    # ('is' string comparisons were replaced with ==, which is the
    # guaranteed-correct equality test)
    if Cpos == 'u':
        # average z_r at Arakawa-C u points
        if vert:
            z = grd.vgrid.z_w[0, :]
            z = 0.5 * (z[:, :, :-1] + z[:, :, 1:])
            z = 0.5 * (z[:, :-1, :] + z[:, 1:, :])
            # pad the j ends so z matches the vertex grid extent
            z = np.concatenate((z[:, 0:1, :], z, z[:, -2:-1, :]), 1)
            lon = grd.hgrid.lon_vert[:, 1:-1]
            lat = grd.hgrid.lat_vert[:, 1:-1]
        else:
            z = grd.vgrid.z_r[0, :]
            z = 0.5 * (z[:, :, :-1] + z[:, :, 1:])
            lon = grd.hgrid.lon_u[:]
            lat = grd.hgrid.lat_u[:]
        mask = grd.hgrid.mask_u[:]
    elif Cpos == 'v':
        # average z_r at Arakawa-C v points
        if vert:
            # NOTE(review): unlike the other vert branches this one does
            # not average or pad z and uses the rho points for lon/lat —
            # looks inconsistent, but preserved as-is; verify upstream.
            z = grd.vgrid.z_w[0, :]
            lon = grd.hgrid.lon_rho
            lat = grd.hgrid.lat_rho
        else:
            z = grd.vgrid.z_r[0, :]
            z = 0.5 * (z[:, :-1, :] + z[:, 1:, :])
            lon = grd.hgrid.lon_v[:]
            lat = grd.hgrid.lat_v[:]
        mask = grd.hgrid.mask_v[:]
    elif Cpos == 'w':
        # for w, AKt, ...
        if vert:
            z = grd.vgrid.z_w[0, :]
            z = 0.5 * (z[:-1, :, :] + z[1:, :, :])
            z = np.concatenate((np.array(grd.vgrid.z_w[0, 0, :, :], ndmin=3), \
                                z, \
                                np.array(grd.vgrid.z_w[0, -1, :, :], ndmin=3)), 0)
            z = 0.5 * (z[:, :, :-1] + z[:, :, 1:])
            z = np.concatenate((z[:, :, 0:1], z, z[:, :, -2:-1]), 2)
            lon = 0.5 * (grd.hgrid.lon_vert[:-1, :] + grd.hgrid.lon_vert[1:, :])
            lat = 0.5 * (grd.hgrid.lat_vert[:-1, :] + grd.hgrid.lat_vert[1:, :])
        else:
            z = grd.vgrid.z_w[0, :]
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    elif Cpos == 'rho':
        # for temp, salt, rho, ...
        if vert:
            z = grd.vgrid.z_w[0, :]
            z = 0.5 * (z[:, :-1, :] + z[:, 1:, :])
            z = np.concatenate((z[:, 0:1, :], z, z[:, -2:-1, :]), 1)
            lon = 0.5 * (grd.hgrid.lon_vert[:, :-1] + grd.hgrid.lon_vert[:, 1:])
            lat = 0.5 * (grd.hgrid.lat_vert[:, :-1] + grd.hgrid.lat_vert[:, 1:])
        else:
            z = grd.vgrid.z_r[0, :]
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.' % Cpos)

    # get constant-i slice
    vari = var[:, :, iindex]
    zi = z[:, :, iindex]
    # replicate the 1D lon/lat column over the vertical dimension
    loni = np.tile(lon[:, iindex], (zi.shape[0], 1))
    lati = np.tile(lat[:, iindex], (zi.shape[0], 1))

    # land/sea mask
    maski = np.tile(mask[:, iindex], (vari.shape[0], 1))
    vari = np.ma.masked_where(maski[:, :] == 0, vari[:, :])

    return vari, zi, loni, lati
def jslice(var, jindex, grd, Cpos='rho', vert=False):
    """
    jslice, z, lon, lat = jslice(var, jindex, grd)

    optional switch:
      - Cpos='rho', 'u', 'v' or 'w'  specify the C-grid position where
                                     the variable rely
      - vert=True/False              if True, return the position of
                                     the vertices

    return a constant-j slice at index jindex from 3D variable var
    lon, lat and z contain the C-grid position of the slice for plotting.
    If vert=True, lon, lat and z contain the position of the
    vertices (to be used with pcolor)
    """
    # compute the depth on the requested Arakawa-C grid position
    # ('is' string comparisons were replaced with ==, which is the
    # guaranteed-correct equality test)
    if Cpos == 'u':
        # average z_r at Arakawa-C u points
        if vert:
            # NOTE(review): unlike the other vert branches this one does
            # not average or pad z and uses the rho points for lon/lat —
            # looks inconsistent, but preserved as-is; verify upstream.
            z = grd.vgrid.z_w[0, :]
            lon = grd.hgrid.lon_rho
            lat = grd.hgrid.lat_rho
        else:
            z = grd.vgrid.z_r[0, :]
            z = 0.5 * (z[:, :, :-1] + z[:, :, 1:])
            lon = grd.hgrid.lon_u[:]
            lat = grd.hgrid.lat_u[:]
        mask = grd.hgrid.mask_u[:]
    elif Cpos == 'v':
        # average z_r at Arakawa-C v points
        if vert:
            z = grd.vgrid.z_w[0, :]
            z = 0.5 * (z[:, :-1, :] + z[:, 1:, :])
            z = 0.5 * (z[:, :, :-1] + z[:, :, 1:])
            # pad the i ends so z matches the vertex grid extent
            z = np.concatenate((z[:, :, 0:1], z, z[:, :, -2:-1]), 2)
            lon = grd.hgrid.lon_vert[1:-1, :]
            lat = grd.hgrid.lat_vert[1:-1, :]
        else:
            z = grd.vgrid.z_r[0, :]
            z = 0.5 * (z[:, :-1, :] + z[:, 1:, :])
            lon = grd.hgrid.lon_v[:]
            lat = grd.hgrid.lat_v[:]
        mask = grd.hgrid.mask_v[:]
    elif Cpos == 'w':
        # for w, AKt, ...
        if vert:
            z = grd.vgrid.z_w[0, :]
            z = 0.5 * (z[:-1, :, :] + z[1:, :, :])
            z = np.concatenate((np.array(grd.vgrid.z_w[0, 0, :, :], ndmin=3), \
                                z, \
                                np.array(grd.vgrid.z_w[0, -1, :, :], ndmin=3)), 0)
            z = 0.5 * (z[:, :, :-1] + z[:, :, 1:])
            z = np.concatenate((z[:, :, 0:1], z, z[:, :, -2:-1]), 2)
            lon = 0.5 * (grd.hgrid.lon_vert[:-1, :] + grd.hgrid.lon_vert[1:, :])
            lat = 0.5 * (grd.hgrid.lat_vert[:-1, :] + grd.hgrid.lat_vert[1:, :])
        else:
            z = grd.vgrid.z_w[0, :]
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    elif Cpos == 'rho':
        # for temp, salt, rho, ...
        if vert:
            z = grd.vgrid.z_w[0, :]
            z = 0.5 * (z[:, :, :-1] + z[:, :, 1:])
            z = np.concatenate((z[:, :, 0:1], z, z[:, :, -2:-1]), 2)
            lon = 0.5 * (grd.hgrid.lon_vert[:-1, :] + grd.hgrid.lon_vert[1:, :])
            lat = 0.5 * (grd.hgrid.lat_vert[:-1, :] + grd.hgrid.lat_vert[1:, :])
        else:
            z = grd.vgrid.z_r[0, :]
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.' % Cpos)

    # get constant-j slice
    varj = var[:, jindex, :]
    zj = z[:, jindex, :]
    # replicate the 1D lon/lat row over the vertical dimension
    lonj = np.tile(lon[jindex, :], (zj.shape[0], 1))
    latj = np.tile(lat[jindex, :], (zj.shape[0], 1))

    # land/sea mask
    maskj = np.tile(mask[jindex, :], (varj.shape[0], 1))
    varj = np.ma.masked_where(maskj[:, :] == 0, varj[:, :])

    return varj, zj, lonj, latj
def isoslice(var, prop, isoval, grd, Cpos='rho', masking=True, vert=False):
    """
    isoslice, lon, lat = isoslice(variable, property, isoval, grd)

    Project *variable* onto the surface where *property* == *isoval*.
    The projection is done along the first nonsingleton dimension; when
    there is more than one zero crossing the results are averaged.

    optional switch:
      - Cpos='rho', 'u' or 'v'   C-grid position of the variable
      - masking=True             mask the output where no crossing is found
      - vert=True/False          if True, return the position of the
                                 verticies (to be used with pcolor)

    lon and lat contain the C-grid position of the slice for plotting.

    EXAMPLE:
      s_at_m5  = isoslice(s, z, -5);  # s at z == -5
      h_at_s30 = isoslice(z, s, 30);  # z at s == 30
    """
    if (len(var.squeeze().shape) <= 2):
        raise ValueError('variable must have at least two dimensions')
    if not prop.shape == var.shape:
        raise ValueError('dimension of var and prop must be identical')
    # compute the depth on the requested Arakawa-C grid position.
    # BUG FIX: positions were compared with "is" (identity); that only
    # worked through CPython string interning.  Use "==" instead.
    if Cpos == 'u':
        # average z_r at Arakawa-C u points
        z = 0.5 * (grd.vgrid.z_r[0,:,:,:-1] + grd.vgrid.z_r[0,:,:,1:])
        if vert == True:
            lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:])
            lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:])
        else:
            lon = grd.hgrid.lon_u[:]
            lat = grd.hgrid.lat_u[:]
        # mask is needed below whatever the value of vert
        mask = grd.hgrid.mask_u[:]
    elif Cpos == 'v':
        # average z_r at Arakawa-C v points
        z = 0.5 * (grd.vgrid.z_r[0,:,:-1,:] + grd.vgrid.z_r[0,:,1:,:])
        if vert == True:
            lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:])
            lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:])
        else:
            lon = grd.hgrid.lon_v[:]
            lat = grd.hgrid.lat_v[:]
        mask = grd.hgrid.mask_v[:]
    elif Cpos == 'rho':
        # for temp, salt, rho, w
        z = grd.vgrid.z_r[0,:]
        if vert == True:
            lon = grd.hgrid.lon_vert[:]
            lat = grd.hgrid.lat_vert[:]
        else:
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.'
                      % Cpos)
    prop = prop - isoval
    sz = np.shape(var)
    var = var.reshape(sz[0], -1)
    prop = prop.reshape(sz[0], -1)
    # find zero-crossings (zc == 1)
    zc = np.where((prop[:-1,:] * prop[1:,:]) < 0, 1., 0.)
    varl = var[:-1,:] * zc
    varh = var[1:,:] * zc
    propl = prop[:-1,:] * zc
    proph = prop[1:,:] * zc
    # linear interpolation at the crossing; columns with zc == 0 divide
    # 0/0 and are overwritten on the next line
    isoslice = varl - propl * (varh - varl) / (proph - propl)
    isoslice = np.where(zc == 1., isoslice, 0.)
    # average over multiple crossings
    szc = zc.sum(axis=0)
    szc = np.where(szc == 0., 1, szc)
    isoslice = isoslice.sum(axis=0) / szc
    if masking:
        isoslice = np.ma.masked_where(zc.sum(axis=0) == 0, isoslice)
        if all(isoslice.mask):
            raise Warning('property==%f out of range (%f, %f)' %
                          (isoval, (prop + isoval).min(), (prop + isoval).max()))
    isoslice = isoslice.reshape(sz[1:])
    # mask land
    isoslice = np.ma.masked_where(mask == 0, isoslice)
    return isoslice, lon, lat
def transect(var, istart, iend, jstart, jend, grd, Cpos='rho', vert=False,
             spval=1e37):
    """
    transect, z, lon, lat = transect(var, istart, iend, jstart, jend, grd)

    Return a vertical transect between the points P1 = (istart, jstart)
    and P2 = (iend, jend) from the 3D variable var.

    optional switch:
      - Cpos='rho', 'u' or 'v'   C-grid position of the variable
      - vert=True/False          if True, return the position of the
                                 verticies (to be used with pcolor)
      - spval                    special value flagging masked points

    lon, lat and z contain the C-grid position of the section for plotting.
    """
    # compute the depth on Arakawa-C grid position and get grid information.
    # BUG FIX: positions were compared with "is" (identity); use "==".
    if Cpos == 'u':
        # average z_r and z_w at Arakawa-C u points
        if vert == True:
            z = grd.vgrid.z_w[0,:]
            z = 0.5 * (z[:,:-1,:] + z[:,1:,:])
            z = np.concatenate((z[:,0:1,:], z, z[:,-2:-1,:]), 1)
            lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:])
            lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:])
        else:
            z = grd.vgrid.z_r[0,:]
            z = 0.5 * (z[:,:,:-1] + z[:,:,1:])
            lon = grd.hgrid.lon_u[:]
            lat = grd.hgrid.lat_u[:]
        # mask is needed below whatever the value of vert
        mask = grd.hgrid.mask_u[:]
    elif Cpos == 'v':
        # average z_r and z_w at Arakawa-C v points
        if vert == True:
            z = grd.vgrid.z_w[0,:]
            z = 0.5 * (z[:,:,:-1] + z[:,:,1:])
            z = np.concatenate((z[:,:,0:1], z, z[:,:,-2:-1]), 2)
            lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:])
            lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:])
        else:
            z = grd.vgrid.z_r[0,:]
            # BUG FIX: was 0.5 * (z[:,-1:,:] + z[:,1:,:]); averaging two
            # adjacent rho rows onto v points requires z[:,:-1,:] (compare
            # the identical stencil used everywhere else in this module).
            z = 0.5 * (z[:,:-1,:] + z[:,1:,:])
            lon = grd.hgrid.lon_v[:]
            lat = grd.hgrid.lat_v[:]
        mask = grd.hgrid.mask_v[:]
    elif Cpos == 'rho':
        # for temp, salt, rho
        if vert == True:
            z = grd.vgrid.z_w[0,:]
            z = 0.5 * (z[:,:,:-1] + z[:,:,1:])
            z = 0.5 * (z[:,:-1,:] + z[:,1:,:])
            z = np.concatenate((z[:,:,0:1], z, z[:,:,-2:-1]), 2)
            z = np.concatenate((z[:,0:1,:], z, z[:,-2:-1,:]), 1)
            lon = grd.hgrid.lon_vert[:]
            lat = grd.hgrid.lat_vert[:]
        else:
            z = grd.vgrid.z_r[0,:]
            lon = grd.hgrid.lon_rho[:]
            lat = grd.hgrid.lat_rho[:]
        mask = grd.hgrid.mask_rho[:]
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.'
                      % Cpos)
    # Find the nearest point between P1 (imin,jmin) and P2 (imax, jmax)
    # -----------------------------------------------------------------
    # Initialization
    i0 = istart; j0 = jstart; i1 = iend; j1 = jend
    istart = float(istart); iend = float(iend)
    jstart = float(jstart); jend = float(jend)
    # Compute equation:  j = aj i + bj
    if istart != iend:
        aj = (jend - jstart) / (iend - istart)
        bj = jstart - aj * istart
    else:
        # vertical section: huge slope acts as a sentinel
        aj = 10000.
        bj = 0.
    # Compute equation:  i = ai j + bi
    if jstart != jend:
        ai = (iend - istart) / (jend - jstart)
        bi = istart - ai * jstart
    else:
        ai = 10000.
        bi = 0.
    # Compute the integer pathway: choose the straight line with the
    # smallest slope
    if (abs(aj) <= 1):
        # Here, the best line is y(x)
        print('Here, the best line is y(x)')
        # If i1 < i0 swap points
        if (i1 < i0):
            i = i0; j = j0
            i0 = i1; j0 = j1
            i1 = i; j1 = j
        # compute the nearest j point on the line crossing at i
        n = 0
        near = np.zeros(((i1 - i0 + 1), 4))
        for i in range(i0, i1 + 1):
            jj = aj * i + bj
            near[n, 0] = i
            near[n, 1] = jj
            near[n, 2] = np.floor(jj)
            near[n, 3] = np.ceil(jj)
            n = n + 1
        if vert == False:
            nearp = near
        else:
            # compute the nearest j vert point on the line crossing at i
            n = 0
            nearp = np.zeros(((i1 - i0 + 2), 4))
            for i in range(i0, i1 + 2):
                jj = aj * (i - 0.5) + bj
                nearp[n, 0] = i
                nearp[n, 1] = jj
                nearp[n, 2] = np.floor(jj)
                nearp[n, 3] = np.ceil(jj)
                n = n + 1
    else:
        # Here, the best line is x(y)
        print('Here, the best line is x(y)')
        # If j1 < j0 swap points
        if (j1 < j0):
            i = i0; j = j0
            i0 = i1; j0 = j1
            i1 = i; j1 = j
        # compute the nearest i point on the line crossing at j
        n = 0
        near = np.zeros(((j1 - j0 + 1), 4))
        for j in range(j0, j1 + 1):
            ii = ai * j + bi
            near[n, 0] = j
            near[n, 1] = ii
            near[n, 2] = np.floor(ii)
            near[n, 3] = np.ceil(ii)
            n = n + 1
        if vert == False:
            nearp = near
        else:
            # compute the nearest i vert point on the line crossing at j
            n = 0
            nearp = np.zeros(((j1 - j0 + 2), 4))
            for j in range(j0, j1 + 2):
                ii = ai * (j - 0.5) + bi
                nearp[n, 0] = j
                nearp[n, 1] = ii
                nearp[n, 2] = np.floor(ii)
                nearp[n, 3] = np.ceil(ii)
                n = n + 1
    # Now interpolate between the nearest points through the section
    # --------------------------------------------------------------
    nlev = z.shape[0]
    transect = np.zeros((grd.vgrid.N, near.shape[0]))
    zs = np.zeros((nlev, nearp.shape[0]))
    lons = np.zeros((nlev, nearp.shape[0]))
    lats = np.zeros((nlev, nearp.shape[0]))
    # mask variable on land
    for k in range(var.shape[0]):
        var[k, :, :] = np.ma.masked_where(mask == 0, var[k, :, :])
    for n in range(near.shape[0]):
        # ROBUSTNESS FIX: near holds floats; cast to int before indexing
        # (float fancy indexing is rejected by modern numpy).
        ii = int(near[n, 0]); lo = int(near[n, 2]); hi = int(near[n, 3])
        w = near[n, 1]
        if (abs(aj) <= 1):
            # check if our position matches a grid cell
            if lo == hi:
                transect[:, n] = var[:, lo, ii]
            else:
                if mask[hi, ii] == 0 or mask[lo, ii] == 0:
                    transect[:, n] = spval
                else:
                    transect[:, n] = (w - lo) * var[:, hi, ii] + \
                                     (hi - w) * var[:, lo, ii]
        else:
            if lo == hi:
                transect[:, n] = var[:, ii, lo]
            else:
                if mask[ii, hi] == 0 or mask[ii, lo] == 0:
                    transect[:, n] = spval
                else:
                    transect[:, n] = (w - lo) * var[:, ii, hi] + \
                                     (hi - w) * var[:, ii, lo]
    for n in range(nearp.shape[0]):
        ii = int(nearp[n, 0]); lo = int(nearp[n, 2]); hi = int(nearp[n, 3])
        w = nearp[n, 1]
        if (abs(aj) <= 1):
            if lo == hi:
                zs[:, n] = z[:, lo, ii]
                lons[:, n] = lon[lo, ii]
                lats[:, n] = lat[lo, ii]
            else:
                zs[:, n] = (w - lo) * z[:, hi, ii] + (hi - w) * z[:, lo, ii]
                lons[:, n] = (w - lo) * lon[hi, ii] + (hi - w) * lon[lo, ii]
                lats[:, n] = (w - lo) * lat[hi, ii] + (hi - w) * lat[lo, ii]
        else:
            if lo == hi:
                zs[:, n] = z[:, ii, lo]
                lons[:, n] = lon[ii, lo]
                lats[:, n] = lat[ii, lo]
            else:
                zs[:, n] = (w - lo) * z[:, ii, hi] + (hi - w) * z[:, ii, lo]
                lons[:, n] = (w - lo) * lon[ii, hi] + (hi - w) * lon[ii, lo]
                lats[:, n] = (w - lo) * lat[ii, hi] + (hi - w) * lat[ii, lo]
    # mask transect
    transect = np.ma.masked_values(transect, spval)
    return transect, zs, lons, lats
def lonslice(var, longitude, grd, Cpos='rho', vert=False, spval=1e37):
    """
    lonslice, z, lon, lat = lonslice(var, longitude, grd)

    Return a longitudinal slice along longitude=longitude from the
    3D variable var.

    optional switch:
      - Cpos='rho', 'u' or 'v'   C-grid position of the variable
      - vert=True/False          if True, return the position of the
                                 verticies (to be used with pcolor)
      - spval                    special value

    lon, lat and z contain the C-grid position of the section for plotting.
    """
    if Cpos == 'u':
        lon = grd.hgrid.lon_u
        lat = grd.hgrid.lat_u
    elif Cpos == 'v':
        lon = grd.hgrid.lon_v
        lat = grd.hgrid.lat_v
    elif Cpos == 'rho':
        lon = grd.hgrid.lon_rho
        lat = grd.hgrid.lat_rho
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.'
                      % Cpos)
    # walk the inner boundary of the domain: south, east, north (reversed),
    # west (reversed); edge holds longitudes, idx the matching i/j index
    edge = np.concatenate((lon[1, 1:-1],
                           lon[1:-1, -2],
                           lon[-2, -2:0:-1],
                           lon[-2:0:-1, 1]))
    idx = np.concatenate((range(1, lon[0, :].shape[0] - 1),
                          range(1, lon[:, -1].shape[0] - 1),
                          range(1, lon[-1, ::-1].shape[0] - 1)[::-1],
                          range(1, lon[::-1, 0].shape[0] - 1)[::-1]))
    # sign of (edge - longitude); a sign change marks a crossing
    d = np.zeros(edge.shape)
    for i in range(edge.shape[0]):
        d[i] = edge[i] - longitude
        d[i] = d[i] / abs(d[i])
    d = np.diff(d)
    pt_idx = np.where(d != 0)[0]
    Mp, Lp = lon.shape
    if len(pt_idx) != 2:
        raise ValueError('this function only works for simple quadrangle')
    # determine which side (1=south, 2=east, 3=north, 4=west) each
    # crossing falls on
    side = np.zeros(2)
    if pt_idx[0] < Lp: side[0] = 1
    if pt_idx[0] >= Lp and pt_idx[0] < Lp+Mp: side[0] = 2
    if pt_idx[0] >= Lp+Mp and pt_idx[0] < Lp+Mp+Lp: side[0] = 3
    if pt_idx[0] >= Lp+Mp+Lp: side[0] = 4
    if pt_idx[1] < Lp: side[1] = 1
    if pt_idx[1] >= Lp and pt_idx[1] < Lp+Mp: side[1] = 2
    if pt_idx[1] >= Lp+Mp and pt_idx[1] < Lp+Mp+Lp: side[1] = 3
    if pt_idx[1] >= Lp+Mp+Lp: side[1] = 4
    if side[0] == 1 and side[1] == 2:
        lonslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], Lp-2,
                                  1, idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 1 and side[1] == 3:
        lonslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], idx[pt_idx[1]],
                                  1, Mp-2,
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 1 and side[1] == 4:
        lonslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], 1,
                                  1, idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 2 and side[1] == 3:
        lonslice, z, lon, lat = pyroms.tools.section(var,
                                  Lp-2, idx[pt_idx[1]],
                                  idx[pt_idx[0]], Mp-2,
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 2 and side[1] == 4:
        # BUG FIX: jend used idx[pt_idx[0]] twice (copy-paste); the end
        # point of the section is the second crossing, idx[pt_idx[1]].
        lonslice, z, lon, lat = pyroms.tools.section(var,
                                  Lp-2, 1,
                                  idx[pt_idx[0]], idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 3 and side[1] == 4:
        lonslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], 1,
                                  Mp-2, idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    else:
        # previously fell through and raised NameError on return
        raise ValueError('unexpected side combination (%d, %d)'
                         % (side[0], side[1]))
    return lonslice, z, lon, lat
def latslice(var, latitude, grd, Cpos='rho', vert=False, spval=1e37):
    """
    latslice, z, lon, lat = latslice(var, latitude, grd)

    Return a latitudinal slice along latitude=latitude from the
    3D variable var.

    optional switch:
      - Cpos='rho', 'u' or 'v'   C-grid position of the variable
      - vert=True/False          if True, return the position of the
                                 verticies (to be used with pcolor)
      - spval                    special value

    lon, lat and z contain the C-grid position of the section for plotting.
    """
    if Cpos == 'u':
        lon = grd.hgrid.lon_u
        lat = grd.hgrid.lat_u
    elif Cpos == 'v':
        lon = grd.hgrid.lon_v
        lat = grd.hgrid.lat_v
    elif Cpos == 'rho':
        lon = grd.hgrid.lon_rho
        lat = grd.hgrid.lat_rho
    else:
        raise Warning('%s bad position. Valid Arakawa-C are rho, u or v.'
                      % Cpos)
    # walk the inner boundary of the domain: south, east, north (reversed),
    # west (reversed); edge holds latitudes, idx the matching i/j index
    edge = np.concatenate((lat[1, 1:-1],
                           lat[1:-1, -2],
                           lat[-2, -2:0:-1],
                           lat[-2:0:-1, 1]))
    idx = np.concatenate((range(1, lat[0, :].shape[0] - 1),
                          range(1, lat[:, -1].shape[0] - 1),
                          range(1, lat[-1, ::-1].shape[0] - 1)[::-1],
                          range(1, lat[::-1, 0].shape[0] - 1)[::-1]))
    # sign of (edge - latitude); a sign change marks a crossing
    d = np.zeros(edge.shape)
    for i in range(edge.shape[0]):
        d[i] = edge[i] - latitude
        d[i] = d[i] / abs(d[i])
    d = np.diff(d)
    pt_idx = np.where(d != 0)[0]
    Mp, Lp = lon.shape
    if len(pt_idx) != 2:
        raise ValueError('this function only works for simple quadrangle')
    # determine which side (1=south, 2=east, 3=north, 4=west) each
    # crossing falls on
    side = np.zeros(2)
    if pt_idx[0] < Lp: side[0] = 1
    if pt_idx[0] >= Lp and pt_idx[0] < Lp+Mp: side[0] = 2
    if pt_idx[0] >= Lp+Mp and pt_idx[0] < Lp+Mp+Lp: side[0] = 3
    if pt_idx[0] >= Lp+Mp+Lp: side[0] = 4
    if pt_idx[1] < Lp: side[1] = 1
    if pt_idx[1] >= Lp and pt_idx[1] < Lp+Mp: side[1] = 2
    if pt_idx[1] >= Lp+Mp and pt_idx[1] < Lp+Mp+Lp: side[1] = 3
    if pt_idx[1] >= Lp+Mp+Lp: side[1] = 4
    if side[0] == 1 and side[1] == 2:
        latslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], Lp-2,
                                  1, idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 1 and side[1] == 3:
        latslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], idx[pt_idx[1]],
                                  1, Mp-2,
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 1 and side[1] == 4:
        latslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], 1,
                                  1, idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 2 and side[1] == 3:
        latslice, z, lon, lat = pyroms.tools.section(var,
                                  Lp-2, idx[pt_idx[1]],
                                  idx[pt_idx[0]], Mp-2,
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 2 and side[1] == 4:
        # BUG FIX: jend used idx[pt_idx[0]] twice (copy-paste); the end
        # point of the section is the second crossing, idx[pt_idx[1]].
        latslice, z, lon, lat = pyroms.tools.section(var,
                                  Lp-2, 1,
                                  idx[pt_idx[0]], idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    elif side[0] == 3 and side[1] == 4:
        latslice, z, lon, lat = pyroms.tools.section(var,
                                  idx[pt_idx[0]], 1,
                                  Mp-2, idx[pt_idx[1]],
                                  grd, Cpos=Cpos, vert=vert, spval=spval)
    else:
        # previously fell through and raised NameError on return
        raise ValueError('unexpected side combination (%d, %d)'
                         % (side[0], side[1]))
    return latslice, z, lon, lat
def section_transport(u, v, istart, iend, jstart, jend, grd):
    """
    transpu, transpv = section_transport(u, v, istart, iend, jstart, jend, grd)

    Compute the transport through the section defined between
    the point P1 (istart,jstart) and P2 (iend, jend).
    P1 and P2 are Arakawa-C psi points.
    The transport is positive on the right-hand side of the section.

    NOTE(review): u and v are modified in place (masked points are
    zeroed) — callers keep the zeroed arrays afterwards.
    """
    # Find the nearest point between P1 (imin,jmin) and P2 (imax, jmax)
    # -----------------------------------------------------------------
    # Initialization
    i0=istart; j0=jstart; i1=iend; j1=jend
    istart = float(istart); iend = float(iend)
    jstart = float(jstart); jend = float(jend)
    # Compute equation:  j = aj i + bj
    if istart != iend:
        aj = (jend - jstart ) / (iend - istart)
        bj = jstart - aj * istart
    else:
        # vertical section: huge slope acts as a sentinel value
        aj=10000.
        bj=0.
    # Compute equation:  i = ai j + bi
    if jstart != jend:
        ai = (iend - istart ) / ( jend - jstart )
        bi = istart - ai * jstart
    else:
        # horizontal section: huge slope acts as a sentinel value
        ai=10000.
        bi=0.
    # Compute the integer pathway:
    # Chose the strait line with the smallest slope
    if (abs(aj) <= 1 ):
        # Here, the best line is y(x)
        print 'Here, the best line is y(x)'
        # If i1 < i0 swap points and remember it has been swapped
        # (norm flips the sign of the transport accordingly)
        if i1 <  i0:
            i  = i0 ; j  = j0
            i0 = i1 ; j0 = j1
            i1 = i  ; j1 = j
            norm = -1
        else:
            norm = 1
        # offsets and signs used when summing u/v across the section
        if j1 >= j0:
            ist = 1; jst = 1
            norm_u = 1; norm_v = -1
        else:
            ist = 1; jst = 0
            norm_u = -1; norm_v = -1
        near = []
        # compute the nearest j point on the line crossing at i
        # (stored as complex numbers: i + j*1j)
        for i in range(i0,i1+1):
            j = aj*i + bj
            near.append(i + round(j)*1j)
    else:
        # Here, the best line is x(y)
        print 'Here, the best line is x(y)'
        # If j1 < j0 swap points and remember it has been swapped
        if j1 <  j0:
            i  = i0 ; j  = j0
            i0 = i1 ; j0 = j1
            i1 = i  ; j1 = j
            norm = -1
        else:
            norm = 1
        if i1 >= i0:
            ist = 1; jst = 1
            norm_u = 1; norm_v = -1
        else:
            ist = 0; jst = 1
            norm_u = 1; norm_v = 1
        near = []
        # compute the nearest i point on the line crossing at j
        for j in range(j0,j1+1):
            i = ai*j + bi
            near.append(round(i) + j*1j)
    # Look for intermediate points to be added
    # -------------------------------------------------------------
    inear = np.copy(near)
    n = len(near)
    nn=1
    for k in range(1,n):
        # distance between 2 neighbour points
        d = abs(inear[k] - inear[k-1])
        if ( d > 1 ):
            # intermediate points required if d>1
            neari = interm_pt(inear, k, ai, bi, aj, bj)
            near.insert(nn,neari)
            nn=nn+1
        nn=nn+1
    # Now extract the transport through a section
    # -------------------------------------------
    # get metrics
    dx = grd.hgrid.dx
    dy = grd.hgrid.dy
    z_w = grd.vgrid.z_w[0,:]
    # average z_w at Arakawa-C u points; dzu is the u-cell thickness
    zu = 0.5 * (z_w[:,:,:-1] + z_w[:,:,1:])
    dzu = zu[1:,:,:] - zu[:-1,:,:]
    # average z_w at Arakawa-C v points; dzv is the v-cell thickness
    zv = 0.5 * (z_w[:,:-1,:] + z_w[:,1:,:])
    dzv = zv[1:,:,:] - zv[:-1,:,:]
    # set u and v to zero where u and v are masked for the sum
    # (in-place mutation of the caller's arrays)
    for k in range(u.shape[0]):
        u[k,:] = np.where(grd.hgrid.mask_u == 1, u[k,:], 0)
        v[k,:] = np.where(grd.hgrid.mask_v == 1, v[k,:], 0)
    n = len(near)
    transpu = 0
    transpv = 0
    # walk the pathway; a constant real part between two consecutive
    # points means a u-face crossing, a constant imaginary part a v-face
    for l in range(0,n-1):
        ii = int(np.real(near[l])); jj = int(np.imag(near[l]))
        for k in range(0, dzu.shape[0]):
            if np.real(near[l]) == np.real(near[l+1]):
                # transport = velocity * face width * cell thickness
                trans = u[k, jj+jst, ii] * dy[jj+jst, ii] * \
                        dzu[k, jj+jst, ii] * norm_u * norm
                transpu = transpu + trans
            elif np.imag(near[l]) == np.imag(near[l+1]):
                trans = v[k, jj, ii+ist] * dx[jj, ii+ist] * \
                        dzv[k, jj, ii+ist] * norm_v * norm
                transpv = transpv + trans
    return transpu, transpv
def interm_pt(pnear, pk, pai, pbi, paj, pbj):
    """Find the best intermediate point on a pathway.

    Parameters
    ----------
    pnear : sequence of complex
        positions of the nearest points, encoded as i + j*1j
    pk : int
        current working index
    pai, pbi : float
        slope and intercept of the line i = pai * j + pbi
    paj, pbj : float
        slope and intercept of the line j = paj * i + pbj

    Returns
    -------
    pneari : complex
        position of the intermediate point to insert between
        pnear[pk-1] and pnear[pk].
    """
    # BUG FIX: the original called bare real()/imag(), which are undefined
    # unless numpy was star-imported; use the np.real/np.imag qualified
    # names that the rest of this module relies on.
    # 1 - Compute intermediate point.
    # Determine whether we use y(x) or x(y):
    if (abs(paj) <= 1):
        # y(x): candidates are one step in i, or one step in j
        ylptmp1 = pnear[pk-1] + 1
        ylptmp2 = pnear[pk-1] + (paj/abs(paj))*1j
        # M is the first candidate point:
        zxm = np.real(ylptmp1)
        zym = np.imag(ylptmp1)
        za0 = paj
        zb0 = pbj
        # perpendicular through M
        za1 = -1./za0
        zb1 = zym - za1*zxm
        # P is the projection of M on the straight line
        zxp = -(zb1-zb0)/(za1-za0)
        zyp = za0*zxp + zb0
        # zd1 is the squared distance MP
        zd1 = (zxm-zxp)*(zxm-zxp) + (zym-zyp)*(zym-zyp)
        # M is the second candidate point:
        zxm = np.real(ylptmp2)
        zym = np.imag(ylptmp2)
        za1 = -1./za0
        zb1 = zym - za1*zxm
        zxp = -(zb1-zb0)/(za1-za0)
        zyp = za0*zxp + zb0
        # zd2 is the squared distance MP
        zd2 = (zxm-zxp)*(zxm-zxp) + (zym-zyp)*(zym-zyp)
        # choose the candidate closest to the line
        if (zd2 <= zd1):
            pneari = ylptmp2
        else:
            pneari = ylptmp1
    else:
        # x(y): candidates are one step in i, or one step in j
        ylptmp1 = pnear[pk-1] + (pai/abs(pai))
        ylptmp2 = pnear[pk-1] + 1*1j
        # M is the first candidate point:
        zxm = np.real(ylptmp1)
        zym = np.imag(ylptmp1)
        za0 = pai
        zb0 = pbi
        za1 = -1./za0
        zb1 = zxm - za1*zym
        # P is the projection of M on the straight line
        zyp = -(zb1-zb0)/(za1-za0)
        zxp = za0*zyp + zb0
        # zd1 is the squared distance MP
        zd1 = (zxm-zxp)*(zxm-zxp) + (zym-zyp)*(zym-zyp)
        # M is the second candidate point:
        zxm = np.real(ylptmp2)
        zym = np.imag(ylptmp2)
        za1 = -1./za0
        zb1 = zxm - za1*zym
        zyp = -(zb1-zb0)/(za1-za0)
        zxp = za0*zyp + zb0
        # zd2 is the squared distance MP
        zd2 = (zxm-zxp)*(zxm-zxp) + (zym-zyp)*(zym-zyp)
        # choose the candidate closest to the line
        if (zd2 <= zd1):
            pneari = ylptmp2
        else:
            pneari = ylptmp1
    return pneari
| 34.981611
| 94
| 0.472527
| 5,833
| 39,949
| 3.186868
| 0.061718
| 0.062403
| 0.035505
| 0.027113
| 0.791651
| 0.770617
| 0.752972
| 0.728334
| 0.715692
| 0.688579
| 0
| 0.040814
| 0.360309
| 39,949
| 1,141
| 95
| 35.01227
| 0.686598
| 0.102356
| 0
| 0.704828
| 0
| 0
| 0.01727
| 0
| 0
| 0
| 0
| 0
| 0.005517
| 0
| null | null | 0
| 0.004138
| null | null | 0.005517
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5da9c9c5fa784b4f5127cb5388acbe99fe7882b9
| 11,180
|
py
|
Python
|
networkapi/api_ip/views.py
|
vinicius-marinho/GloboNetworkAPI
|
94651d3b4dd180769bc40ec966814f3427ccfb5b
|
[
"Apache-2.0"
] | 73
|
2015-04-13T17:56:11.000Z
|
2022-03-24T06:13:07.000Z
|
networkapi/api_ip/views.py
|
leopoldomauricio/GloboNetworkAPI
|
3b5b2e336d9eb53b2c113977bfe466b23a50aa29
|
[
"Apache-2.0"
] | 99
|
2015-04-03T01:04:46.000Z
|
2021-10-03T23:24:48.000Z
|
networkapi/api_ip/views.py
|
shildenbrand/GloboNetworkAPI
|
515d5e961456cee657c08c275faa1b69b7452719
|
[
"Apache-2.0"
] | 64
|
2015-08-05T21:26:29.000Z
|
2022-03-22T01:06:28.000Z
|
# -*- coding: utf-8 -*-
import logging
from django.db.transaction import commit_on_success
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from networkapi.api_ip import facade
from networkapi.api_ip import serializers
from networkapi.api_ip import tasks
from networkapi.api_ip.permissions import Read
from networkapi.api_ip.permissions import Write
from networkapi.api_ip.permissions import write_objv4_permission
from networkapi.api_ip.permissions import write_objv6_permission
from networkapi.settings import SPECS
from networkapi.util.classes import CustomAPIView
from networkapi.util.decorators import logs_method_apiview
from networkapi.util.decorators import permission_classes_apiview
from networkapi.util.decorators import permission_obj_apiview
from networkapi.util.decorators import prepare_search
from networkapi.util.geral import render_to_json
from networkapi.util.json_validate import json_validate
from networkapi.util.json_validate import raise_json_validate
log = logging.getLogger(__name__)
class IPv4View(CustomAPIView):

    """Synchronous CRUD endpoints for IPv4 addresses."""

    @logs_method_apiview
    @raise_json_validate('')
    @permission_classes_apiview((IsAuthenticated, Read))
    @prepare_search
    def get(self, request, *args, **kwargs):
        """Return a list of IPv4 addresses, by ids or by search dict.

        (Original docstring said "vip request"; this view serializes
        Ipv4 objects via Ipv4V3Serializer.)
        """
        if not kwargs.get('obj_ids'):
            # extended search: facade returns a dict with the query set
            # plus pagination metadata
            obj_model = facade.get_ipv4_by_search(self.search)
            ips = obj_model['query_set']
            only_main_property = False
        else:
            # direct lookup by ';'-separated ids in the URL
            obj_ids = kwargs.get('obj_ids').split(';')
            ips = facade.get_ipv4_by_ids(obj_ids)
            only_main_property = True
            obj_model = None

        # serializer ips
        serializer_ip = serializers.Ipv4V3Serializer(
            ips,
            many=True,
            fields=self.fields,
            include=self.include,
            exclude=self.exclude,
            kind=self.kind
        )

        # prepare serializer with customized properties
        data = render_to_json(
            serializer_ip,
            main_property='ips',
            obj_model=obj_model,
            request=request,
            only_main_property=only_main_property
        )

        return Response(data, status=status.HTTP_200_OK)

    @logs_method_apiview
    @raise_json_validate('ipv4_post')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv4_permission])
    @commit_on_success
    def post(self, request, *args, **kwargs):
        """Create Ipv4.

        Validates the request body against the 'ipv4_post' spec, then
        creates each ip and returns the new ids.
        """
        ips = request.DATA
        json_validate(SPECS.get('ipv4_post')).validate(ips)
        response = list()
        for ip in ips['ips']:
            ret = facade.create_ipv4(ip, request.user)
            response.append({'id': ret.id})

        return Response(response, status=status.HTTP_201_CREATED)

    @logs_method_apiview
    @raise_json_validate('ipv4_put')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv4_permission])
    @commit_on_success
    def put(self, request, *args, **kwargs):
        """Edit Ipv4.

        Validates the request body against the 'ipv4_put' spec, then
        updates each ip and returns the updated ids.
        """
        ips = request.DATA
        json_validate(SPECS.get('ipv4_put')).validate(ips)
        response = list()
        for ip in ips['ips']:
            ret = facade.update_ipv4(ip, request.user)
            response.append({'id': ret.id})

        return Response(response, status=status.HTTP_200_OK)

    @logs_method_apiview
    @raise_json_validate('')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv4_permission])
    @commit_on_success
    def delete(self, request, *args, **kwargs):
        """Delete Ipv4.

        Deletes each of the ';'-separated ids given in the URL.
        """
        obj_ids = kwargs['obj_ids'].split(';')
        for obj_id in obj_ids:
            facade.delete_ipv4(obj_id)

        return Response({}, status=status.HTTP_200_OK)
class IPv4AsyncView(CustomAPIView):

    """Asynchronous CRUD endpoints for IPv4 addresses.

    Each operation is queued as a celery task and the task ids are
    returned with HTTP 202.
    """

    @logs_method_apiview
    @raise_json_validate('ipv4_post')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv4_permission])
    @commit_on_success
    def post(self, request, *args, **kwargs):
        """Create Ipv4."""
        payload = request.DATA
        json_validate(SPECS.get('ipv4_post')).validate(payload)
        requester = request.user
        # one task per ip; collect the queued task ids for the caller
        response = [
            {'task_id': tasks.create_ipv4.apply_async(
                args=[ip, requester.id], queue='napi.network').id}
            for ip in payload['ips']
        ]

        return Response(response, status=status.HTTP_202_ACCEPTED)

    @logs_method_apiview
    @raise_json_validate('ipv4_put')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv4_permission])
    @commit_on_success
    def put(self, request, *args, **kwargs):
        """Edit Ipv4."""
        payload = request.DATA
        json_validate(SPECS.get('ipv4_put')).validate(payload)
        requester = request.user
        response = [
            {'task_id': tasks.update_ipv4.apply_async(
                args=[ip, requester.id], queue='napi.network').id}
            for ip in payload['ips']
        ]

        return Response(response, status=status.HTTP_202_ACCEPTED)

    @logs_method_apiview
    @raise_json_validate('')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv4_permission])
    @commit_on_success
    def delete(self, request, *args, **kwargs):
        """Delete Ipv4."""
        requester = request.user
        identifiers = kwargs['obj_ids'].split(';')
        response = []
        for identifier in identifiers:
            queued = tasks.delete_ipv4.apply_async(
                args=[identifier, requester.id], queue='napi.network')
            response.append({'task_id': queued.id})

        return Response(response, status=status.HTTP_202_ACCEPTED)
class IPv6View(CustomAPIView):

    """Synchronous CRUD endpoints for IPv6 addresses."""

    @logs_method_apiview
    @raise_json_validate('')
    @permission_classes_apiview((IsAuthenticated, Read))
    @prepare_search
    def get(self, request, *args, **kwargs):
        """Return a list of IPv6 addresses, by ids or by search dict.

        CONSISTENCY FIX: added @raise_json_validate('') to match the
        decorator stack of the sibling IPv4View.get.
        """
        if not kwargs.get('obj_ids'):
            # extended search: facade returns a dict with the query set
            # plus pagination metadata
            obj_model = facade.get_ipv6_by_search(self.search)
            ips = obj_model['query_set']
            only_main_property = False
        else:
            # direct lookup by ';'-separated ids in the URL
            obj_ids = kwargs.get('obj_ids').split(';')
            ips = facade.get_ipv6_by_ids(obj_ids)
            only_main_property = True
            obj_model = None

        # serializer ips
        serializer_ip = serializers.Ipv6V3Serializer(
            ips,
            many=True,
            fields=self.fields,
            include=self.include,
            exclude=self.exclude,
            kind=self.kind
        )

        # prepare serializer with customized properties
        data = render_to_json(
            serializer_ip,
            main_property='ips',
            obj_model=obj_model,
            request=request,
            only_main_property=only_main_property
        )

        return Response(data, status=status.HTTP_200_OK)

    @logs_method_apiview
    @raise_json_validate('ipv6_post')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv6_permission])
    @commit_on_success
    def post(self, request, *args, **kwargs):
        """Save Ipv6.

        Validates the request body against the 'ipv6_post' spec, then
        creates each ip and returns the new ids.
        """
        ips = request.DATA
        json_validate(SPECS.get('ipv6_post')).validate(ips)
        response = list()
        for ip in ips['ips']:
            ret = facade.create_ipv6(ip, request.user)
            response.append({'id': ret.id})

        return Response(response, status=status.HTTP_201_CREATED)

    @logs_method_apiview
    @raise_json_validate('ipv6_put')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv6_permission])
    @commit_on_success
    def put(self, request, *args, **kwargs):
        """Edit Ipv6.

        Validates the request body against the 'ipv6_put' spec, then
        updates each ip and returns the updated ids.
        """
        ips = request.DATA
        json_validate(SPECS.get('ipv6_put')).validate(ips)
        response = list()
        for ip in ips['ips']:
            ret = facade.update_ipv6(ip, request.user)
            response.append({'id': ret.id})

        return Response(response, status=status.HTTP_200_OK)

    @logs_method_apiview
    @raise_json_validate('')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv6_permission])
    @commit_on_success
    def delete(self, request, *args, **kwargs):
        """Delete Ipv6.

        (Docstring previously said "Edit Ipv6".)
        """
        obj_ids = kwargs['obj_ids'].split(';')
        for obj_id in obj_ids:
            facade.delete_ipv6(obj_id)

        return Response({}, status=status.HTTP_200_OK)
class IPv6AsyncView(CustomAPIView):

    """Asynchronous CRUD endpoints for IPv6 addresses.

    Each operation is queued as a celery task and the task ids are
    returned with HTTP 202.
    """

    @logs_method_apiview
    @raise_json_validate('ipv6_post')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv6_permission])
    @commit_on_success
    def post(self, request, *args, **kwargs):
        """Create Ipv6."""
        payload = request.DATA
        json_validate(SPECS.get('ipv6_post')).validate(payload)
        requester = request.user
        # one task per ip; collect the queued task ids for the caller
        response = [
            {'task_id': tasks.create_ipv6.apply_async(
                args=[ip, requester.id], queue='napi.network').id}
            for ip in payload['ips']
        ]

        return Response(response, status=status.HTTP_202_ACCEPTED)

    @logs_method_apiview
    @raise_json_validate('ipv6_put')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv6_permission])
    @commit_on_success
    def put(self, request, *args, **kwargs):
        """Edit Ipv6."""
        payload = request.DATA
        json_validate(SPECS.get('ipv6_put')).validate(payload)
        requester = request.user
        response = [
            {'task_id': tasks.update_ipv6.apply_async(
                args=[ip, requester.id], queue='napi.network').id}
            for ip in payload['ips']
        ]

        return Response(response, status=status.HTTP_202_ACCEPTED)

    @logs_method_apiview
    @raise_json_validate('')
    @permission_classes_apiview((IsAuthenticated, Write))
    @permission_obj_apiview([write_objv6_permission])
    @commit_on_success
    def delete(self, request, *args, **kwargs):
        """Delete Ipv6."""
        requester = request.user
        identifiers = kwargs['obj_ids'].split(';')
        response = []
        for identifier in identifiers:
            queued = tasks.delete_ipv6.apply_async(
                args=[identifier, requester.id], queue='napi.network')
            response.append({'task_id': queued.id})

        return Response(response, status=status.HTTP_202_ACCEPTED)
| 30.630137
| 74
| 0.628801
| 1,270
| 11,180
| 5.255118
| 0.098425
| 0.044951
| 0.038208
| 0.08181
| 0.905304
| 0.888972
| 0.865448
| 0.825892
| 0.825892
| 0.825292
| 0
| 0.013302
| 0.267084
| 11,180
| 364
| 75
| 30.714286
| 0.801196
| 0.029785
| 0
| 0.794677
| 0
| 0
| 0.034147
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053232
| false
| 0
| 0.079848
| 0
| 0.201521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5dad995271d02fed2afee3b2c0df478d245a424c
| 6,793
|
py
|
Python
|
data/transcoder_evaluation_gfg/python/PROGRAM_WORST_FIT_ALGORITHM_MEMORY_MANAGEMENT.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 241
|
2021-07-20T08:35:20.000Z
|
2022-03-31T02:39:08.000Z
|
data/transcoder_evaluation_gfg/python/PROGRAM_WORST_FIT_ALGORITHM_MEMORY_MANAGEMENT.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 49
|
2021-07-22T23:18:42.000Z
|
2022-03-24T09:15:26.000Z
|
data/transcoder_evaluation_gfg/python/PROGRAM_WORST_FIT_ALGORITHM_MEMORY_MANAGEMENT.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 71
|
2021-07-21T05:17:52.000Z
|
2022-03-29T23:49:28.000Z
|
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold(blockSize, m, processSize, n):
    """Worst-fit memory allocation.

    Each of the n processes is placed into the largest of the m blocks
    that can hold it (first such block wins ties). ``blockSize`` is
    mutated in place; the resulting allocation table is printed.
    """
    allocation = [-1] * n
    for proc in range(n):
        chosen = -1
        for blk in range(m):
            if blockSize[blk] < processSize[proc]:
                continue
            # keep the first block seen among those with maximal size
            if chosen == -1 or blockSize[blk] > blockSize[chosen]:
                chosen = blk
        if chosen != -1:
            allocation[proc] = chosen
            blockSize[chosen] -= processSize[proc]
    print("Process No.Process Size Block no.")
    for proc in range(n):
        print(proc + 1, " ", processSize[proc], end=" ")
        if allocation[proc] != -1:
            print(allocation[proc] + 1)
        else:
            print("Not Allocated")
#TOFILL
if __name__ == '__main__':
    # Argument sets: (blockSize, m, processSize, n) per test case.
    param = [
        ([2, 3, 4, 4, 6, 9, 9, 9, 14, 16, 16, 20, 28, 29, 31, 34, 37, 39, 46, 51, 54, 60, 63, 63, 65, 65, 66, 68, 70, 75, 80, 83, 88, 90, 91, 96, 98],33,[2, 4, 6, 9, 10, 13, 14, 16, 20, 21, 33, 34, 35, 36, 43, 44, 49, 51, 51, 52, 55, 58, 59, 60, 65, 66, 66, 74, 78, 79, 83, 88, 88, 91, 94, 95, 97],31,),
        ([-44, -62, 0, 42, 78, 2, 88, 84, 48, -72, 76, -76, -42, 4, -56, -52, 46, -74, 66, 94, 32, -38, -36, 68, -70, 76, 32, -4, -48, -68, 96, 84, 20, 66, 86, 34, -8, -6, -84, 56, 50, 40, 70, 10, -2, 46, -10, -54],47,[-18, 38, -82, -54, -32, 32, 76, 54, 88, -50, -10, -66, 42, 34, -56, -38, -82, -4, 52, -78, 82, -32, 72, 84, 98, 88, -2, 60, 78, 68, -84, -58, 78, -98, -14, -20, -60, -94, 20, -48, 26, 2, -40, 76, -38, 38, -10, -52],25,),
        ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],30,[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],29,),
        ([50, 57, 42, 88, 13, 71, 49],3,[54, 56, 23, 71, 73, 58, 89],5,),
        ([-96, -94, -92, -84, -82, -80, -80, -76, -76, -72, -58, -46, -44, -40, -34, -24, -18, -16, -16, -10, -10, 2, 8, 10, 14, 20, 26, 36, 38, 42, 54, 54, 62, 64, 64, 64, 72, 78, 84, 86, 88, 94, 98],34,[-98, -92, -92, -88, -82, -76, -58, -58, -54, -50, -44, -32, -30, -28, -28, -18, -4, -4, 0, 2, 6, 10, 10, 14, 20, 22, 24, 26, 30, 38, 48, 48, 52, 62, 66, 78, 82, 88, 90, 92, 94, 94, 98],40,),
        ([0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0],43,[0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1],42,),
        ([10, 15, 19, 22, 34, 35, 36, 38, 38, 40, 42, 42, 43, 48, 48, 49, 50, 54, 56, 60, 69, 76, 77, 79, 79, 80, 82, 85, 90, 95, 96, 98, 98, 99],32,[2, 5, 6, 7, 8, 11, 11, 12, 19, 20, 25, 28, 33, 34, 42, 46, 46, 55, 59, 62, 63, 65, 66, 67, 70, 74, 76, 80, 81, 82, 86, 89, 95, 99],19,),
        ([-52, 56, -48, 38, 84, -20, -32, -38, 8, -36, -66, -64, -66, -56, -74, -30, 58, 22, -2, -50, -90, -4, -2, 44, -60, -22, 74, 38, -50, 30, 94, -32, 94, 44, 90, -82, -42, -24, 46, 10, -34, -8, 98, -14, -42, 84, -8, 54],42,[58, 88, 16, -18, 24, 14, -26, -28, -16, -88, -56, 32, 24, 22, 46, 48, 50, 0, 6, 84, 88, -50, -4, 14, -80, 42, 18, 2, 90, -8, -32, -2, 46, -30, -46, 32, 70, -46, -18, 2, 36, -2, -60, -30, 48, -14, 76, -96],43,),
        ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],22,[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],19,),
        ([75],0,[29],0,)
    ]
    # f_filled (the candidate spliced in at #TOFILL) gets an independent
    # copy of every argument set, so in-place mutation by one
    # implementation cannot leak into the other's inputs.
    filled_function_param = [
        (list(block_sizes), m, list(process_sizes), n)
        for block_sizes, m, process_sizes, n in param
    ]
    n_success = 0
    for i, parameters_set in enumerate(param):
        f_filled(*(filled_function_param[i]))
        f_gold(*parameters_set)
        # Both implementations mutate their blockSize list in place;
        # equal post-states mean equal behavior for this case.
        if parameters_set == filled_function_param[i]:
            n_success += 1
    print("#Results: %i, %i" % (n_success, len(param)))
| 109.564516
| 435
| 0.426763
| 1,559
| 6,793
| 1.844772
| 0.097498
| 0.164117
| 0.214882
| 0.253129
| 0.780946
| 0.772601
| 0.772601
| 0.772601
| 0.772601
| 0.772601
| 0
| 0.457537
| 0.284116
| 6,793
| 62
| 436
| 109.564516
| 0.133868
| 0.027234
| 0
| 0.461538
| 0
| 0
| 0.012725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019231
| false
| 0
| 0
| 0
| 0.019231
| 0.096154
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
5dc03aa4a757d91e8c3c10e5797acbcf76d6f05c
| 6,653
|
py
|
Python
|
Python/klampt/model/create/primitives.py
|
smeng9/Klampt
|
7ff91bead90ac04280eff310623338fd10aaba79
|
[
"BSD-3-Clause"
] | null | null | null |
Python/klampt/model/create/primitives.py
|
smeng9/Klampt
|
7ff91bead90ac04280eff310623338fd10aaba79
|
[
"BSD-3-Clause"
] | null | null | null |
Python/klampt/model/create/primitives.py
|
smeng9/Klampt
|
7ff91bead90ac04280eff310623338fd10aaba79
|
[
"BSD-3-Clause"
] | null | null | null |
"""Utilities for creating geometric primitives (and world entities made out
of them).
"""
from klampt import Geometry3D, GeometricPrimitive, Mass
from klampt.math import vectorops
def box(width,depth,height,center=None,R=None,t=None,world=None,name=None,mass=float('inf'),type='TriangleMesh'):
    """Makes a box with dimensions width x depth x height. The box is centered
    at (0,0,0) by default.

    Args:
        width,depth,height (float): x,y,z dimensions of the box
        center (list of 3 floats, optional): if None (typical),
            the *geometry* of the box is centered at 0. Otherwise,
            the *geometry* of the box is shifted relative to the
            box's local coordinate system.
        R,t (se3 transform, optional): if given, the box's world coordinates
            will be rotated and shifted by this transform.
        world (WorldModel, optional): If given, then the box will be a
            RigidObjectModel or TerrainModel will be created in this world
        name (str, optional): If world is given, this is the name of the object.
            Default is 'box'.
        mass (float, optional): If world is given and this is inf, then a
            TerrainModel will be created. Otherwise, a RigidObjectModel
            will be created with automatically determined inertia.
        type (str, optional): the geometry type. Defaults to 'TriangleMesh',
            but also 'GeometricPrimitive' and 'VolumeGrid' are accepted.

    Returns:
        Geometry3D, RigidObjectModel, or TerrainModel: A representation
        of the box. If a world is given, then either a RigidObjectModel
        or TerrainModel is added to the world and returned.
    """
    if center is None:
        center = [0, 0, 0]
    prim = GeometricPrimitive()
    prim.setAABB([center[0]-width*0.5, center[1]-depth*0.5, center[2]-height*0.5],
                 [center[0]+width*0.5, center[1]+depth*0.5, center[2]+height*0.5])
    geom = Geometry3D(prim)
    if type != 'GeometricPrimitive':
        geom = geom.convert(type)
    if world is None:
        if R is not None and t is not None:
            geom.setCurrentTransform(R, t)
        return geom
    # want a RigidObjectModel or TerrainModel
    if name is None:
        name = 'box'
    if mass != float('inf'):
        bmass = Mass()
        bmass.setMass(mass)
        bmass.setCom(center)
        # Solid-box inertia about the COM:
        #   Ixx = m*(d^2+h^2)/12, Iyy = m*(w^2+h^2)/12, Izz = m*(w^2+d^2)/12.
        # Fixed: the original repeated the Iyy term for Izz.
        bmass.setInertia([mass*(depth**2+height**2)/12,
                          mass*(width**2+height**2)/12,
                          mass*(width**2+depth**2)/12])
        robj = world.makeRigidObject(name)
        robj.geometry().set(geom)
        robj.setMass(bmass)
        if R is not None and t is not None:
            robj.setTransform(R, t)
        return robj
    else:
        tobj = world.makeTerrain(name)
        if R is not None and t is not None:
            geom.transform(R, t)
        tobj.geometry().set(geom)
        return tobj
def sphere(radius,center=None,R=None,t=None,world=None,name=None,mass=float('inf'),type='TriangleMesh'):
    """Makes a sphere with the given radius.

    Args:
        radius (float): radius of the sphere
        center (list of 3 floats, optional): if None (typical), the *geometry*
            of the sphere is centered at 0. Otherwise, the *geometry* of
            the sphere is shifted relative to the sphere's local coordinate system.
        R,t (se3 transform, optional): if given, the sphere's world coordinates
            will be rotated and shifted by this transform.
        world (WorldModel, optional): If given, then the sphere will be a
            RigidObjectModel or TerrainModel will be created in this world
        name (str, optional): If world is given, this is the name of the object.
            Default is 'sphere'.
        mass (float, optional): If world is given and this is inf, then a
            TerrainModel will be created. Otherwise, a RigidObjectModel
            will be created with automatically determined inertia.
        type (str, optional): the geometry type. Defaults to 'TriangleMesh',
            but also 'GeometricPrimitive' and 'VolumeGrid' are accepted.

    Returns:
        Geometry3D, RigidObjectModel, or TerrainModel: A representation
        of the sphere. If a world is given, then either a RigidObjectModel
        or TerrainModel is added to the world and returned.
    """
    center = [0, 0, 0] if center is None else center
    prim = GeometricPrimitive()
    prim.setSphere(center, radius)
    geom = Geometry3D(prim)
    if type != 'GeometricPrimitive':
        geom = geom.convert(type)
    has_transform = (R is not None and t is not None)
    if world is None:
        # Standalone geometry: apply the pose directly, if any.
        if has_transform:
            geom.setCurrentTransform(R, t)
        return geom
    obj_name = 'sphere' if name is None else name
    if mass == float('inf'):
        # Infinite mass -> static terrain.
        terrain = world.makeTerrain(obj_name)
        if has_transform:
            geom.transform(R, t)
        terrain.geometry().set(geom)
        return terrain
    # Finite mass -> rigid object with solid-sphere inertia I = (2/5) m r^2.
    bmass = Mass()
    bmass.setMass(mass)
    bmass.setCom(center)
    bmass.setInertia([0.4*mass*radius**2]*3)
    body = world.makeRigidObject(obj_name)
    body.geometry().set(geom)
    body.setMass(bmass)
    if has_transform:
        body.setTransform(R, t)
    return body
def bbox(bmin,bmax,R=None,t=None,world=None,name=None,mass=float('inf'),type='TriangleMesh'):
    """Makes a box spanning the axis-aligned bounds [bmin,bmax].

    Args:
        bmin (list of 3 floats): the lower corner of the box
        bmax (list of 3 floats): the upper corner of the box
        R,t (se3 transform, optional): if given, the box's world coordinates
            will be rotated and shifted by this transform.
        world (WorldModel, optional): If given, then the box will be a
            RigidObjectModel or TerrainModel will be created in this world
        name (str, optional): If world is given, this is the name of the object.
            Default is 'box'.
        mass (float, optional): If world is given and this is inf, then a
            TerrainModel will be created. Otherwise, a RigidObjectModel
            will be created with automatically determined inertia.
        type (str, optional): the geometry type. Defaults to 'TriangleMesh',
            but also 'GeometricPrimitive' and 'VolumeGrid' are accepted.

    Returns:
        Geometry3D, RigidObjectModel, or TerrainModel: A representation
        of the box. If a world is given, then either a RigidObjectModel
        or TerrainModel is added to the world and returned.
    """
    # Delegate to box(): size is the corner-to-corner extent, center is
    # the midpoint of the bounds.
    dims = vectorops.sub(bmax, bmin)
    midpoint = vectorops.interpolate(bmin, bmax, 0.5)
    return box(dims[0], dims[1], dims[2], midpoint, R, t, world, name, mass, type)
| 43.769737
| 143
| 0.643319
| 925
| 6,653
| 4.627027
| 0.144865
| 0.021028
| 0.025234
| 0.057944
| 0.867056
| 0.853271
| 0.843692
| 0.843692
| 0.835514
| 0.817757
| 0
| 0.012325
| 0.2683
| 6,653
| 151
| 144
| 44.059603
| 0.866886
| 0.568014
| 0
| 0.794118
| 0
| 0
| 0.03698
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044118
| false
| 0
| 0.029412
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5dda688d98ac3e9edafc06f10c087f7130c1f259
| 2,974
|
py
|
Python
|
tests/test_helper.py
|
arraystream/fftoptionlib
|
093792388024f1182f4fa1cbff779c7ec657f1f9
|
[
"BSD-3-Clause"
] | 49
|
2017-04-17T15:23:20.000Z
|
2021-12-28T04:55:17.000Z
|
tests/test_helper.py
|
ran404/fftoptionlib
|
8ac67305b291584baa64b8050a0462d8aacf0fd5
|
[
"BSD-3-Clause"
] | 6
|
2017-04-19T19:28:03.000Z
|
2020-08-13T19:27:45.000Z
|
tests/test_helper.py
|
ran404/fftoptionlib
|
8ac67305b291584baa64b8050a0462d8aacf0fd5
|
[
"BSD-3-Clause"
] | 17
|
2017-04-17T15:23:06.000Z
|
2022-02-17T05:19:26.000Z
|
import unittest
import numpy as np
import numpy.testing as npt
from fftoptionlib.helper import (
to_array_atleast_1d,
to_array_with_same_dimension,
)
class HelperTest(unittest.TestCase):
    """Tests for the array-coercion helpers in fftoptionlib.helper."""

    def _assert_triple(self, actual, expected, first_dim=None):
        # Compare the three returned arrays element-wise; optionally also
        # check the length of the first array.
        for got, want in zip(actual, expected):
            npt.assert_array_equal(got, want)
        if first_dim is not None:
            self.assertEqual(actual[0].shape[0], first_dim)

    def test_to_array_atleast_1d(self):
        res = to_array_atleast_1d(1, 2, 3)
        self._assert_triple(res, (np.array([1]), np.array([2]), np.array([3])), 1)

    def test_to_array_atleast_1d_2(self):
        res = to_array_atleast_1d(1, 2, None)
        self._assert_triple(res, (np.array([1]), np.array([2]), np.array([None])), 1)

    def test_to_array_atleast_1d_3(self):
        res = to_array_atleast_1d([1, 2, 3], 2, None)
        self._assert_triple(res, (np.array([1, 2, 3]), np.array([2]), np.array([None])), 3)

    def test_to_array_with_same_dimension(self):
        res = to_array_with_same_dimension([1, 2, 3], 2, None)
        self._assert_triple(
            res,
            (np.array([1, 2, 3]), np.array([2, 2, 2]), np.array([None, None, None])),
            3)

    def test_to_array_with_same_dimension_2(self):
        res = to_array_with_same_dimension(1, 2, 3)
        self._assert_triple(res, (np.array([1]), np.array([2]), np.array([3])), 1)

    def test_to_array_with_same_dimension_3(self):
        res = to_array_with_same_dimension(np.array([1, 2, 3]), np.array(2), None)
        self._assert_triple(
            res,
            (np.array([1, 2, 3]), np.array([2, 2, 2]), np.array([None, None, None])),
            3)

    def test_to_array_with_same_dimension_4(self):
        # Mismatched lengths cannot be broadcast to a common dimension.
        self.assertRaises(ValueError, to_array_with_same_dimension, np.array([1, 2, 3]), [2, 3], None)

    def test_to_array_with_same_dimension_5(self):
        res = to_array_with_same_dimension([1, 2, 3], 'put', None)
        self._assert_triple(
            res,
            (np.array([1, 2, 3]), np.array(['put', 'put', 'put']), np.array([None, None, None])))
| 42.485714
| 102
| 0.64156
| 511
| 2,974
| 3.44227
| 0.076321
| 0.095509
| 0.16714
| 0.226833
| 0.899375
| 0.883457
| 0.870381
| 0.836839
| 0.818647
| 0.792496
| 0
| 0.053512
| 0.195696
| 2,974
| 69
| 103
| 43.101449
| 0.681856
| 0
| 0
| 0.534483
| 0
| 0
| 0.004035
| 0
| 0
| 0
| 0
| 0
| 0.482759
| 1
| 0.137931
| false
| 0
| 0.068966
| 0
| 0.224138
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5d1f56f5dbf0327231f6ee0b7d4ab5b32c4440d7
| 339
|
py
|
Python
|
aoc_2018/day_9/python/test_day9_puzzle.py
|
girip11/advent_of_code
|
b58f9a53d219bb3bef284af6b0ca59d2addd57bb
|
[
"MIT"
] | null | null | null |
aoc_2018/day_9/python/test_day9_puzzle.py
|
girip11/advent_of_code
|
b58f9a53d219bb3bef284af6b0ca59d2addd57bb
|
[
"MIT"
] | 1
|
2021-09-29T04:20:48.000Z
|
2021-09-29T04:20:48.000Z
|
aoc_2018/day_9/python/test_day9_puzzle.py
|
girip11/advent_of_code
|
b58f9a53d219bb3bef284af6b0ca59d2addd57bb
|
[
"MIT"
] | null | null | null |
from aoc_2018.day_9.python.day9_puzzle import get_highest_score
def test_highest_score() -> None:
    """AoC 2018 day 9: known (players, last marble, high score) triples."""
    cases = [
        (10, 1618, 8317),
        (13, 7999, 146373),
        (17, 1104, 2764),
        (21, 6111, 54718),
        (30, 5807, 37305),
    ]
    for players, last_marble, expected in cases:
        assert get_highest_score(players, last_marble) == expected
| 33.9
| 63
| 0.740413
| 52
| 339
| 4.5
| 0.596154
| 0.358974
| 0.384615
| 0.448718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211268
| 0.162242
| 339
| 9
| 64
| 37.666667
| 0.612676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.714286
| 1
| 0.142857
| true
| 0
| 0.142857
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d39a6e641bd3081c0a905e2c12730b7ef05d670
| 7,169
|
py
|
Python
|
test/integration/003_simple_reference_test/test_simple_reference.py
|
tjengel/dbt
|
f985902a002fba36f6f709c6aacf9ae20778e58c
|
[
"Apache-2.0"
] | 1
|
2021-09-01T20:50:52.000Z
|
2021-09-01T20:50:52.000Z
|
test/integration/003_simple_reference_test/test_simple_reference.py
|
tjengel/dbt
|
f985902a002fba36f6f709c6aacf9ae20778e58c
|
[
"Apache-2.0"
] | 1
|
2019-10-28T15:33:04.000Z
|
2019-10-28T15:33:04.000Z
|
test/integration/003_simple_reference_test/test_simple_reference.py
|
tjengel/dbt
|
f985902a002fba36f6f709c6aacf9ae20778e58c
|
[
"Apache-2.0"
] | 2
|
2019-05-10T21:23:08.000Z
|
2021-06-09T01:28:37.000Z
|
from test.integration.base import DBTIntegrationTest, use_profile
class TestSimpleReference(DBTIntegrationTest):
    """Checks that ref() resolves correctly across incremental, table,
    view, and ephemeral materializations, on postgres and snowflake."""

    @property
    def schema(self):
        return "simple_reference_003"

    @property
    def models(self):
        return "models"

    @property
    def project_config(self):
        return {
            'models': {
                'vars': {
                    'var_ref': '{{ ref("view_copy") }}',
                }
            }
        }

    def setUp(self):
        super().setUp()
        self.run_sql_file("seed.sql")

    def _check_postgres_tables(self):
        # Every copy model must match the seed; every summary must match
        # the expected summary.
        for copy_model in ("incremental_copy", "materialized_copy", "view_copy"):
            self.assertTablesEqual("seed", copy_model)
        for summary_model in ("incremental_summary", "materialized_summary",
                              "view_summary", "ephemeral_summary", "view_using_ref"):
            self.assertTablesEqual("summary_expected", summary_model)

    def _check_snowflake_tables(self):
        # Uppercase-identifier variant of the postgres checks.
        self.assertManyTablesEqual(
            ["SEED", "INCREMENTAL_COPY", "MATERIALIZED_COPY", "VIEW_COPY"],
            ["SUMMARY_EXPECTED", "INCREMENTAL_SUMMARY", "MATERIALIZED_SUMMARY", "VIEW_SUMMARY", "EPHEMERAL_SUMMARY"]
        )

    @use_profile('postgres')
    def test__postgres__simple_reference(self):
        # ephemeral_copy doesn't show up in results, hence 8 rather than 9
        self.assertEqual(len(self.run_dbt()), 8)
        self._check_postgres_tables()
        # Re-run after mutating the seed; everything must still line up.
        self.run_sql_file("update.sql")
        self.assertEqual(len(self.run_dbt()), 8)
        self._check_postgres_tables()

    @use_profile('snowflake')
    def test__snowflake__simple_reference(self):
        self.assertEqual(len(self.run_dbt()), 8)
        self._check_snowflake_tables()
        self.run_sql_file("update.sql")
        self.assertEqual(len(self.run_dbt()), 8)
        self._check_snowflake_tables()

    @use_profile('postgres')
    def test__postgres__simple_reference_with_models(self):
        # ephemeral_copy should not actually be materialized b/c it is
        # ephemeral, so only one model runs.
        results = self.run_dbt(
            ['run', '--models', 'materialized_copy', 'ephemeral_copy']
        )
        self.assertEqual(len(results), 1)
        self.assertTablesEqual("seed", "materialized_copy")
        self.assertIn('materialized_copy', self.get_models_in_schema())

    @use_profile('postgres')
    def test__postgres__simple_reference_with_models_and_children(self):
        # '+' also selects dependents; ephemeral_copy itself is never
        # materialized, but its dependent ephemeral_summary becomes a table.
        results = self.run_dbt(
            ['run', '--models', 'materialized_copy+', 'ephemeral_copy+']
        )
        self.assertEqual(len(results), 3)
        self.assertTablesEqual("seed", "materialized_copy")
        self.assertTablesEqual("summary_expected", "materialized_summary")
        self.assertTablesEqual("summary_expected", "ephemeral_summary")
        created = self.get_models_in_schema()
        # Unselected models, and ephemeral_copy in particular, must not
        # have been errantly materialized.
        for absent in ('incremental_copy', 'incremental_summary',
                       'view_copy', 'view_summary', 'ephemeral_copy'):
            self.assertNotIn(absent, created)
        for table_model in ('materialized_copy', 'materialized_summary',
                            'ephemeral_summary'):
            self.assertIn(table_model, created)
            self.assertEqual(created[table_model], 'table')

    @use_profile('snowflake')
    def test__snowflake__simple_reference_with_models(self):
        # ephemeral_copy should not actually be materialized b/c it is
        # ephemeral, so only one model runs.
        results = self.run_dbt(
            ['run', '--models', 'materialized_copy', 'ephemeral_copy']
        )
        self.assertEqual(len(results), 1)
        self.assertTablesEqual("SEED", "MATERIALIZED_COPY")
        self.assertIn('MATERIALIZED_COPY', self.get_models_in_schema())

    @use_profile('snowflake')
    def test__snowflake__simple_reference_with_models_and_children(self):
        # '+' also selects dependents; ephemeral_copy itself is never
        # materialized, but its dependent ephemeral_summary becomes a table.
        results = self.run_dbt(
            ['run', '--models', 'materialized_copy+', 'ephemeral_copy+']
        )
        self.assertEqual(len(results), 3)
        self.assertManyTablesEqual(
            ["SEED", "MATERIALIZED_COPY"],
            ["SUMMARY_EXPECTED", "MATERIALIZED_SUMMARY", "EPHEMERAL_SUMMARY"]
        )
        created = self.get_models_in_schema()
        for absent in ('INCREMENTAL_COPY', 'INCREMENTAL_SUMMARY',
                       'VIEW_COPY', 'VIEW_SUMMARY', 'EPHEMERAL_COPY'):
            self.assertNotIn(absent, created)
        for table_model in ('MATERIALIZED_COPY', 'MATERIALIZED_SUMMARY',
                            'EPHEMERAL_SUMMARY'):
            self.assertIn(table_model, created)
            self.assertEqual(created[table_model], 'table')
| 38.336898
| 116
| 0.6775
| 742
| 7,169
| 6.260108
| 0.117251
| 0.078364
| 0.065877
| 0.057266
| 0.917115
| 0.917115
| 0.911733
| 0.906351
| 0.872982
| 0.872982
| 0
| 0.001968
| 0.220393
| 7,169
| 186
| 117
| 38.543011
| 0.829129
| 0.14214
| 0
| 0.554622
| 0
| 0
| 0.266776
| 0
| 0
| 0
| 0
| 0
| 0.470588
| 1
| 0.084034
| false
| 0
| 0.008403
| 0.02521
| 0.12605
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d4dea77b587b706281aa278f72b5b6cf288a685
| 212
|
py
|
Python
|
src/app/beer_garden/api/http/handlers/vbeta/__init__.py
|
ExpressHermes/beer-garden
|
2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4
|
[
"MIT"
] | 230
|
2018-02-03T01:33:45.000Z
|
2022-02-20T22:07:25.000Z
|
src/app/beer_garden/api/http/handlers/vbeta/__init__.py
|
ExpressHermes/beer-garden
|
2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4
|
[
"MIT"
] | 961
|
2018-02-06T11:22:40.000Z
|
2022-03-24T15:22:33.000Z
|
src/app/beer_garden/api/http/handlers/vbeta/__init__.py
|
ExpressHermes/beer-garden
|
2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4
|
[
"MIT"
] | 33
|
2018-02-04T18:00:07.000Z
|
2021-12-15T13:07:22.000Z
|
# flake8: noqa
import beer_garden.api.http.handlers.vbeta.chunk
import beer_garden.api.http.handlers.vbeta.event
import beer_garden.api.http.handlers.vbeta.file
import beer_garden.api.http.handlers.vbeta.runner
| 30.285714
| 49
| 0.839623
| 34
| 212
| 5.117647
| 0.382353
| 0.229885
| 0.367816
| 0.436782
| 0.827586
| 0.827586
| 0.827586
| 0
| 0
| 0
| 0
| 0.005
| 0.056604
| 212
| 6
| 50
| 35.333333
| 0.865
| 0.056604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
54fa1f75692720cb453180aa7669bb3762ccbdfe
| 229,329
|
py
|
Python
|
parser/team23/grammar/parsetab.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 35
|
2020-12-07T03:11:43.000Z
|
2021-04-15T17:38:16.000Z
|
parser/team23/grammar/parsetab.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 47
|
2020-12-09T01:29:09.000Z
|
2021-01-13T05:37:50.000Z
|
parser/team23/grammar/parsetab.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 556
|
2020-12-07T03:13:31.000Z
|
2021-06-17T17:41:10.000Z
|
# parsetab.py
# This file is automatically generated. Do not edit.
# pylint: disable=W,C,R
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'leftPAR_ABREPAR_CIERRArightIGUALleftORleftANDleftNO_IGUALnonassocMAYORMENORMAYOR_IGUALMENOR_IGUALleftMASMENOSleftASTERISCODIVISIONMODULOleftPOTENCIArightNOTleftLLAVE_ABRELLAVE_CIERRAABS ACOS ACOSD ACOSH ADD ALL ALTER AND AS ASC ASIN ASIND ASINH ASTERISCO ATAN ATAN2 ATAN2D ATAND ATANH AVG BETWEEN BIGINT BOOLEAN BY BYTEA CADENA CASE CASTEO CBRT CEIL CEILING CHAR CHARACTER CHECK COLUMN COMA CONSTRAINT CONVERT CORCHE_ABRE CORCHE_CIERRA COS COSD COSH COT COTD COUNT CREATE CURRENT_DATE CURRENT_TIME CURRENT_USER DATABASE DATABASES DATE DATE_PART DAY DECIMAL DECIMAL_NUM DECODE DEFAULT DEGREES DELETE DESC DIFERENTE DISTINCT DIV DIVISION DOUBLE DROP ELSE ENCODE END ENTERO ENUM EXCEPT EXISTS EXP EXTRACT FACTORIAL FALSE FIELDS FIRST FLOOR FOREIGN FROM FULL GCD GET_BYTE GREATEST GROUP HAVING HOUR ID IF IGUAL ILIKE IN INHERITS INNER INSERT INTEGER INTERSECT INTERVAL INTO IS ISNULL JOIN KEY LAST LEAST LEFT LENGTH LIKE LIMIT LLAVE_ABRE LLAVE_CIERRA LN LOG MAS MAX MAYOR MAYOR_IGUAL MD5 MENOR MENOR_IGUAL MENOS MIN MINUTE MOD MODE MODULO MONEY MONTH NOT NOTNULL NOW NO_IGUAL NULL NULLS NUMERIC OFFSET OR ORDER OUTER OWNER PAR_ABRE PAR_CIERRA PI POTENCIA POWER PRECISION PRIMARY PUNTO PUNTOCOMA RADIANS RANDOM REAL REFERENCE REFERENCES RENAME REPLACE RIGHT ROUND SECOND SELECT SESSION_USER SET SET_BYTE SHA256 SHOW SIGN SIMILAR SIN SIND SINH SMALLINT SQRT SUBSTR SUBSTRING SUM SYMMETRIC TABLE TAN TAND TANH TEXT THEN TIME TIMESTAMP TO TRIM TRUE TRUNC TYPE UNION UNIQUE UNKNOWN UPDATE USE VALUES VARCHAR VARYING WHEN WHERE WIDTH_BUCKET WITH WITHOUT YEAR ZONEinit : instruccionesinstrucciones : instrucciones instruccioninstrucciones : instruccion instruccion : crear_statement PUNTOCOMA\n | alter_statement PUNTOCOMA\n | drop_statement PUNTOCOMA\n | seleccionar PUNTOCOMA\n | union PUNTOCOMA\n | intersect PUNTOCOMA\n | except PUNTOCOMAinstruccion : SHOW DATABASES PUNTOCOMA\n | INSERT INTO ID VALUES PAR_ABRE list_val PAR_CIERRA PUNTOCOMA\n | INSERT INTO ID PAR_ABRE list_id PAR_CIERRA 
VALUES PAR_ABRE list_val PAR_CIERRA PUNTOCOMA\n | UPDATE ID SET ID IGUAL expression where PUNTOCOMA\n | DELETE FROM ID WHERE expression PUNTOCOMA\n | DELETE FROM ID PUNTOCOMA\n | USE DATABASE ID PUNTOCOMAunion : PAR_ABRE seleccionar PAR_CIERRA UNION PAR_ABRE seleccionar PAR_CIERRAintersect : PAR_ABRE seleccionar PAR_CIERRA INTERSECT PAR_ABRE seleccionar PAR_CIERRAexcept : PAR_ABRE seleccionar PAR_CIERRA EXCEPT PAR_ABRE seleccionar PAR_CIERRAcrear_statement : CREATE TABLE ID PAR_ABRE contenido_tabla PAR_CIERRA inherits_statementcrear_statement : CREATE or_replace DATABASE if_not_exists ID owner_ mode_or_replace : OR REPLACE\n | if_not_exists : IF NOT EXISTS\n | owner_ : OWNER IGUAL ID\n | mode_ : MODE IGUAL ENTERO\n | alter_statement : ALTER DATABASE ID rename_owneralter_statement : ALTER TABLE ID alter_listalter_list : alter_list COMA alter_opalter_list : alter_oprename_owner : RENAME TO ID\n | OWNER TO LLAVE_ABRE ow_op LLAVE_CIERRAow_op : ID\n | CURRENT_USER\n | SESSION_USERdrop_statement : DROP DATABASE if_exists IDdrop_statement : DROP TABLE IDif_exists : IF EXISTS\n | contenido_tabla : contenido_tabla COMA manejo_tablacontenido_tabla : manejo_tablamanejo_tabla : declaracion_columna\n | condition_columndeclaracion_columna : ID type_column condition_column_rowdeclaracion_columna : ID type_columntype_column : SMALLINT\n | INTEGER\n\t | BIGINT\n\t | DECIMAL\n\t | NUMERIC\n\t | REAL\n\t | DOUBLE PRECISION\n\t | MONEY\n\t | VARCHAR PAR_ABRE ENTERO PAR_CIERRA\n | CHAR PAR_ABRE ENTERO PAR_CIERRA\n | CHARACTER PAR_ABRE ENTERO PAR_CIERRA\n | CHARACTER VARYING PAR_ABRE ENTERO PAR_CIERRA\n \t | TEXT\n\t | DATE\n | TIMESTAMP\n | TIMEcondition_column_row : condition_column_row condition_columncondition_column_row : condition_columncondition_column : constraint UNIQUE PAR_ABRE list_id PAR_CIERRA\n | constraint CHECK PAR_ABRE expression PAR_CIERRA\n | constraint key_tablecondition_column : key_table_row\n | key_tablecondition_column : DEFAULT expression\n | NOT NULL\n | UNIQUE 
PAR_ABRE list_id PAR_CIERRA\n | CHECK PAR_ABRE expression PAR_CIERRA\n \t\t | condition_column : constraint UNIQUE\n | UNIQUEkey_table_row : PRIMARY KEY\n | FOREIGN KEY REFERENCES ID PAR_ABRE ID PAR_CIERRAconstraint : CONSTRAINT ID\n | list_id : list_id COMA aliaslist_id : aliasalias : ID\n | alias_itemkey_table : PRIMARY KEY list_key\n\t | FOREIGN KEY list_key REFERENCES ID list_keylist_key : PAR_ABRE list_id PAR_CIERRA\n\t | alter_op : ADD condition_column\n\t | ALTER COLUMN ID alter_col_op\n\t | DROP alter_drop ID\n | RENAME TO IDalter_op : ADD COLUMN ID type_column condition_columnalter_drop : CONSTRAINT\n\t | COLUMN op_add : CHECK PAR_ABRE ID DIFERENTE CADENA PAR_CIERRA\n | CONSTRAINT ID UNIQUE PAR_ABRE ID PAR_CIERRA\n | key_table REFERENCES PAR_ABRE list_id PAR_CIERRAalter_col_op : SET NOT NULL\n | SET DEFAULT expression\n | TYPE type_columninherits_statement : INHERITS PAR_ABRE ID PAR_CIERRA\n | list_val : list_val COMA expressionlist_val : expressionwhere : WHERE expression\n | seleccionar : SELECT distinto select_list FROM list_id donde group_by order_by group_having limiteseleccionar : SELECT GREATEST expressiones\n | SELECT LEAST expressionesseleccionar : SELECT list_expression_fexpressiones : PAR_ABRE list_expression PAR_CIERRAexpressiones : list_expressiondistinto : DISTINCT\n\t | select_list : ASTERISCO\n\t | expressiones table_expression : expressionesdonde : WHERE expression\n | group_by : GROUP BY list_id\n | order_by : ORDER BY list_id asc_desc nulls_f_l\n | group_having : HAVING expressiones\n | asc_desc : ASC\n\t | DESCnulls_f_l : NULLS LAST\n\t | NULLS FIRST\n\t | limite : LIMIT ENTERO\n\t | LIMIT ALL\n\t | OFFSET ENTERO\n\t | list_expression : list_expression COMA expressionlist_expression : expressionlist_expression_f : list_expression_f COMA expression_f exp_aliaslist_expression_f : expression_f exp_aliasexp_alias : AS CADENA\n | expression_f : SUBSTRING PAR_ABRE expression COMA expression COMA expression PAR_CIERRA\n | SUBSTR PAR_ABRE 
expression COMA expression COMA expression PAR_CIERRAexpression : expression NOT BETWEEN SYMMETRIC expression AND expressionexpression : expression NOT BETWEEN expression AND expression\n | expression BETWEEN SYMMETRIC expression AND expressionexpression : expression BETWEEN expression AND expressionexpression : expression IS DISTINCT FROM expressionexpression : expression IS NOT DISTINCT FROM expressionexpression : ID PUNTO IDexpression : expression IS NOT NULL\n | expression IS NOT TRUE\n | expression IS NOT FALSE\n | expression IS NOT UNKNOWNexpression : expression IS NULL\n | expression IS TRUE\n | expression IS FALSE\n | expression IS UNKNOWNexpression : expression ISNULL\n | expression NOTNULLexpression : SUM PAR_ABRE expression PAR_CIERRA\n | COUNT PAR_ABRE expression PAR_CIERRA\n | AVG PAR_ABRE expression PAR_CIERRA\n | MAX PAR_ABRE expression PAR_CIERRA\n | MIN PAR_ABRE expression PAR_CIERRA\n | ABS PAR_ABRE expression PAR_CIERRA\n | CBRT PAR_ABRE expression PAR_CIERRA\n | CEIL PAR_ABRE expression PAR_CIERRA\n | CEILING PAR_ABRE expression PAR_CIERRA \n | DEGREES PAR_ABRE expression PAR_CIERRA\n | DIV PAR_ABRE expression COMA expression PAR_CIERRA\n | EXP PAR_ABRE expression PAR_CIERRA\n | FACTORIAL PAR_ABRE expression PAR_CIERRA \n | FLOOR PAR_ABRE expression PAR_CIERRA\n | GCD PAR_ABRE expression COMA expression PAR_CIERRA\n | LN PAR_ABRE expression PAR_CIERRA\n | LOG PAR_ABRE expression PAR_CIERRA\n | MOD PAR_ABRE expression COMA expression PAR_CIERRA\n | PI PAR_ABRE PAR_CIERRA\n | POWER PAR_ABRE expression COMA expression PAR_CIERRA\n | RADIANS PAR_ABRE expression PAR_CIERRA\n | ROUND PAR_ABRE expression PAR_CIERRA\n | SIGN PAR_ABRE expression PAR_CIERRA\n | SQRT PAR_ABRE expression PAR_CIERRA\n | WIDTH_BUCKET PAR_ABRE expression COMA expression COMA expression COMA expression PAR_CIERRA\n | TRUNC PAR_ABRE expression PAR_CIERRA\n | RANDOM PAR_ABRE PAR_CIERRA expression : ACOS PAR_ABRE expression PAR_CIERRA\n | ACOSD PAR_ABRE expression PAR_CIERRA\n | 
ASIN PAR_ABRE expression PAR_CIERRA\n | ASIND PAR_ABRE expression PAR_CIERRA\n | ATAN PAR_ABRE expression PAR_CIERRA\n | ATAND PAR_ABRE expression PAR_CIERRA\n | ATAN2 PAR_ABRE expression COMA expression PAR_CIERRA\n | ATAN2D PAR_ABRE expression COMA expression PAR_CIERRA\n | COS PAR_ABRE expression PAR_CIERRA\n | COSD PAR_ABRE expression PAR_CIERRA\n | COT PAR_ABRE expression PAR_CIERRA\n | COTD PAR_ABRE expression PAR_CIERRA\n | SIN PAR_ABRE expression PAR_CIERRA\n | SIND PAR_ABRE expression PAR_CIERRA\n | TAN PAR_ABRE expression PAR_CIERRA\n | TAND PAR_ABRE expression PAR_CIERRA\n | SINH PAR_ABRE expression PAR_CIERRA\n | COSH PAR_ABRE expression PAR_CIERRA\n | TANH PAR_ABRE expression PAR_CIERRA\n | ASINH PAR_ABRE expression PAR_CIERRA\n | ACOSH PAR_ABRE expression PAR_CIERRA\n | ATANH PAR_ABRE expression PAR_CIERRA expression_f : SUM PAR_ABRE expression PAR_CIERRA\n | COUNT PAR_ABRE expression PAR_CIERRA\n | AVG PAR_ABRE expression PAR_CIERRA\n | MAX PAR_ABRE expression PAR_CIERRA\n | MIN PAR_ABRE expression PAR_CIERRA\n | ABS PAR_ABRE expression PAR_CIERRA\n | CBRT PAR_ABRE expression PAR_CIERRA\n | CEIL PAR_ABRE expression PAR_CIERRA\n | CEILING PAR_ABRE expression PAR_CIERRA \n | DEGREES PAR_ABRE expression PAR_CIERRA\n | DIV PAR_ABRE expression COMA expression PAR_CIERRA\n | EXP PAR_ABRE expression PAR_CIERRA\n | FACTORIAL PAR_ABRE expression PAR_CIERRA \n | FLOOR PAR_ABRE expression PAR_CIERRA\n | GCD PAR_ABRE expression COMA expression PAR_CIERRA\n | LN PAR_ABRE expression PAR_CIERRA\n | LOG PAR_ABRE expression PAR_CIERRA\n | MOD PAR_ABRE expression COMA expression PAR_CIERRA\n | PI PAR_ABRE PAR_CIERRA\n | POWER PAR_ABRE expression COMA expression PAR_CIERRA\n | RADIANS PAR_ABRE expression PAR_CIERRA\n | ROUND PAR_ABRE expression PAR_CIERRA\n | SIGN PAR_ABRE expression PAR_CIERRA\n | SQRT PAR_ABRE expression PAR_CIERRA\n | WIDTH_BUCKET PAR_ABRE expression COMA expression COMA expression COMA expression PAR_CIERRA\n | TRUNC PAR_ABRE expression 
PAR_CIERRA\n | RANDOM PAR_ABRE PAR_CIERRA expression_f : ACOS PAR_ABRE expression PAR_CIERRA\n | ACOSD PAR_ABRE expression PAR_CIERRA\n | ASIN PAR_ABRE expression PAR_CIERRA\n | ASIND PAR_ABRE expression PAR_CIERRA\n | ATAN PAR_ABRE expression PAR_CIERRA\n | ATAND PAR_ABRE expression PAR_CIERRA\n | ATAN2 PAR_ABRE expression COMA expression PAR_CIERRA\n | ATAN2D PAR_ABRE expression COMA expression PAR_CIERRA\n | COS PAR_ABRE expression PAR_CIERRA\n | COSD PAR_ABRE expression PAR_CIERRA\n | COT PAR_ABRE expression PAR_CIERRA\n | COTD PAR_ABRE expression PAR_CIERRA\n | SIN PAR_ABRE expression PAR_CIERRA\n | SIND PAR_ABRE expression PAR_CIERRA\n | TAN PAR_ABRE expression PAR_CIERRA\n | TAND PAR_ABRE expression PAR_CIERRA\n | SINH PAR_ABRE expression PAR_CIERRA\n | COSH PAR_ABRE expression PAR_CIERRA\n | TANH PAR_ABRE expression PAR_CIERRA\n | ASINH PAR_ABRE expression PAR_CIERRA\n | ACOSH PAR_ABRE expression PAR_CIERRA\n | ATANH PAR_ABRE expression PAR_CIERRA expression_f : NOW PAR_ABRE PAR_CIERRA\n | TIMESTAMP CADENA\n | CURRENT_TIME\n | CURRENT_DATE \n | DATE_PART PAR_ABRE expression COMA INTERVAL expression PAR_CIERRA\n | EXTRACT PAR_ABRE YEAR FROM TIMESTAMP expression PAR_CIERRA\n | EXTRACT PAR_ABRE MONTH FROM TIMESTAMP expression PAR_CIERRA\n | EXTRACT PAR_ABRE DAY FROM TIMESTAMP expression PAR_CIERRA\n | EXTRACT PAR_ABRE HOUR FROM TIMESTAMP expression PAR_CIERRA\n | EXTRACT PAR_ABRE MINUTE FROM TIMESTAMP expression PAR_CIERRA\n | EXTRACT PAR_ABRE SECOND FROM TIMESTAMP expression PAR_CIERRAexpression_f : LENGTH PAR_ABRE expression PAR_CIERRA\n | TRIM PAR_ABRE expression PAR_CIERRA\n | MD5 PAR_ABRE expression PAR_CIERRA\n | SHA256 PAR_ABRE expression PAR_CIERRA\n | DECODE PAR_ABRE expression COMA expression PAR_CIERRA\n | ENCODE PAR_ABRE expression CASTEO BYTEA COMA expression PAR_CIERRA\n | CONVERT PAR_ABRE expression AS DATE PAR_CIERRA\n | CONVERT PAR_ABRE expression AS INTEGER PAR_CIERRA\n | CONVERT PAR_ABRE expression AS BIGINT PAR_CIERRA\n | CONVERT PAR_ABRE 
expression AS DECIMAL PAR_CIERRA\n | CONVERT PAR_ABRE expression AS NUMERIC PAR_CIERRA\n | CONVERT PAR_ABRE expression AS REAL PAR_CIERRA\n | CONVERT PAR_ABRE expression AS MONEY PAR_CIERRA\n | CONVERT PAR_ABRE expression AS CHARACTER PAR_CIERRA\n | CONVERT PAR_ABRE expression AS CHAR PAR_CIERRA\n | CONVERT PAR_ABRE expression AS TEXT PAR_CIERRA\n | CONVERT PAR_ABRE expression AS TIME PAR_CIERRA\n | CONVERT PAR_ABRE expression AS VARCHAR PAR_CIERRA\n | CONVERT PAR_ABRE expression AS TIMESTAMP PAR_CIERRA\n | GET_BYTE PAR_ABRE expression CASTEO BYTEA COMA expression PAR_CIERRA\n | SET_BYTE PAR_ABRE expression CASTEO BYTEA COMA expression COMA expression PAR_CIERRAexpression : seleccionarexpression : PAR_ABRE expression PAR_CIERRAexpression : expression MAYOR expressionexpression : expression MENOR expressionexpression : expression MAYOR_IGUAL expressionexpression : expression MENOR_IGUAL expressionexpression : expression IGUAL expressionexpression : expression NO_IGUAL expressionexpression : expression DIFERENTE expressionexpression : expression AND expressionexpression : expression OR expressionexpression : NOT expressionexpression : ID\n | ASTERISCOexpression : ENTEROexpression : DECIMAL_NUMexpression : NULLexpression : CADENAalias_list : alias_list COMA alias_itemalias_list : alias_itemalias_item : ID AS ID'
_lr_action_items = {'SHOW':([0,2,3,21,22,23,24,25,26,27,28,114,267,268,608,831,834,922,],[11,11,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,-16,-17,-15,-12,-14,-13,]),'INSERT':([0,2,3,21,22,23,24,25,26,27,28,114,267,268,608,831,834,922,],[12,12,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,-16,-17,-15,-12,-14,-13,]),'UPDATE':([0,2,3,21,22,23,24,25,26,27,28,114,267,268,608,831,834,922,],[14,14,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,-16,-17,-15,-12,-14,-13,]),'DELETE':([0,2,3,21,22,23,24,25,26,27,28,114,267,268,608,831,834,922,],[15,15,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,-16,-17,-15,-12,-14,-13,]),'USE':([0,2,3,21,22,23,24,25,26,27,28,114,267,268,608,831,834,922,],[16,16,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,-16,-17,-15,-12,-14,-13,]),'CREATE':([0,2,3,21,22,23,24,25,26,27,28,114,267,268,608,831,834,922,],[17,17,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,-16,-17,-15,-12,-14,-13,]),'ALTER':([0,2,3,21,22,23,24,25,26,27,28,114,124,267,268,453,608,831,834,922,],[18,18,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,275,-16,-17,275,-15,-12,-14,-13,]),'DROP':([0,2,3,21,22,23,24,25,26,27,28,114,124,267,268,453,608,831,834,922,],[19,19,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,279,-16,-17,279,-15,-12,-14,-13,]),'SELECT':([0,2,3,13,20,21,22,23,24,25,26,27,28,42,43,44,46,114,131,134,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,267,268,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,428,429,430,431,443,465,467,534,535,546,550,553,554,559,567,568,594,608,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,831,833,834,853,860,867,898,905,908,919,922,],[20,20,-3,20,-118,-2,-4,-5,-6,-7,-8,-9,-10,20,20,20,-117,-1
1,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,-16,-17,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,-15,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,-12,20,-14,20,20,20,20,20,20,20,-13,]),'PAR_ABRE':([0,2,3,20,21,22,23,24,25,26,27,28,42,43,44,46,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,103,104,105,106,107,108,109,110,111,112,113,114,115,120,131,134,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,260,262,263,264,266,267,268,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,439,440,443,465,467,534,535,546,550,553,554,559,567,568,594,608,618,619,620,627,628,633,637,638,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,744,750,757,759,762,763,764,785,788,790,798,799,804,815,829,830,831,833,834,847,853,860,867,893,898,905,908,919,922,],[13,13,-3,-118,-2,-4,-5,-6,-7,-8,-9,-10,131,131,131,-117,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,249,250,251,252,253,254,255,256,257,258,259,-11,261,269,284,284,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,423,428,429,430,284,-16,-17,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,632,633,284,284,284,284,284,284,284,284,284,284,284,284,284,-15,754,755,756,761,762,284,768,768,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,833,284,840,841,284,768,768,284,284,284,284,284,284,284,284,284,-12,284,-14,892,284,284,284,768,131,284,284,284,-13,]),'$end':([1,2,3,21,22,23,24,25,26,27,28,114,267,268,608,831,834,922,],[0,-1,-3,-2,-4,-5,-6,-7,-8,-9,-10,-11,-16,-17,-15,-12,-14,-13,]),'PUNTOCOMA':([4,5,6,7,8,9,10,29,45,47,101,102,118,119,127,132,133,135,136,186,187,188,189,190,191,192,194,248,272,276,277,278,281,292,293,302,353,354,375,383,406,424,426,427,432,439,441,442,448,454,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,607,610,611,612,613,614,615,617,621,622,623,624,625,627,629,634,635,637,639,642,645,647,648,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,7
02,703,704,705,706,707,741,743,745,746,747,748,749,753,758,763,767,771,778,781,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,835,844,845,850,851,854,855,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,884,885,886,889,890,891,893,894,895,896,897,900,901,903,904,906,907,909,910,911,913,914,917,923,924,925,926,927,928,929,931,932,933,935,936,937,],[22,23,24,25,26,27,28,114,-114,-144,-264,-265,267,268,-41,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-31,-32,-34,-77,-40,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,608,-79,-72,-71,-28,-92,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-110,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,-106,-78,-70,-73,-74,-80,-30,-35,-33,-94,-95,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,831,-314,-84,-18,-19,-20,834,-56,-21,-91,-88,-22,-93,-77,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-109,-75,-76,-27,-36,-104,-96,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-58,-59,-60,-68,-69,-90,-91,-29,-102,-103,-138,-124,-147,-145,-146,-278,-292,922,-61,-105,-89,-111,-128,-81,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'DATABASES':([11,],[29,]),'INTO':([12,],[30,]),'ID':([14,20,30,33,34,35,38,39,40,41,42,43,44,46,117,121,125,131,134,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,2
33,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,261,266,269,270,282,283,284,287,289,290,294,295,296,297,298,299,300,301,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,445,450,452,455,456,457,458,459,465,467,534,535,546,550,553,554,559,567,568,594,601,603,626,632,633,641,643,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,761,762,768,769,773,785,788,790,798,799,804,815,829,830,833,841,848,853,858,860,867,892,898,899,905,908,919,],[32,-118,115,118,119,120,123,124,-43,127,135,135,135,-117,265,-26,281,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,424,135,433,448,-42,424,135,135,135,135,135,135,135,135,135,135,135,135,483,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,636,642,644,646,647,-97,-98,648,135,135,135,135,135,135,135,135,135,135,135,135,743,424,433,424,135,-25,775,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,424,135,424,847,850,135,135,135,135,135,135,135,135,135,135,888,893,135,424,135,135,912,135,424,135,135,135,]),'FROM':([15,45,47,101,102,128,129,130,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,408,409,410,411,412,413,424,426,427,460,462,463,464,468,469,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,649,656,657,658,659,
660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[33,-114,-144,-264,-265,283,-119,-120,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,584,585,586,587,588,589,-86,-85,-87,-123,-115,-295,-139,-303,655,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-125,790,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-138,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'DATABASE':([16,17,18,19,36,122,],[34,-24,38,40,121,-23,]),'TABLE':([17,18,19,],[35,39,41,]),'OR':([17,45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,47
8,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[37,-114,-144,-264,-265,-307,-116,301,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,301,-162,-163,-305,-144,-143,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,-231,301,301,301,301,301,301,301,-239,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,-262,301,301,301,301,301,301,301,301,301,301,-86,-85,-87,301,-123,301,-115,-295,301,301,-303,-158,-159,-160,-161,-296,-297,-298,-299,301,-301,301,-304,-153,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,-182,301,301,301,301,301,301,301,-190,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,301,301,301,-125,301,301,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-1
70,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,301,301,301,301,301,301,301,301,301,301,-314,-84,301,-127,301,301,-150,301,301,301,301,301,301,301,301,-223,-227,-230,-232,-246,-247,301,301,301,301,301,301,301,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,301,301,301,-129,-148,-149,301,-174,-178,-181,-183,-197,-198,301,301,301,-266,-267,-268,-269,-270,-271,-272,301,301,301,301,-138,-124,-147,301,-145,-146,-278,-292,-111,-128,301,301,-135,-136,-137,-134,-130,-131,301,-237,-293,-126,-188,-132,-133,]),'GREATEST':([20,],[43,]),'LEAST':([20,],[44,]),'DISTINCT':([20,291,470,],[46,469,656,]),'ASTERISCO':([20,42,43,44,46,131,134,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[-118,129,191,191,-117,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191
,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,]),'SUM':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[50,137,137,137,-117,137,137,50,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,]),'COUNT':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310
,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[51,138,138,138,-117,138,138,51,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,]),'AVG':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[52,139,139,139,-117,139,139,52,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,1
39,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,]),'MAX':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[53,140,140,140,-117,140,140,53,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,1
40,140,140,140,]),'MIN':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[54,141,141,141,-117,141,141,54,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,]),'ABS':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465
,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[55,142,142,142,-117,142,142,55,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,]),'CBRT':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[56,143,143,143,-117,143,143,56,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,
143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,]),'CEIL':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[57,144,144,144,-117,144,144,57,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,]),'CEILING':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,2
27,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[58,145,145,145,-117,145,145,58,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,]),'DEGREES':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,8
30,833,853,860,867,898,905,908,919,],[59,146,146,146,-117,146,146,59,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,]),'DIV':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[60,147,147,147,-117,147,147,60,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147
,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,]),'EXP':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[61,148,148,148,-117,148,148,61,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,]),'FACTORIAL':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300
,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[62,149,149,149,-117,149,149,62,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,]),'FLOOR':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[63,150,150,150,-117,150,150,63,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150
,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,]),'GCD':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[64,151,151,151,-117,151,151,64,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151
,151,151,151,151,151,151,151,151,151,151,151,151,]),'LN':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[65,152,152,152,-117,152,152,65,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,]),'LOG':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,34
8,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[66,153,153,153,-117,153,153,66,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,]),'MOD':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[67,154,154,154,-117,154,154,67,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,
154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,]),'PI':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[68,155,155,155,-117,155,155,68,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,]),'POWER':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,2
19,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[69,156,156,156,-117,156,156,69,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,]),'RADIANS':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,7
88,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[70,157,157,157,-117,157,157,70,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,]),'ROUND':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[71,158,158,158,-117,158,158,71,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,1
58,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'SIGN':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[72,159,159,159,-117,159,159,72,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,]),'SQRT':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,2
94,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[73,160,160,160,-117,160,160,73,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,]),'WIDTH_BUCKET':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[74,161,161,161,-117,161,161,74,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,16
1,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'TRUNC':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[75,162,162,162,-117,162,162,75,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,
162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,]),'RANDOM':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[76,163,163,163,-117,163,163,76,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,]),'ACOS':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,33
8,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[77,164,164,164,-117,164,164,77,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,]),'ACOSD':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[78,165,165,165,-117,165,165,78,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,16
5,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,]),'ASIN':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[79,166,166,166,-117,166,166,79,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,]),'ASIND':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,2
07,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[80,167,167,167,-117,167,167,80,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,]),'ATAN':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,
718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[81,168,168,168,-117,168,168,81,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,]),'ATAND':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[82,169,169,169,-117,169,169,82,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,
169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,]),'ATAN2':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[83,170,170,170,-117,170,170,83,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,]),'ATAN2D':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,2
54,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[84,171,171,171,-117,171,171,84,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,]),'COS':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[85,172,172,172,-117,172,172,85,172,172,172,172,172,172,172,172
,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,]),'COSD':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[86,173,173,173,-117,173,173,86,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,17
3,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,]),'COT':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[87,174,174,174,-117,174,174,87,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,]),'COTD':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329
,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[88,175,175,175,-117,175,175,88,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,]),'SIN':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[89,176,176,176,-117,176,176,89,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,1
76,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,]),'SIND':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[90,177,177,177,-117,177,177,90,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,]),'TAN':([20,42,43,44,46,131,134,193,196,197,198,199,20
0,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[91,178,178,178,-117,178,178,91,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,]),'TAND':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,6
75,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[92,179,179,179,-117,179,179,92,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,]),'SINH':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[93,180,180,180,-117,180,180,93,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,18
0,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,]),'COSH':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[94,181,181,181,-117,181,181,94,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,]),'TANH':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,24
5,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[95,182,182,182,-117,182,182,95,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,]),'ASINH':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[96,183,183,183,-117,183,183,96,183,18
3,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,]),'ACOSH':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[97,184,184,184,-117,184,184,97,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,
184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,]),'ATANH':([20,42,43,44,46,131,134,193,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[98,185,185,185,-117,185,185,98,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,]),'NOT':([20,42,43,44,45,46,47,101,102,129,131,132,133,134,135,136,186,187,188,189,190,191,192,194,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,248,249,251,252,253,254,255,256,257,258,259,266,269,271,278,284,286,287,289,290,291,292,293,294,295,29
6,297,298,299,300,301,302,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,423,424,426,427,431,432,439,441,442,443,460,461,462,463,464,465,466,467,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,594,600,607,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,633,634,635,637,649,650,651,652,653,654,655,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,742,743,745,750,751,752,753,762,763,766,767,779,781,782,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,832,833,835,836,843,844,845,853,856,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,884,885,886,889,890,891,893,896,897,898,900,901,902,903,904,905,906,907,908,910,913,914,917,919,920,921,923,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-118,134,134,134,-114,-117,-144,-264,-265,-307,134,-116,288,134
,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,-263,134,134,134,134,134,134,134,134,134,134,134,444,449,444,134,288,134,134,134,470,-162,-163,134,134,134,134,134,134,134,134,288,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,-144,-143,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,-231,288,288,288,288,288,288,288,-239,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,-262,288,288,288,288,288,288,288,288,288,288,134,-86,-85,-87,134,288,-79,-72,-71,134,-123,288,-115,-295,288,134,288,134,288,-158,-159,-160,-161,288,288,288,288,288,288,288,288,-153,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,-182,288,288,288,288,288,288,288,-190,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,-141,134,134,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,134,-224,-225,-226,134,-228,-229,134,134,-233,-234,-235,-236,134,-238,-240,-241,-242,-243,-244,-245,134,134,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,134,288,288,444,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,444,-78,-70,134,288,-74,-80,-125,134,288,134,134,288,134,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,134,-175,-176,-177,134,-179,-180,134,134,-184,-185,-186,-187,134,-189,-191,-192,-193,-194,-195,-196,134,134,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,288,288,288,288,288,288,288,288,288,134,134,134,134,134,134,134,288,134,-314,-84,134,444,-67,-56,134,-91,288,-88,852,444,-127,288,134,288,288,134,288,134,288,288,288,288,288,288,288,134,134,-223,-227,-230,-232,134
,-246,-247,288,288,288,288,288,288,288,-277,134,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,134,134,288,134,288,-66,288,-75,-76,134,-129,288,134,288,288,-174,-178,-181,-183,134,-197,-198,288,288,288,-266,-267,-268,-269,-270,-271,-272,288,288,288,-58,-59,-60,-68,-69,-90,-91,288,-138,134,-124,288,288,-145,-146,134,-278,-292,134,-61,-89,-111,-128,134,288,288,-81,-135,-136,-137,-134,-130,-131,288,-237,-293,-126,-188,-132,-133,]),'ENTERO':([20,42,43,44,46,131,134,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,754,755,756,762,785,788,790,798,799,804,815,829,830,833,840,849,853,860,867,898,905,908,915,916,919,],[-118,187,187,187,-117,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,837,838,839,187,187,187,187,187,187,187,187,187,187,187,887,894,187,187,187,187,187,187,924,926,187,]),'DECIMAL_NUM':([20,42,43,44,46,131,134,1
96,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[-118,188,188,188,-117,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,]),'NULL':([20,42,43,44,46,131,134,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,291,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,444,465,467,470,534,535,546,550,553,554,559,567,568,594,6
33,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,852,853,860,867,898,905,908,919,],[-118,136,136,136,-117,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,471,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,635,136,136,657,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,895,136,136,136,136,136,136,136,]),'CADENA':([20,42,43,44,46,100,131,134,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[-118,189,189,189,-117,248,189,189,354,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,18
9,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,]),'SUBSTRING':([20,193,],[48,48,]),'SUBSTR':([20,193,],[49,49,]),'NOW':([20,193,],[99,99,]),'TIMESTAMP':([20,193,433,584,585,586,587,588,589,596,646,780,],[100,100,623,718,719,720,721,722,723,738,623,623,]),'CURRENT_TIME':([20,193,],[101,101,]),'CURRENT_DATE':([20,193,],[102,102,]),'DATE_PART':([20,193,],[103,103,]),'EXTRACT':([20,193,],[104,104,]),'LENGTH':([20,193,],[105,105,]),'TRIM':([20,193,],[106,106,]),'MD5':([20,193,],[107,107,]),'SHA256':([20,193,],[108,108,]),'DECODE':([20,193,],[109,109,]),'ENCODE':([20,193,],[110,110,]),'CONVERT':([20,193,],[111,111,]),'GET_BYTE':([20,193,],[112,112,]),'SET_BYTE':([20,193,],[113,113,]),'PAR_CIERRA':([31,45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,216,224,247,248,269,285,286,292,293,302,322,330,353,354,357,358,359,360,361,362,363,364,365,366,368,369,370,372,373,375,377,378,379,380,382,383,384,385,386,387,388,389,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,414,415,416,417,424,425,426,427,434,435,436,437,439,441,442,460,461,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,495,496,497,499,500,502,504,505,506,507,509,510,511,512,513,514,515,516,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,599,600,604,605,606,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,637,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,68
7,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,710,711,712,713,715,716,724,726,727,728,729,730,731,732,733,734,735,736,737,738,743,745,751,752,753,760,763,765,766,767,782,784,787,789,791,792,793,794,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,836,837,838,839,842,843,844,845,846,856,859,861,862,863,864,865,866,868,869,870,871,873,874,875,876,877,878,879,880,881,883,884,885,886,887,888,889,890,891,893,897,900,901,903,904,906,907,910,912,913,914,917,920,921,923,924,925,926,927,928,929,930,931,932,933,935,936,937,],[116,-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,375,383,406,-263,-77,462,463,-162,-163,-305,502,510,-144,-143,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,-231,555,556,557,558,560,-239,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,-262,590,591,592,593,-86,602,-85,-87,625,-45,-46,-47,-79,-72,-71,-123,463,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,-182,680,681,682,683,685,-190,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,741,-108,746,747,748,-49,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,-77,-78,-70,-73,-74,-80,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,-314,-84,-48,-67,-56,-44,-91,844,845,-88,-127,-122,-150,-151,863,864,865,866,868,869,-223,-227,-230,-232,-246,-
247,873,874,875,876,877,878,879,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-107,-66,884,885,886,889,890,-75,-76,891,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,903,904,-266,-267,-268,-269,-270,-271,-272,906,907,909,-58,-59,-60,910,911,-68,-69,-90,-91,-138,-124,-147,-145,-146,-278,-292,-61,923,-89,-111,-128,931,932,-81,-135,-136,-137,-134,-130,-131,935,-237,-293,-126,-188,-132,-133,]),'SET':([32,644,],[117,779,]),'REPLACE':([37,],[122,]),'IF':([40,121,],[126,271,]),'BETWEEN':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,288,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,289,-306,-310,-294,
-308,-309,-311,-112,-307,-113,-142,-263,289,465,-162,-163,-305,-144,-143,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,-231,289,289,289,289,289,289,289,-239,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,-262,289,289,289,289,289,289,289,289,289,289,-86,-85,-87,289,-123,289,-115,-295,289,289,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,289,-304,-153,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,-182,289,289,289,289,289,289,289,-190,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,289,289,289,-125,289,289,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,289,289,289,289,289,289,289,289,289,289,-314,-84,289,-127,289,289,-150,289,289,289,289,289,289,289,289,-223,-227,-230,-232,-246,-247,289,289,289,289,289,289,289,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,289,289,289,-129,-148,-149,289,-174,-178,-181,-183,-197,-198,289,289,289,-266,-267,-268,-269,-270,-271,-272,289,289,289,289,-138,-124,-147,289,-145,-146,-278,-292,-111,-128,289,289,-135,-136,-137,-134,-130,-131,289,-237,-293,-126,-188,-132,-133,]),'IS':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,
477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,291,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,291,-162,-163,-305,-144,-143,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,-231,291,291,291,291,291,291,291,-239,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,-262,291,291,291,291,291,291,291,291,291,291,-86,-85,-87,291,-123,291,-115,-295,291,291,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,291,-304,-153,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,-182,291,291,291,291,291,291,291,-190,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,291,291,291,-125,291,291,-154,-155,-156,-157,-164,-165,-166,-167,-168,-16
9,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,291,291,291,291,291,291,291,291,291,291,-314,-84,291,-127,291,291,-150,291,291,291,291,291,291,291,291,-223,-227,-230,-232,-246,-247,291,291,291,291,291,291,291,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,291,291,291,-129,-148,-149,291,-174,-178,-181,-183,-197,-198,291,291,291,-266,-267,-268,-269,-270,-271,-272,291,291,291,291,-138,-124,-147,291,-145,-146,-278,-292,-111,-128,291,291,-135,-136,-137,-134,-130,-131,291,-237,-293,-126,-188,-132,-133,]),'ISNULL':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,
901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,292,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,292,-162,-163,-305,-144,-143,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,-231,292,292,292,292,292,292,292,-239,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,-262,292,292,292,292,292,292,292,292,292,292,-86,-85,-87,292,-123,292,-115,-295,292,292,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,292,-304,-153,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,-182,292,292,292,292,292,292,292,-190,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,292,292,292,-125,292,292,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,292,292,292,292,292,292,292,292,292,292,-314,-84,292,-127,292,292,-150,292,292,292,292,292,292,292,292,-223,-227,-230,-232,-246,-247,292,292,292,292,292,292,292,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,292,292,292,-129,-148,-149,292,-174,-178,-181,-183,-197,-198,292,292,292,-266,-267,-268,-269,-270,-271,-272,292,292,292,292,-138,-124,-147,292,-145,-146,-278,-292,-111,-128,292,292,-135,-136,-137,-134,-130,-131,292,-237,-293,-126,-188,-132,-133,]),'NOTNULL':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397
,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,293,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,293,-162,-163,-305,-144,-143,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,-231,293,293,293,293,293,293,293,-239,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,-262,293,293,293,293,293,293,293,293,293,293,-86,-85,-87,293,-123,293,-115,-295,293,293,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,293,-304,-153,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,-182,293,293,293,293,293,293,293,-190,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,293,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-25
1,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,293,293,293,-125,293,293,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,293,293,293,293,293,293,293,293,293,293,-314,-84,293,-127,293,293,-150,293,293,293,293,293,293,293,293,-223,-227,-230,-232,-246,-247,293,293,293,293,293,293,293,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,293,293,293,-129,-148,-149,293,-174,-178,-181,-183,-197,-198,293,293,293,-266,-267,-268,-269,-270,-271,-272,293,293,293,293,-138,-124,-147,293,-145,-146,-278,-292,-111,-128,293,293,-135,-136,-137,-134,-130,-131,293,-237,-293,-126,-188,-132,-133,]),'MAYOR':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,
822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,294,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,294,-162,-163,-305,-144,-143,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,-231,294,294,294,294,294,294,294,-239,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,-262,294,294,294,294,294,294,294,294,294,294,-86,-85,-87,294,-123,294,-115,-295,294,294,294,-158,-159,-160,-161,None,None,None,None,294,294,294,294,-153,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,-182,294,294,294,294,294,294,294,-190,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,294,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,294,294,294,-125,294,294,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,294,294,294,294,294,294,294,294,294,294,-314,-84,294,-127,294,294,294,294,294,294,294,294,294,294,294,-223,-227,-230,-232,-246,-247,294,294,294,294,294,294,294,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,294,294,294,-129,294,294,294,-174,-178,-181,-183,-197,-198,294,294,294,-266,-267,-268,-269,-270,-271,-272,294,294,294,294,-138,-124,294,294,-145,-146,-278,-292,-111,-128,294,294,-135,-136,-137,-134,-130,-131,294,-237,-293,-126,-188,-132,-133,]),'MENOR':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,3
64,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,295,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,295,-162,-163,-305,-144,-143,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,-231,295,295,295,295,295,295,295,-239,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,-262,295,295,295,295,295,295,295,295,295,295,-86,-85,-87,295,-123,295,-115,-295,295,295,295,-158,-159,-160,-161,None,None,None,None,295,295,295,295,-153,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,-182,295,295,295,295,295,295,295,-190,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,295,-141,-213,-214,-215,-21
6,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,295,295,295,-125,295,295,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,295,295,295,295,295,295,295,295,295,295,-314,-84,295,-127,295,295,295,295,295,295,295,295,295,295,295,-223,-227,-230,-232,-246,-247,295,295,295,295,295,295,295,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,295,295,295,-129,295,295,295,-174,-178,-181,-183,-197,-198,295,295,295,-266,-267,-268,-269,-270,-271,-272,295,295,295,295,-138,-124,295,295,-145,-146,-278,-292,-111,-128,295,295,-135,-136,-137,-134,-130,-131,295,-237,-293,-126,-188,-132,-133,]),'MAYOR_IGUAL':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,
766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,296,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,296,-162,-163,-305,-144,-143,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,-231,296,296,296,296,296,296,296,-239,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,-262,296,296,296,296,296,296,296,296,296,296,-86,-85,-87,296,-123,296,-115,-295,296,296,296,-158,-159,-160,-161,None,None,None,None,296,296,296,296,-153,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,-182,296,296,296,296,296,296,296,-190,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,296,296,296,-125,296,296,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,296,296,296,296,296,296,296,296,296,296,-314,-84,296,-127,296,296,296,296,296,296,296,296,296,296,296,-223,-227,-230,-232,-246,-247,296,296,296,296,296,296,296,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,296,296,296,-129,296,296,296,-174,-178,-181,-183,-197,-198,296,296,296,-266,-267,-268,-269,-270,-271,-272,296,296,296,296,-138,-124,296,296,-145,-146,-278,-292,-111,-128,296,296,-135,-136,-137,-134,-130,-131,296,-237,-293,-126,-188,-132,-133,]),'MENOR_IG
UAL':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,297,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,297,-162,-163,-305,-144,-143,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,-231,297,297,297,297,297,297,297,-239,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,-262,297,297,297,297,297,297,297,297,297,297,-86,-85,-87,297,-123,297,-115,-295,297,297,297,-158,-159,-160,-161,None,None,None,None,297,297,297,297,-153,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,-182,297,29
7,297,297,297,297,297,-190,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,297,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,297,297,297,-125,297,297,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,297,297,297,297,297,297,297,297,297,297,-314,-84,297,-127,297,297,297,297,297,297,297,297,297,297,297,-223,-227,-230,-232,-246,-247,297,297,297,297,297,297,297,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,297,297,297,-129,297,297,297,-174,-178,-181,-183,-197,-198,297,297,297,-266,-267,-268,-269,-270,-271,-272,297,297,297,297,-138,-124,297,297,-145,-146,-278,-292,-111,-128,297,297,-135,-136,-137,-134,-130,-131,297,-237,-293,-126,-188,-132,-133,]),'IGUAL':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,265,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,640,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,
682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,772,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,298,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,431,298,-162,-163,-305,-144,-143,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,-231,298,298,298,298,298,298,298,-239,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,-262,298,298,298,298,298,298,298,298,298,298,-86,-85,-87,298,-123,298,-115,-295,298,298,-303,-158,-159,-160,-161,-296,-297,-298,-299,298,-301,298,-304,-153,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,-182,298,298,298,298,298,298,298,-190,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,298,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,298,298,298,773,-125,298,298,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,298,298,298,298,298,298,298,298,298,298,-314,-84,298,849,-127,298,298,-150,298,298,298,298,298,298,298,298,-223,-227,-230,-232,-246,-247,298,298,298,298,298,298,298,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,298,298,298,-129,-148,-149,298,-174,-178,-181,-183,-197,-198,298,298,298,-266,-267,-268,-269,-
270,-271,-272,298,298,298,298,-138,-124,-147,298,-145,-146,-278,-292,-111,-128,298,298,-135,-136,-137,-134,-130,-131,298,-237,-293,-126,-188,-132,-133,]),'NO_IGUAL':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,299,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,299,-162,-163,-305,-144,-143,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,-231,299,299,299,299,299,299,299,-239,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,-262,299,299,299,299,299,299,299,299,299,299,-86,-85,-87,299,-123,299,-115,-
295,299,299,299,-158,-159,-160,-161,-296,-297,-298,-299,299,-301,299,299,-153,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,-182,299,299,299,299,299,299,299,-190,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,299,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,299,299,299,-125,299,299,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,299,299,299,299,299,299,299,299,299,299,-314,-84,299,-127,299,299,299,299,299,299,299,299,299,299,299,-223,-227,-230,-232,-246,-247,299,299,299,299,299,299,299,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,299,299,299,-129,299,299,299,-174,-178,-181,-183,-197,-198,299,299,299,-266,-267,-268,-269,-270,-271,-272,299,299,299,299,-138,-124,299,299,-145,-146,-278,-292,-111,-128,299,299,-135,-136,-137,-134,-130,-131,299,-237,-293,-126,-188,-132,-133,]),'DIFERENTE':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574
,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,300,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,300,-162,-163,-305,-144,-143,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,-231,300,300,300,300,300,300,300,-239,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,-262,300,300,300,300,300,300,300,300,300,300,-86,-85,-87,300,-123,300,-115,-295,300,300,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,300,-304,-153,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,-182,300,300,300,300,300,300,300,-190,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,300,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,300,300,300,-125,300,300,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,300,300,300,300,300,300,300,300,300,300,-314,-84,300,-127,300,300,-150,300,300,300,300,300,300,300,300,-223,-227,-230,-232,-246,-247,300,300,300,300,300,300,300,-277,-279,-280,-281,-2
82,-283,-284,-285,-286,-287,-288,-289,-290,-291,300,300,300,-129,-148,-149,300,-174,-178,-181,-183,-197,-198,300,300,300,-266,-267,-268,-269,-270,-271,-272,300,300,300,300,-138,-124,-147,300,-145,-146,-278,-292,-111,-128,300,300,-135,-136,-137,-134,-130,-131,300,-237,-293,-126,-188,-132,-133,]),'AND':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,286,292,293,302,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,414,415,416,417,418,419,420,421,422,424,426,427,432,460,461,462,463,464,466,468,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,600,607,634,649,651,654,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,724,743,745,766,782,784,786,787,789,791,792,793,794,795,796,797,800,801,802,803,805,806,807,808,809,810,811,812,813,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,835,843,856,859,861,862,863,864,865,866,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,896,897,900,901,902,903,904,906,907,914,917,920,921,924,925,926,927,928,929,930,931,932,933,935,936,937,],[-114,-144,-264,-265,-307,-116,290,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,290,-162,-163,-305,-144,-143,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,-231,290,290,290,290,290,290,290,-239,290,290,290,290,290,290,290
,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,-262,290,290,290,290,290,290,290,290,290,290,-86,-85,-87,290,-123,290,-115,-295,290,653,-303,-158,-159,-160,-161,-296,-297,-298,-299,290,-301,290,290,-153,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,-182,290,290,290,290,290,290,290,-190,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,290,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,290,290,290,-125,785,788,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,290,290,290,290,290,290,290,290,290,290,-314,-84,290,-127,290,860,-150,290,290,290,290,290,290,290,290,-223,-227,-230,-232,-246,-247,290,290,290,290,290,290,290,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,290,290,290,-129,-148,-149,290,-174,-178,-181,-183,-197,-198,290,290,290,-266,-267,-268,-269,-270,-271,-272,290,290,290,290,-138,-124,-147,290,-145,-146,-278,-292,-111,-128,290,290,-135,-136,-137,-134,-130,-131,290,-237,-293,-126,-188,-132,-133,]),'COMA':([45,47,101,102,129,132,133,135,136,186,187,188,189,190,191,192,194,248,269,276,277,278,285,286,292,293,302,353,354,355,356,367,371,374,375,376,381,383,390,391,406,407,418,424,425,426,427,434,435,436,437,439,441,442,454,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,494,498,501,502,503,508,510,517,518,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,599,600,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,637,645,647,648,649,657,658,659,660,661,662,66
3,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,714,725,739,740,743,745,751,752,753,760,763,765,767,778,781,782,784,787,789,795,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,832,836,842,844,845,846,854,855,856,859,861,862,863,864,865,866,868,869,872,873,874,875,876,877,878,879,882,883,884,885,886,889,890,891,893,895,896,897,900,901,902,903,904,906,907,910,913,914,917,918,923,924,925,926,927,928,929,931,932,933,935,936,937,],[193,-144,-264,-265,-307,287,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-77,453,-34,-77,287,-140,-162,-163,-305,-144,-143,534,535,546,550,553,-231,554,559,-239,567,568,-262,583,594,-86,603,-85,-87,626,-45,-46,-47,-79,-72,-71,-92,603,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,671,675,678,-182,679,684,-190,692,693,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,742,-108,-49,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,-77,-78,-70,-73,-74,-80,-33,-94,-95,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,798,799,804,815,829,830,-314,-84,-48,-67,-56,-44,-91,603,-88,-93,-77,-127,-122,-150,-151,867,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-107,-66,603,-75,-76,603,-104,-96,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,905,-266,-267,-268,-269,-270,-271,-272,908,742,-58,-59,-60,-68,-69,-90,-91,-102,-103,-138,603,-147,919,-145,-146,-278,-292,-61,-89,-111,-128,603,-81,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'CASTEO':([45,47,101,10
2,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,419,421,422,424,426,427,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,595,597,598,-86,-85,-87,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-138,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'AS':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,420,424,426,427,460,462,463,464,468,471,472,473,474,475,476,477,4
78,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,195,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,195,-143,-231,-239,-262,596,601,-85,-87,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-138,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'LIMIT':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,424,426,427,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576
,577,578,579,580,581,582,590,591,592,593,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,915,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'OFFSET':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,424,426,427,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695
,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,916,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'WHERE':([45,47,101,102,118,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,424,426,427,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,607,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,
862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,266,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,650,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,750,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-138,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'DEFAULT':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,269,278,292,293,302,353,354,375,383,406,424,426,427,439,441,442,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,637,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,751,752,753,763,767,779,781,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,836,844,845,856,859,861,862,863,864,865,866,868,869,873
,874,875,876,877,878,879,884,885,886,889,890,891,893,897,900,901,903,904,906,907,910,913,914,917,923,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,443,443,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,-79,-72,-71,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,443,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,443,-78,-70,-73,-74,-80,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,443,-67,-56,-91,-88,853,443,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-66,-75,-76,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-58,-59,-60,-68,-69,-90,-91,-138,-124,-147,-145,-146,-278,-292,-61,-89,-111,-128,-81,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'UNIQUE':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,269,278,292,293,302,353,354,375,383,406,424,426,427,438,439,441,442,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,636,637,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,74
5,751,752,753,763,767,781,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,836,844,845,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,884,885,886,889,890,891,893,897,900,901,903,904,906,907,910,913,914,917,923,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,439,439,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,627,-79,-72,-71,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,439,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,439,-78,-70,-73,-74,-82,-80,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,439,-67,-56,-91,-88,439,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-66,-75,-76,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-58,-59,-60,-68,-69,-90,-91,-138,-124,-147,-145,-146,-278,-292,-61,-89,-111,-128,-81,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'CHECK':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,269,278,292,293,302,353,354,375,383,406,424,426,427,438,439,441,442,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,636,637,649,65
7,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,751,752,753,763,767,781,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,836,844,845,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,884,885,886,889,890,891,893,897,900,901,903,904,906,907,910,913,914,917,923,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,440,440,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,628,-79,-72,-71,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,440,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,440,-78,-70,-73,-74,-82,-80,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,440,-67,-56,-91,-88,440,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-66,-75,-76,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-58,-59,-60,-68,-69,-90,-91,-138,-124,-147,-145,-146,-278,-292,-61,-89,-111,-128,-81,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'CONSTRAINT':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,269,278,279,292,293,302,353,354,375,383,406,424,426,427,439,441,442,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,5
58,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,637,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,751,752,753,763,767,781,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,836,844,845,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,884,885,886,889,890,891,893,897,900,901,903,904,906,907,910,913,914,917,923,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,445,445,457,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,-79,-72,-71,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,445,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,445,-78,-70,-73,-74,-80,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,445,-67,-56,-91,-88,445,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-66,-75,-76,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-58,-59,-60,-68,-69,-90,-91,-138,-124,-147,-145,-146,-278,-292,-61,-89,-111,-128,-81,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'PRIMARY':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,269,278,292,293,302,353,354,375,383,406,424,426,427
,438,439,441,442,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,636,637,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,751,752,753,763,767,781,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,836,844,845,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,884,885,886,889,890,891,893,897,900,901,903,904,906,907,910,913,914,917,923,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,446,446,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,630,-79,-72,-71,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,446,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,446,-78,-70,-73,-74,-82,-80,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,446,-67,-56,-91,-88,446,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-66,-75,-76,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-58,-59,-60,-68,-69,-90,-91,-138,-124,-147,-145,-146,-278,-292,-61,-89,-111,-128,-81,-135,-136,
-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'FOREIGN':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,269,278,292,293,302,353,354,375,383,406,424,426,427,438,439,441,442,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,609,610,611,612,613,614,615,617,621,622,623,624,626,627,629,634,635,636,637,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,751,752,753,763,767,781,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,836,844,845,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,884,885,886,889,890,891,893,897,900,901,903,904,906,907,910,913,914,917,923,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,447,447,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,631,-79,-72,-71,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,447,-50,-51,-52,-53,-54,-55,-57,-62,-63,-64,-65,447,-78,-70,-73,-74,-82,-80,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,447,-67,-56,-91,-88,447,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-66,-75,-7
6,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-58,-59,-60,-68,-69,-90,-91,-138,-124,-147,-145,-146,-278,-292,-61,-89,-111,-128,-81,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'GROUP':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,424,426,427,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,783,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-138,-124,-147,-145,-146,-278,-292,-111,-128,-
135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'ORDER':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,424,426,427,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,857,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,-129,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-138,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'HAVING':([45,47,101,102,132,133,135,136,186,187,188,189,190,191,192,194,248,292,293,302,353,354,375,383,406,42
4,426,427,460,462,463,464,468,471,472,473,474,475,476,477,478,479,480,481,482,483,502,510,533,536,537,538,539,540,541,542,543,544,545,547,548,549,551,552,555,556,557,558,560,561,562,563,564,565,566,569,570,571,572,573,574,575,576,577,578,579,580,581,582,590,591,592,593,649,657,658,659,660,661,662,663,664,665,666,667,668,669,670,672,673,674,676,677,680,681,682,683,685,686,687,688,689,690,691,694,695,696,697,698,699,700,701,702,703,704,705,706,707,743,745,782,784,787,789,800,801,802,803,805,806,814,816,817,818,819,820,821,822,823,824,825,826,827,828,856,859,861,862,863,864,865,866,868,869,873,874,875,876,877,878,879,897,900,901,903,904,906,907,914,917,924,925,926,927,928,929,931,932,933,935,936,937,],[-114,-144,-264,-265,-116,-140,-306,-310,-294,-308,-309,-311,-112,-307,-113,-142,-263,-162,-163,-305,-144,-143,-231,-239,-262,-86,-85,-87,-123,-115,-295,-139,-303,-158,-159,-160,-161,-296,-297,-298,-299,-300,-301,-302,-304,-153,-182,-190,-141,-213,-214,-215,-216,-217,-218,-219,-220,-221,-222,-224,-225,-226,-228,-229,-233,-234,-235,-236,-238,-240,-241,-242,-243,-244,-245,-248,-249,-250,-251,-252,-253,-254,-255,-256,-257,-258,-259,-260,-261,-273,-274,-275,-276,-125,-154,-155,-156,-157,-164,-165,-166,-167,-168,-169,-170,-171,-172,-173,-175,-176,-177,-179,-180,-184,-185,-186,-187,-189,-191,-192,-193,-194,-195,-196,-199,-200,-201,-202,-203,-204,-205,-206,-207,-208,-209,-210,-211,-212,-314,-84,-127,-122,-150,-151,-223,-227,-230,-232,-246,-247,-277,-279,-280,-281,-282,-283,-284,-285,-286,-287,-288,-289,-290,-291,898,-148,-149,-152,-174,-178,-181,-183,-197,-198,-266,-267,-268,-269,-270,-271,-272,-138,-124,-147,-145,-146,-278,-292,-111,-128,-135,-136,-137,-134,-130,-131,-237,-293,-126,-188,-132,-133,]),'VALUES':([115,602,],[260,744,]),'UNION':([116,],[262,]),'INTERSECT':([116,],[263,]),'EXCEPT':([116,],[264,]),'RENAME':([123,124,453,],[273,280,280,]),'OWNER':([123,448,],[274,640,]),'ADD':([124,453,],[278,278,]),'EXISTS':([126,449,],[282,641,]),'PUNTO':([135,],[303,]),'YEAR':([250,
],[408,]),'MONTH':([250,],[409,]),'DAY':([250,],[410,]),'HOUR':([250,],[411,]),'MINUTE':([250,],[412,]),'SECOND':([250,],[413,]),'TO':([273,274,280,],[450,451,459,]),'COLUMN':([275,278,279,],[452,455,458,]),'SYMMETRIC':([289,465,],[467,652,]),'TRUE':([291,470,],[472,658,]),'FALSE':([291,470,],[473,659,]),'UNKNOWN':([291,470,],[474,660,]),'ASC':([424,426,427,743,745,918,],[-86,-85,-87,-314,-84,928,]),'DESC':([424,426,427,743,745,918,],[-86,-85,-87,-314,-84,929,]),'SMALLINT':([433,646,780,],[610,610,610,]),'INTEGER':([433,596,646,780,],[611,727,611,611,]),'BIGINT':([433,596,646,780,],[612,728,612,612,]),'DECIMAL':([433,596,646,780,],[613,729,613,613,]),'NUMERIC':([433,596,646,780,],[614,730,614,614,]),'REAL':([433,596,646,780,],[615,731,615,615,]),'DOUBLE':([433,646,780,],[616,616,616,]),'MONEY':([433,596,646,780,],[617,732,617,617,]),'VARCHAR':([433,596,646,780,],[618,737,618,618,]),'CHAR':([433,596,646,780,],[619,734,619,619,]),'CHARACTER':([433,596,646,780,],[620,733,620,620,]),'TEXT':([433,596,646,780,],[621,735,621,621,]),'DATE':([433,596,646,780,],[622,726,622,622,]),'TIME':([433,596,646,780,],[624,736,624,624,]),'KEY':([446,447,630,631,],[637,638,763,764,]),'MODE':([448,639,850,],[-28,772,-27,]),'LLAVE_ABRE':([451,],[643,]),'INTERVAL':([583,],[717,]),'BYTEA':([595,597,598,],[725,739,740,]),'PRECISION':([616,],[753,]),'VARYING':([620,],[757,]),'INHERITS':([625,],[759,]),'REFERENCES':([638,764,770,891,],[769,-91,848,-90,]),'CURRENT_USER':([643,],[776,]),'SESSION_USER':([643,],[777,]),'TYPE':([644,],[780,]),'LLAVE_CIERRA':([774,775,776,777,],[851,-37,-38,-39,]),'BY':([783,857,],[858,899,]),'ALL':([915,],[925,]),'NULLS':([927,928,929,],[934,-130,-131,]),'LAST':([934,],[936,]),'FIRST':([934,],[937,]),}
# Expand the compact PLY action table (token -> ([states], [actions])) into
# the nested mapping the parser actually consults: _lr_action[state][token].
# setdefault replaces the non-idiomatic `if not _x in ...` create-then-assign.
_lr_action = {}
for _k, _v in _lr_action_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        _lr_action.setdefault(_x, {})[_k] = _y
# Drop the compact form once expanded, exactly as the generated code does.
del _lr_action_items
# LR goto table emitted by PLY's yacc: maps each grammar non-terminal name to
# a pair ([source states], [goto states]); the loop below zips the pairs into
# the nested _lr_goto mapping. Machine-generated from sql_grammar.py -- do not
# edit by hand; regenerate the parser instead.
_lr_goto_items = {'init':([0,],[1,]),'instrucciones':([0,],[2,]),'instruccion':([0,2,],[3,21,]),'crear_statement':([0,2,],[4,4,]),'alter_statement':([0,2,],[5,5,]),'drop_statement':([0,2,],[6,6,]),'seleccionar':([0,2,13,42,43,44,131,134,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,428,429,430,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[7,7,31,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,604,605,606,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,]),'union':([0,2,],[8,8,]),'intersect':([0,2,],[9,9,]),'except':([0,2,],[10,10,]),'or_replace':([17,],[36,]),'distinto':([20,],[42,]),'list_expression_f':([20,],[45,]),'expression_f':([20,193,],[47,353,]),'if_exists':([40,],[125,]),'select_list':([42,],[128,]),'expressiones':([42,43,44,898,],[130,190,192,917,]),'list_expression':([42,43,44,131,898,],[132,132,132,285,132,]),'expression':([42,43,44,131,134,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,217,218,219,220,221,222,223,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,249,251,252,253,254,255,256,257,258,259,266,284,287,289,290,294,295,296,297,298,299,300,301,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,323,324,325,326,327,328,329,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,423,431,443,465,467,534,535,546,550,553,554,559,567,568,594,633,650,652,653,655,671,675,678,679,684,692,693,717,718,719,720,721,722,723,742,750,762,785,788,790,798,799,804,815,829,830,833,853,860,867,898,905,908,919,],[133,133,133,286,302,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,376,377,378,379,380,381,382,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,407,414,415,416,417,418,419,420,421,422,432,461,464,466,468,475,476,477,478,479,480,481,482,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,503,504,505,506,507,508,509,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,600,607,634,651,654,708,709,710,711,712,713,714,715,716,724,766,784,786,787,789,791,792,793,794,795,796,797,807,808,809,810,811,812,813,832,835,843,859,861,862,870,871,872,880,881,882,600,896,901,902,133,920,921,930,]),'exp_alias':([47,353,],[194,533,]),'if_not_exists':([121,],[270,]),'rename_owner':([123,],[272,]),'alter_list':([124,],[276,]),'alter_op':([124,453,],[277,645,]),'list_id':([261,283,632,761,768,858,899,],[425,460,765,842,846,900,918,]),'alias':([261,283,603,632,761,768,858,899,],[426,426,745,426,426,426,426,426,]),'alias_item':([261,283,603,632,761,768,858,899,],[427,427,427,427,427,427,427,427,]),'contenido_tabla':([269,],[434,]),'manejo_tabla':([269,626,],[435,760,]),'declaracion_columna':([269,626,],[436,436,]),'condition_column':([269,278,609,626,751,781,],[437,454,752,437,836,855,]),'constraint':([269,278,609,626,751,781,],[438,438,438,438,438,438,]),'key_table':([269,278,438,609,626,751,781,],[441,441,629,441,441,441,441,]),'key_table_row':([269,278,609,626,751,781,],[442,442,442,442,442,442,]),'alter_drop':([279,],[456,]),'list_val':([423,833,],[599,883,]),'type_column':([433,646,780,],[609,781,854,]),'owner_':([448,],[639,]),'donde':([460,],[649,]),'where':([607,],[749,]),'condition_column_row':([609,],[751,]),'inherits_statement':([625,],[758,]),'list_key':([637,638,763,764,893,],[767,770,767,770,913,]),'mode_':([639,],[771,]),'ow_op':([643,],[774,]),'alter_col_op':([644,],[778,]),'group_by':([649,],[782,]),'order_by':([782,],[856,]),'group_having':([856,],[897,]),'limite':([897,],[914,]),'asc_desc':([918,],[927,]),'nulls_f_l':([927,],[933,]),}
# Expand the compact PLY goto table (non-terminal -> ([states], [gotos])) into
# the nested mapping used during parsing: _lr_goto[state][non_terminal].
# setdefault replaces the non-idiomatic `if not _x in ...` create-then-assign,
# mirroring the _lr_action expansion above.
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        _lr_goto.setdefault(_x, {})[_k] = _y
# Drop the compact form once expanded, exactly as the generated code does.
del _lr_goto_items
_lr_productions = [
("S' -> init","S'",1,None,None,None),
('init -> instrucciones','init',1,'p_init','sql_grammar.py',387),
('instrucciones -> instrucciones instruccion','instrucciones',2,'p_instrucciones_lista','sql_grammar.py',392),
('instrucciones -> instruccion','instrucciones',1,'p_instrucciones_instruccion','sql_grammar.py',398),
('instruccion -> crear_statement PUNTOCOMA','instruccion',2,'p_instruccion','sql_grammar.py',403),
('instruccion -> alter_statement PUNTOCOMA','instruccion',2,'p_instruccion','sql_grammar.py',404),
('instruccion -> drop_statement PUNTOCOMA','instruccion',2,'p_instruccion','sql_grammar.py',405),
('instruccion -> seleccionar PUNTOCOMA','instruccion',2,'p_instruccion','sql_grammar.py',406),
('instruccion -> union PUNTOCOMA','instruccion',2,'p_instruccion','sql_grammar.py',407),
('instruccion -> intersect PUNTOCOMA','instruccion',2,'p_instruccion','sql_grammar.py',408),
('instruccion -> except PUNTOCOMA','instruccion',2,'p_instruccion','sql_grammar.py',409),
('instruccion -> SHOW DATABASES PUNTOCOMA','instruccion',3,'p_aux_instruccion','sql_grammar.py',414),
('instruccion -> INSERT INTO ID VALUES PAR_ABRE list_val PAR_CIERRA PUNTOCOMA','instruccion',8,'p_aux_instruccion','sql_grammar.py',415),
('instruccion -> INSERT INTO ID PAR_ABRE list_id PAR_CIERRA VALUES PAR_ABRE list_val PAR_CIERRA PUNTOCOMA','instruccion',11,'p_aux_instruccion','sql_grammar.py',416),
('instruccion -> UPDATE ID SET ID IGUAL expression where PUNTOCOMA','instruccion',8,'p_aux_instruccion','sql_grammar.py',417),
('instruccion -> DELETE FROM ID WHERE expression PUNTOCOMA','instruccion',6,'p_aux_instruccion','sql_grammar.py',418),
('instruccion -> DELETE FROM ID PUNTOCOMA','instruccion',4,'p_aux_instruccion','sql_grammar.py',419),
('instruccion -> USE DATABASE ID PUNTOCOMA','instruccion',4,'p_aux_instruccion','sql_grammar.py',420),
('union -> PAR_ABRE seleccionar PAR_CIERRA UNION PAR_ABRE seleccionar PAR_CIERRA','union',7,'p_union','sql_grammar.py',448),
('intersect -> PAR_ABRE seleccionar PAR_CIERRA INTERSECT PAR_ABRE seleccionar PAR_CIERRA','intersect',7,'p_intersect','sql_grammar.py',458),
('except -> PAR_ABRE seleccionar PAR_CIERRA EXCEPT PAR_ABRE seleccionar PAR_CIERRA','except',7,'p_except_','sql_grammar.py',467),
('crear_statement -> CREATE TABLE ID PAR_ABRE contenido_tabla PAR_CIERRA inherits_statement','crear_statement',7,'p_crear_statement_tbl','sql_grammar.py',476),
('crear_statement -> CREATE or_replace DATABASE if_not_exists ID owner_ mode_','crear_statement',7,'p_crear_statement_db','sql_grammar.py',483),
('or_replace -> OR REPLACE','or_replace',2,'p_or_replace_db','sql_grammar.py',490),
('or_replace -> <empty>','or_replace',0,'p_or_replace_db','sql_grammar.py',491),
('if_not_exists -> IF NOT EXISTS','if_not_exists',3,'p_if_not_exists_db','sql_grammar.py',500),
('if_not_exists -> <empty>','if_not_exists',0,'p_if_not_exists_db','sql_grammar.py',501),
('owner_ -> OWNER IGUAL ID','owner_',3,'p_owner_db','sql_grammar.py',510),
('owner_ -> <empty>','owner_',0,'p_owner_db','sql_grammar.py',511),
('mode_ -> MODE IGUAL ENTERO','mode_',3,'p_mode_db','sql_grammar.py',522),
('mode_ -> <empty>','mode_',0,'p_mode_db','sql_grammar.py',523),
('alter_statement -> ALTER DATABASE ID rename_owner','alter_statement',4,'p_alter_db','sql_grammar.py',534),
('alter_statement -> ALTER TABLE ID alter_list','alter_statement',4,'p_alter_tbl','sql_grammar.py',541),
('alter_list -> alter_list COMA alter_op','alter_list',3,'p_lista_alter','sql_grammar.py',548),
('alter_list -> alter_op','alter_list',1,'p_aux_lista_alter','sql_grammar.py',554),
('rename_owner -> RENAME TO ID','rename_owner',3,'p_rename_owner_db','sql_grammar.py',559),
('rename_owner -> OWNER TO LLAVE_ABRE ow_op LLAVE_CIERRA','rename_owner',5,'p_rename_owner_db','sql_grammar.py',560),
('ow_op -> ID','ow_op',1,'p_ow_op_db','sql_grammar.py',571),
('ow_op -> CURRENT_USER','ow_op',1,'p_ow_op_db','sql_grammar.py',572),
('ow_op -> SESSION_USER','ow_op',1,'p_ow_op_db','sql_grammar.py',573),
('drop_statement -> DROP DATABASE if_exists ID','drop_statement',4,'p_drop_db','sql_grammar.py',578),
('drop_statement -> DROP TABLE ID','drop_statement',3,'p_drop_tbl','sql_grammar.py',588),
('if_exists -> IF EXISTS','if_exists',2,'p_if_exists_db','sql_grammar.py',595),
('if_exists -> <empty>','if_exists',0,'p_if_exists_db','sql_grammar.py',596),
('contenido_tabla -> contenido_tabla COMA manejo_tabla','contenido_tabla',3,'p_contenido_tabla','sql_grammar.py',604),
('contenido_tabla -> manejo_tabla','contenido_tabla',1,'p_aux_contenido_table','sql_grammar.py',610),
('manejo_tabla -> declaracion_columna','manejo_tabla',1,'p_manejo_tabla','sql_grammar.py',615),
('manejo_tabla -> condition_column','manejo_tabla',1,'p_manejo_tabla','sql_grammar.py',616),
('declaracion_columna -> ID type_column condition_column_row','declaracion_columna',3,'p_aux_declaracion_columna','sql_grammar.py',621),
('declaracion_columna -> ID type_column','declaracion_columna',2,'p_declaracion_columna','sql_grammar.py',628),
('type_column -> SMALLINT','type_column',1,'p_type_column','sql_grammar.py',635),
('type_column -> INTEGER','type_column',1,'p_type_column','sql_grammar.py',636),
('type_column -> BIGINT','type_column',1,'p_type_column','sql_grammar.py',637),
('type_column -> DECIMAL','type_column',1,'p_type_column','sql_grammar.py',638),
('type_column -> NUMERIC','type_column',1,'p_type_column','sql_grammar.py',639),
('type_column -> REAL','type_column',1,'p_type_column','sql_grammar.py',640),
('type_column -> DOUBLE PRECISION','type_column',2,'p_type_column','sql_grammar.py',641),
('type_column -> MONEY','type_column',1,'p_type_column','sql_grammar.py',642),
('type_column -> VARCHAR PAR_ABRE ENTERO PAR_CIERRA','type_column',4,'p_type_column','sql_grammar.py',643),
('type_column -> CHAR PAR_ABRE ENTERO PAR_CIERRA','type_column',4,'p_type_column','sql_grammar.py',644),
('type_column -> CHARACTER PAR_ABRE ENTERO PAR_CIERRA','type_column',4,'p_type_column','sql_grammar.py',645),
('type_column -> CHARACTER VARYING PAR_ABRE ENTERO PAR_CIERRA','type_column',5,'p_type_column','sql_grammar.py',646),
('type_column -> TEXT','type_column',1,'p_type_column','sql_grammar.py',647),
('type_column -> DATE','type_column',1,'p_type_column','sql_grammar.py',648),
('type_column -> TIMESTAMP','type_column',1,'p_type_column','sql_grammar.py',649),
('type_column -> TIME','type_column',1,'p_type_column','sql_grammar.py',650),
('condition_column_row -> condition_column_row condition_column','condition_column_row',2,'p_condition_column_row','sql_grammar.py',684),
('condition_column_row -> condition_column','condition_column_row',1,'p_aux_condition_column_row','sql_grammar.py',690),
('condition_column -> constraint UNIQUE PAR_ABRE list_id PAR_CIERRA','condition_column',5,'p_condition_column','sql_grammar.py',695),
('condition_column -> constraint CHECK PAR_ABRE expression PAR_CIERRA','condition_column',5,'p_condition_column','sql_grammar.py',696),
('condition_column -> constraint key_table','condition_column',2,'p_condition_column','sql_grammar.py',697),
('condition_column -> key_table_row','condition_column',1,'p_aux_condition_key_table','sql_grammar.py',711),
('condition_column -> key_table','condition_column',1,'p_aux_condition_key_table','sql_grammar.py',712),
('condition_column -> DEFAULT expression','condition_column',2,'p_aux_condition_column','sql_grammar.py',717),
('condition_column -> NOT NULL','condition_column',2,'p_aux_condition_column','sql_grammar.py',718),
('condition_column -> UNIQUE PAR_ABRE list_id PAR_CIERRA','condition_column',4,'p_aux_condition_column','sql_grammar.py',719),
('condition_column -> CHECK PAR_ABRE expression PAR_CIERRA','condition_column',4,'p_aux_condition_column','sql_grammar.py',720),
('condition_column -> <empty>','condition_column',0,'p_aux_condition_column','sql_grammar.py',721),
('condition_column -> constraint UNIQUE','condition_column',2,'p_condition_unique','sql_grammar.py',741),
('condition_column -> UNIQUE','condition_column',1,'p_condition_unique','sql_grammar.py',742),
('key_table_row -> PRIMARY KEY','key_table_row',2,'p_key_table_row','sql_grammar.py',753),
('key_table_row -> FOREIGN KEY REFERENCES ID PAR_ABRE ID PAR_CIERRA','key_table_row',7,'p_key_table_row','sql_grammar.py',754),
('constraint -> CONSTRAINT ID','constraint',2,'p_constraint','sql_grammar.py',767),
('constraint -> <empty>','constraint',0,'p_constraint','sql_grammar.py',768),
('list_id -> list_id COMA alias','list_id',3,'p_list_id','sql_grammar.py',778),
('list_id -> alias','list_id',1,'p_aux_list_id','sql_grammar.py',784),
('alias -> ID','alias',1,'p_alias','sql_grammar.py',789),
('alias -> alias_item','alias',1,'p_alias','sql_grammar.py',790),
('key_table -> PRIMARY KEY list_key','key_table',3,'p_key_table','sql_grammar.py',795),
('key_table -> FOREIGN KEY list_key REFERENCES ID list_key','key_table',6,'p_key_table','sql_grammar.py',796),
('list_key -> PAR_ABRE list_id PAR_CIERRA','list_key',3,'p_list_key','sql_grammar.py',807),
('list_key -> <empty>','list_key',0,'p_list_key','sql_grammar.py',808),
('alter_op -> ADD condition_column','alter_op',2,'p_alter_op','sql_grammar.py',816),
('alter_op -> ALTER COLUMN ID alter_col_op','alter_op',4,'p_alter_op','sql_grammar.py',817),
('alter_op -> DROP alter_drop ID','alter_op',3,'p_alter_op','sql_grammar.py',818),
('alter_op -> RENAME TO ID','alter_op',3,'p_alter_op','sql_grammar.py',819),
('alter_op -> ADD COLUMN ID type_column condition_column','alter_op',5,'p_alter_op_add_col','sql_grammar.py',839),
('alter_drop -> CONSTRAINT','alter_drop',1,'p_aux_alter_op','sql_grammar.py',846),
('alter_drop -> COLUMN','alter_drop',1,'p_aux_alter_op','sql_grammar.py',847),
('op_add -> CHECK PAR_ABRE ID DIFERENTE CADENA PAR_CIERRA','op_add',6,'p_op_add','sql_grammar.py',852),
('op_add -> CONSTRAINT ID UNIQUE PAR_ABRE ID PAR_CIERRA','op_add',6,'p_op_add','sql_grammar.py',853),
('op_add -> key_table REFERENCES PAR_ABRE list_id PAR_CIERRA','op_add',5,'p_op_add','sql_grammar.py',854),
('alter_col_op -> SET NOT NULL','alter_col_op',3,'p_alter_col_op','sql_grammar.py',868),
('alter_col_op -> SET DEFAULT expression','alter_col_op',3,'p_alter_col_op','sql_grammar.py',869),
('alter_col_op -> TYPE type_column','alter_col_op',2,'p_alter_col_op','sql_grammar.py',870),
('inherits_statement -> INHERITS PAR_ABRE ID PAR_CIERRA','inherits_statement',4,'p_inherits_tbl','sql_grammar.py',884),
('inherits_statement -> <empty>','inherits_statement',0,'p_inherits_tbl','sql_grammar.py',885),
('list_val -> list_val COMA expression','list_val',3,'p_list_val','sql_grammar.py',895),
('list_val -> expression','list_val',1,'p_aux_list_val','sql_grammar.py',901),
('where -> WHERE expression','where',2,'p_where','sql_grammar.py',906),
('where -> <empty>','where',0,'p_where','sql_grammar.py',907),
('seleccionar -> SELECT distinto select_list FROM list_id donde group_by order_by group_having limite','seleccionar',10,'p_seleccionar','sql_grammar.py',918),
('seleccionar -> SELECT GREATEST expressiones','seleccionar',3,'p_aux_seleccionar','sql_grammar.py',928),
('seleccionar -> SELECT LEAST expressiones','seleccionar',3,'p_aux_seleccionar','sql_grammar.py',929),
('seleccionar -> SELECT list_expression_f','seleccionar',2,'p_seleccionar_funciones','sql_grammar.py',935),
('expressiones -> PAR_ABRE list_expression PAR_CIERRA','expressiones',3,'p_expressiones','sql_grammar.py',945),
('expressiones -> list_expression','expressiones',1,'p_aux_expressiones','sql_grammar.py',950),
('distinto -> DISTINCT','distinto',1,'p_distinto','sql_grammar.py',955),
('distinto -> <empty>','distinto',0,'p_distinto','sql_grammar.py',956),
('select_list -> ASTERISCO','select_list',1,'p_select_list','sql_grammar.py',964),
('select_list -> expressiones','select_list',1,'p_select_list','sql_grammar.py',965),
('table_expression -> expressiones','table_expression',1,'p_table_expression','sql_grammar.py',970),
('donde -> WHERE expression','donde',2,'p_donde','sql_grammar.py',975),
('donde -> <empty>','donde',0,'p_donde','sql_grammar.py',976),
('group_by -> GROUP BY list_id','group_by',3,'p_group_by','sql_grammar.py',986),
('group_by -> <empty>','group_by',0,'p_group_by','sql_grammar.py',987),
('order_by -> ORDER BY list_id asc_desc nulls_f_l','order_by',5,'p_order_by','sql_grammar.py',998),
('order_by -> <empty>','order_by',0,'p_order_by','sql_grammar.py',999),
('group_having -> HAVING expressiones','group_having',2,'p_group_having','sql_grammar.py',1009),
('group_having -> <empty>','group_having',0,'p_group_having','sql_grammar.py',1010),
('asc_desc -> ASC','asc_desc',1,'p_asc_desc','sql_grammar.py',1020),
('asc_desc -> DESC','asc_desc',1,'p_asc_desc','sql_grammar.py',1021),
('nulls_f_l -> NULLS LAST','nulls_f_l',2,'p_nulls_f_l','sql_grammar.py',1026),
('nulls_f_l -> NULLS FIRST','nulls_f_l',2,'p_nulls_f_l','sql_grammar.py',1027),
('nulls_f_l -> <empty>','nulls_f_l',0,'p_nulls_f_l','sql_grammar.py',1028),
('limite -> LIMIT ENTERO','limite',2,'p_limite','sql_grammar.py',1036),
('limite -> LIMIT ALL','limite',2,'p_limite','sql_grammar.py',1037),
('limite -> OFFSET ENTERO','limite',2,'p_limite','sql_grammar.py',1038),
('limite -> <empty>','limite',0,'p_limite','sql_grammar.py',1039),
('list_expression -> list_expression COMA expression','list_expression',3,'p_list_expression','sql_grammar.py',1049),
('list_expression -> expression','list_expression',1,'p_aux_list_expression','sql_grammar.py',1055),
('list_expression_f -> list_expression_f COMA expression_f exp_alias','list_expression_f',4,'p_list_expression_f','sql_grammar.py',1059),
('list_expression_f -> expression_f exp_alias','list_expression_f',2,'p_aux_list_expression_f','sql_grammar.py',1066),
('exp_alias -> AS CADENA','exp_alias',2,'p_exp_alias','sql_grammar.py',1071),
('exp_alias -> <empty>','exp_alias',0,'p_exp_alias','sql_grammar.py',1072),
('expression_f -> SUBSTRING PAR_ABRE expression COMA expression COMA expression PAR_CIERRA','expression_f',8,'p_expression','sql_grammar.py',1080),
('expression_f -> SUBSTR PAR_ABRE expression COMA expression COMA expression PAR_CIERRA','expression_f',8,'p_expression','sql_grammar.py',1081),
('expression -> expression NOT BETWEEN SYMMETRIC expression AND expression','expression',7,'p_expression_between3','sql_grammar.py',1091),
('expression -> expression NOT BETWEEN expression AND expression','expression',6,'p_expression_between2','sql_grammar.py',1102),
('expression -> expression BETWEEN SYMMETRIC expression AND expression','expression',6,'p_expression_between2','sql_grammar.py',1103),
('expression -> expression BETWEEN expression AND expression','expression',5,'p_expression_between','sql_grammar.py',1113),
('expression -> expression IS DISTINCT FROM expression','expression',5,'p_expression_Distinct','sql_grammar.py',1123),
('expression -> expression IS NOT DISTINCT FROM expression','expression',6,'p_expression_not_Distinct','sql_grammar.py',1133),
('expression -> ID PUNTO ID','expression',3,'p_expression_puntoId','sql_grammar.py',1144),
('expression -> expression IS NOT NULL','expression',4,'p_expression_null3','sql_grammar.py',1154),
('expression -> expression IS NOT TRUE','expression',4,'p_expression_null3','sql_grammar.py',1155),
('expression -> expression IS NOT FALSE','expression',4,'p_expression_null3','sql_grammar.py',1156),
('expression -> expression IS NOT UNKNOWN','expression',4,'p_expression_null3','sql_grammar.py',1157),
('expression -> expression IS NULL','expression',3,'p_expression_null2','sql_grammar.py',1167),
('expression -> expression IS TRUE','expression',3,'p_expression_null2','sql_grammar.py',1168),
('expression -> expression IS FALSE','expression',3,'p_expression_null2','sql_grammar.py',1169),
('expression -> expression IS UNKNOWN','expression',3,'p_expression_null2','sql_grammar.py',1170),
('expression -> expression ISNULL','expression',2,'p_expression_null','sql_grammar.py',1180),
('expression -> expression NOTNULL','expression',2,'p_expression_null','sql_grammar.py',1181),
('expression -> SUM PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1191),
('expression -> COUNT PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1192),
('expression -> AVG PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1193),
('expression -> MAX PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1194),
('expression -> MIN PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1195),
('expression -> ABS PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1196),
('expression -> CBRT PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1197),
('expression -> CEIL PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1198),
('expression -> CEILING PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1199),
('expression -> DEGREES PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1200),
('expression -> DIV PAR_ABRE expression COMA expression PAR_CIERRA','expression',6,'p_expression_agrupar','sql_grammar.py',1201),
('expression -> EXP PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1202),
('expression -> FACTORIAL PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1203),
('expression -> FLOOR PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1204),
('expression -> GCD PAR_ABRE expression COMA expression PAR_CIERRA','expression',6,'p_expression_agrupar','sql_grammar.py',1205),
('expression -> LN PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1206),
('expression -> LOG PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1207),
('expression -> MOD PAR_ABRE expression COMA expression PAR_CIERRA','expression',6,'p_expression_agrupar','sql_grammar.py',1208),
('expression -> PI PAR_ABRE PAR_CIERRA','expression',3,'p_expression_agrupar','sql_grammar.py',1209),
('expression -> POWER PAR_ABRE expression COMA expression PAR_CIERRA','expression',6,'p_expression_agrupar','sql_grammar.py',1210),
('expression -> RADIANS PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1211),
('expression -> ROUND PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1212),
('expression -> SIGN PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1213),
('expression -> SQRT PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1214),
('expression -> WIDTH_BUCKET PAR_ABRE expression COMA expression COMA expression COMA expression PAR_CIERRA','expression',10,'p_expression_agrupar','sql_grammar.py',1215),
('expression -> TRUNC PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_agrupar','sql_grammar.py',1216),
('expression -> RANDOM PAR_ABRE PAR_CIERRA','expression',3,'p_expression_agrupar','sql_grammar.py',1217),
('expression -> ACOS PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1232),
('expression -> ACOSD PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1233),
('expression -> ASIN PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1234),
('expression -> ASIND PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1235),
('expression -> ATAN PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1236),
('expression -> ATAND PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1237),
('expression -> ATAN2 PAR_ABRE expression COMA expression PAR_CIERRA','expression',6,'p_expression_trigonometric','sql_grammar.py',1238),
('expression -> ATAN2D PAR_ABRE expression COMA expression PAR_CIERRA','expression',6,'p_expression_trigonometric','sql_grammar.py',1239),
('expression -> COS PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1240),
('expression -> COSD PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1241),
('expression -> COT PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1242),
('expression -> COTD PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1243),
('expression -> SIN PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1244),
('expression -> SIND PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1245),
('expression -> TAN PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1246),
('expression -> TAND PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1247),
('expression -> SINH PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1248),
('expression -> COSH PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1249),
('expression -> TANH PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1250),
('expression -> ASINH PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1251),
('expression -> ACOSH PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1252),
('expression -> ATANH PAR_ABRE expression PAR_CIERRA','expression',4,'p_expression_trigonometric','sql_grammar.py',1253),
('expression_f -> SUM PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1266),
('expression_f -> COUNT PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1267),
('expression_f -> AVG PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1268),
('expression_f -> MAX PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1269),
('expression_f -> MIN PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1270),
('expression_f -> ABS PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1271),
('expression_f -> CBRT PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1272),
('expression_f -> CEIL PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1273),
('expression_f -> CEILING PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1274),
('expression_f -> DEGREES PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1275),
('expression_f -> DIV PAR_ABRE expression COMA expression PAR_CIERRA','expression_f',6,'p_expression_agrupar_f','sql_grammar.py',1276),
('expression_f -> EXP PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1277),
('expression_f -> FACTORIAL PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1278),
('expression_f -> FLOOR PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1279),
('expression_f -> GCD PAR_ABRE expression COMA expression PAR_CIERRA','expression_f',6,'p_expression_agrupar_f','sql_grammar.py',1280),
('expression_f -> LN PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1281),
('expression_f -> LOG PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1282),
('expression_f -> MOD PAR_ABRE expression COMA expression PAR_CIERRA','expression_f',6,'p_expression_agrupar_f','sql_grammar.py',1283),
('expression_f -> PI PAR_ABRE PAR_CIERRA','expression_f',3,'p_expression_agrupar_f','sql_grammar.py',1284),
('expression_f -> POWER PAR_ABRE expression COMA expression PAR_CIERRA','expression_f',6,'p_expression_agrupar_f','sql_grammar.py',1285),
('expression_f -> RADIANS PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1286),
('expression_f -> ROUND PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1287),
('expression_f -> SIGN PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1288),
('expression_f -> SQRT PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1289),
('expression_f -> WIDTH_BUCKET PAR_ABRE expression COMA expression COMA expression COMA expression PAR_CIERRA','expression_f',10,'p_expression_agrupar_f','sql_grammar.py',1290),
('expression_f -> TRUNC PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_agrupar_f','sql_grammar.py',1291),
('expression_f -> RANDOM PAR_ABRE PAR_CIERRA','expression_f',3,'p_expression_agrupar_f','sql_grammar.py',1292),
('expression_f -> ACOS PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1307),
('expression_f -> ACOSD PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1308),
('expression_f -> ASIN PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1309),
('expression_f -> ASIND PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1310),
('expression_f -> ATAN PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1311),
('expression_f -> ATAND PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1312),
('expression_f -> ATAN2 PAR_ABRE expression COMA expression PAR_CIERRA','expression_f',6,'p_expression_trigonometric_f','sql_grammar.py',1313),
('expression_f -> ATAN2D PAR_ABRE expression COMA expression PAR_CIERRA','expression_f',6,'p_expression_trigonometric_f','sql_grammar.py',1314),
('expression_f -> COS PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1315),
('expression_f -> COSD PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1316),
('expression_f -> COT PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1317),
('expression_f -> COTD PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1318),
('expression_f -> SIN PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1319),
('expression_f -> SIND PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1320),
('expression_f -> TAN PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1321),
('expression_f -> TAND PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1322),
('expression_f -> SINH PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1323),
('expression_f -> COSH PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1324),
('expression_f -> TANH PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1325),
('expression_f -> ASINH PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1326),
('expression_f -> ACOSH PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1327),
('expression_f -> ATANH PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_trigonometric_f','sql_grammar.py',1328),
('expression_f -> NOW PAR_ABRE PAR_CIERRA','expression_f',3,'p_expression_time_f','sql_grammar.py',1341),
('expression_f -> TIMESTAMP CADENA','expression_f',2,'p_expression_time_f','sql_grammar.py',1342),
('expression_f -> CURRENT_TIME','expression_f',1,'p_expression_time_f','sql_grammar.py',1343),
('expression_f -> CURRENT_DATE','expression_f',1,'p_expression_time_f','sql_grammar.py',1344),
('expression_f -> DATE_PART PAR_ABRE expression COMA INTERVAL expression PAR_CIERRA','expression_f',7,'p_expression_time_f','sql_grammar.py',1345),
('expression_f -> EXTRACT PAR_ABRE YEAR FROM TIMESTAMP expression PAR_CIERRA','expression_f',7,'p_expression_time_f','sql_grammar.py',1346),
('expression_f -> EXTRACT PAR_ABRE MONTH FROM TIMESTAMP expression PAR_CIERRA','expression_f',7,'p_expression_time_f','sql_grammar.py',1347),
('expression_f -> EXTRACT PAR_ABRE DAY FROM TIMESTAMP expression PAR_CIERRA','expression_f',7,'p_expression_time_f','sql_grammar.py',1348),
('expression_f -> EXTRACT PAR_ABRE HOUR FROM TIMESTAMP expression PAR_CIERRA','expression_f',7,'p_expression_time_f','sql_grammar.py',1349),
('expression_f -> EXTRACT PAR_ABRE MINUTE FROM TIMESTAMP expression PAR_CIERRA','expression_f',7,'p_expression_time_f','sql_grammar.py',1350),
('expression_f -> EXTRACT PAR_ABRE SECOND FROM TIMESTAMP expression PAR_CIERRA','expression_f',7,'p_expression_time_f','sql_grammar.py',1351),
('expression_f -> LENGTH PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_string_f','sql_grammar.py',1380),
('expression_f -> TRIM PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_string_f','sql_grammar.py',1381),
('expression_f -> MD5 PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_string_f','sql_grammar.py',1382),
('expression_f -> SHA256 PAR_ABRE expression PAR_CIERRA','expression_f',4,'p_expression_string_f','sql_grammar.py',1383),
('expression_f -> DECODE PAR_ABRE expression COMA expression PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1384),
('expression_f -> ENCODE PAR_ABRE expression CASTEO BYTEA COMA expression PAR_CIERRA','expression_f',8,'p_expression_string_f','sql_grammar.py',1385),
('expression_f -> CONVERT PAR_ABRE expression AS DATE PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1386),
('expression_f -> CONVERT PAR_ABRE expression AS INTEGER PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1387),
('expression_f -> CONVERT PAR_ABRE expression AS BIGINT PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1388),
('expression_f -> CONVERT PAR_ABRE expression AS DECIMAL PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1389),
('expression_f -> CONVERT PAR_ABRE expression AS NUMERIC PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1390),
('expression_f -> CONVERT PAR_ABRE expression AS REAL PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1391),
('expression_f -> CONVERT PAR_ABRE expression AS MONEY PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1392),
('expression_f -> CONVERT PAR_ABRE expression AS CHARACTER PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1393),
('expression_f -> CONVERT PAR_ABRE expression AS CHAR PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1394),
('expression_f -> CONVERT PAR_ABRE expression AS TEXT PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1395),
('expression_f -> CONVERT PAR_ABRE expression AS TIME PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1396),
('expression_f -> CONVERT PAR_ABRE expression AS VARCHAR PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1397),
('expression_f -> CONVERT PAR_ABRE expression AS TIMESTAMP PAR_CIERRA','expression_f',6,'p_expression_string_f','sql_grammar.py',1398),
('expression_f -> GET_BYTE PAR_ABRE expression CASTEO BYTEA COMA expression PAR_CIERRA','expression_f',8,'p_expression_string_f','sql_grammar.py',1399),
('expression_f -> SET_BYTE PAR_ABRE expression CASTEO BYTEA COMA expression COMA expression PAR_CIERRA','expression_f',10,'p_expression_string_f','sql_grammar.py',1400),
('expression -> seleccionar','expression',1,'p_expression_select','sql_grammar.py',1414),
('expression -> PAR_ABRE expression PAR_CIERRA','expression',3,'p_expression_ss','sql_grammar.py',1419),
('expression -> expression MAYOR expression','expression',3,'p_expression_relacional_aux_mayor','sql_grammar.py',1424),
('expression -> expression MENOR expression','expression',3,'p_expression_relacional_aux_menor','sql_grammar.py',1431),
('expression -> expression MAYOR_IGUAL expression','expression',3,'p_expression_relacional_aux_mayorigual','sql_grammar.py',1438),
('expression -> expression MENOR_IGUAL expression','expression',3,'p_expression_relacional_aux_menorigual','sql_grammar.py',1445),
('expression -> expression IGUAL expression','expression',3,'p_expression_relacional_aux_igual','sql_grammar.py',1452),
('expression -> expression NO_IGUAL expression','expression',3,'p_expression_relacional_aux_noigual','sql_grammar.py',1459),
('expression -> expression DIFERENTE expression','expression',3,'p_expression_relacional_aux_diferente','sql_grammar.py',1466),
('expression -> expression AND expression','expression',3,'p_expression_logica_and__and','sql_grammar.py',1473),
('expression -> expression OR expression','expression',3,'p_expression_logica_or','sql_grammar.py',1480),
('expression -> NOT expression','expression',2,'p_expression_logica_not','sql_grammar.py',1487),
('expression -> ID','expression',1,'p_solouno_expression','sql_grammar.py',1494),
('expression -> ASTERISCO','expression',1,'p_solouno_expression','sql_grammar.py',1495),
('expression -> ENTERO','expression',1,'p_expression_entero','sql_grammar.py',1505),
('expression -> DECIMAL_NUM','expression',1,'p_expression_decimal','sql_grammar.py',1518),
('expression -> NULL','expression',1,'p_expression_nulo','sql_grammar.py',1540),
('expression -> CADENA','expression',1,'p_expression_cadena','sql_grammar.py',1547),
('alias_list -> alias_list COMA alias_item','alias_list',3,'p_alias_list','sql_grammar.py',1565),
('alias_list -> alias_item','alias_list',1,'p_aux_alias_list','sql_grammar.py',1570),
('alias_item -> ID AS ID','alias_item',3,'p_alias_item','sql_grammar.py',1574),
]
| 664.721739
| 171,731
| 0.712156
| 49,457
| 229,329
| 3.251774
| 0.030168
| 0.019525
| 0.023429
| 0.021639
| 0.872375
| 0.859622
| 0.838649
| 0.816326
| 0.796217
| 0.784614
| 0
| 0.564962
| 0.03478
| 229,329
| 344
| 171,732
| 666.65407
| 0.161584
| 0.000366
| 0
| 0.005988
| 1
| 0.002994
| 0.201889
| 0.017602
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
075f5909b3c17f9c8fca68940d1c838f8ef144fe
| 42,767
|
py
|
Python
|
sdk/python/pulumi_alicloud/cassandra/data_center.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/cassandra/data_center.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/cassandra/data_center.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['DataCenterArgs', 'DataCenter']
@pulumi.input_type
class DataCenterArgs:
    # NOTE(review): file is generated by the Pulumi Terraform Bridge (tfgen);
    # @pulumi.input_type introspects the property/setter structure below, so
    # the shape of this class must not be changed by hand.
    def __init__(__self__, *,
                 cluster_id: pulumi.Input[str],
                 instance_type: pulumi.Input[str],
                 node_count: pulumi.Input[int],
                 pay_type: pulumi.Input[str],
                 vswitch_id: pulumi.Input[str],
                 auto_renew: Optional[pulumi.Input[bool]] = None,
                 auto_renew_period: Optional[pulumi.Input[int]] = None,
                 data_center_name: Optional[pulumi.Input[str]] = None,
                 disk_size: Optional[pulumi.Input[int]] = None,
                 disk_type: Optional[pulumi.Input[str]] = None,
                 enable_public: Optional[pulumi.Input[bool]] = None,
                 period: Optional[pulumi.Input[int]] = None,
                 period_unit: Optional[pulumi.Input[str]] = None,
                 zone_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a DataCenter resource.
        :param pulumi.Input[str] cluster_id: Cassandra cluster id of dataCenter-2 belongs to.
        :param pulumi.Input[str] instance_type: Instance specification. See [Instance specifications](https://help.aliyun.com/document_detail/157445.html). Or you can call describeInstanceType api.
        :param pulumi.Input[int] node_count: The node count of Cassandra dataCenter-2, default to 2.
        :param pulumi.Input[str] pay_type: The pay type of Cassandra dataCenter-2. Valid values are `Subscription`, `PayAsYouGo`. System default to `PayAsYouGo`.
        :param pulumi.Input[str] vswitch_id: The vswitch_id of dataCenter-2; must be different from the vswitch_id of dc-1 and cannot be empty.
        :param pulumi.Input[bool] auto_renew: Auto renew of dataCenter-2, `true` or `false`. System default to `false`, valid when pay_type = Subscription.
        :param pulumi.Input[int] auto_renew_period: Period of dataCenter-2 auto renew, if auto renew is `true`, one of `1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36, 60`, valid when pay_type = Subscription. Unit: month.
        :param pulumi.Input[str] data_center_name: Cassandra dataCenter-2 name. Length must be 2~128 characters long. Only Chinese characters, English letters, numbers, period `.`, underline `_`, or dash `-` are permitted.
        :param pulumi.Input[int] disk_size: User-defined Cassandra dataCenter one core node's storage space. Unit: GB. Value range:
               - Custom storage space; value range: [160, 2000].
               - 80-GB increments.
        :param pulumi.Input[str] disk_type: The disk type of Cassandra dataCenter-2. Valid values are `cloud_ssd`, `cloud_efficiency`, `local_hdd_pro`, `local_ssd_pro`, local_disk size is fixed.
        :param pulumi.Input[bool] enable_public: presumably toggles a public network endpoint for the data center — not documented upstream; confirm against provider docs.
        :param pulumi.Input[int] period: presumably the purchase duration for `Subscription` pay type — confirm against provider docs.
        :param pulumi.Input[str] period_unit: presumably the unit of `period` — confirm against provider docs.
        :param pulumi.Input[str] zone_id: The Zone to launch the Cassandra dataCenter-2. If vswitch_id is not empty, this zone_id can be "" or consistent.
        """
        # Required properties are stored unconditionally.
        pulumi.set(__self__, "cluster_id", cluster_id)
        pulumi.set(__self__, "instance_type", instance_type)
        pulumi.set(__self__, "node_count", node_count)
        pulumi.set(__self__, "pay_type", pay_type)
        pulumi.set(__self__, "vswitch_id", vswitch_id)
        # Optional properties are stored only when a value was supplied, so
        # unset keys remain absent from the input bag.
        if auto_renew is not None:
            pulumi.set(__self__, "auto_renew", auto_renew)
        if auto_renew_period is not None:
            pulumi.set(__self__, "auto_renew_period", auto_renew_period)
        if data_center_name is not None:
            pulumi.set(__self__, "data_center_name", data_center_name)
        if disk_size is not None:
            pulumi.set(__self__, "disk_size", disk_size)
        if disk_type is not None:
            pulumi.set(__self__, "disk_type", disk_type)
        if enable_public is not None:
            pulumi.set(__self__, "enable_public", enable_public)
        if period is not None:
            pulumi.set(__self__, "period", period)
        if period_unit is not None:
            pulumi.set(__self__, "period_unit", period_unit)
        if zone_id is not None:
            pulumi.set(__self__, "zone_id", zone_id)

    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> pulumi.Input[str]:
        """
        Cassandra cluster id of dataCenter-2 belongs to.
        """
        return pulumi.get(self, "cluster_id")

    @cluster_id.setter
    def cluster_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "cluster_id", value)

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> pulumi.Input[str]:
        """
        Instance specification. See [Instance specifications](https://help.aliyun.com/document_detail/157445.html). Or you can call describeInstanceType api.
        """
        return pulumi.get(self, "instance_type")

    @instance_type.setter
    def instance_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_type", value)

    @property
    @pulumi.getter(name="nodeCount")
    def node_count(self) -> pulumi.Input[int]:
        """
        The node count of Cassandra dataCenter-2, default to 2.
        """
        return pulumi.get(self, "node_count")

    @node_count.setter
    def node_count(self, value: pulumi.Input[int]):
        pulumi.set(self, "node_count", value)

    @property
    @pulumi.getter(name="payType")
    def pay_type(self) -> pulumi.Input[str]:
        """
        The pay type of Cassandra dataCenter-2. Valid values are `Subscription`, `PayAsYouGo`. System default to `PayAsYouGo`.
        """
        return pulumi.get(self, "pay_type")

    @pay_type.setter
    def pay_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "pay_type", value)

    @property
    @pulumi.getter(name="vswitchId")
    def vswitch_id(self) -> pulumi.Input[str]:
        """
        The vswitch_id of dataCenter-2; must be different from the vswitch_id of dc-1 and cannot be empty.
        """
        return pulumi.get(self, "vswitch_id")

    @vswitch_id.setter
    def vswitch_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "vswitch_id", value)

    @property
    @pulumi.getter(name="autoRenew")
    def auto_renew(self) -> Optional[pulumi.Input[bool]]:
        """
        Auto renew of dataCenter-2, `true` or `false`. System default to `false`, valid when pay_type = Subscription.
        """
        return pulumi.get(self, "auto_renew")

    @auto_renew.setter
    def auto_renew(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "auto_renew", value)

    @property
    @pulumi.getter(name="autoRenewPeriod")
    def auto_renew_period(self) -> Optional[pulumi.Input[int]]:
        """
        Period of dataCenter-2 auto renew, if auto renew is `true`, one of `1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36, 60`, valid when pay_type = Subscription. Unit: month.
        """
        return pulumi.get(self, "auto_renew_period")

    @auto_renew_period.setter
    def auto_renew_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "auto_renew_period", value)

    @property
    @pulumi.getter(name="dataCenterName")
    def data_center_name(self) -> Optional[pulumi.Input[str]]:
        """
        Cassandra dataCenter-2 name. Length must be 2~128 characters long. Only Chinese characters, English letters, numbers, period `.`, underline `_`, or dash `-` are permitted.
        """
        return pulumi.get(self, "data_center_name")

    @data_center_name.setter
    def data_center_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_center_name", value)

    @property
    @pulumi.getter(name="diskSize")
    def disk_size(self) -> Optional[pulumi.Input[int]]:
        """
        User-defined Cassandra dataCenter one core node's storage space. Unit: GB. Value range:
        - Custom storage space; value range: [160, 2000].
        - 80-GB increments.
        """
        return pulumi.get(self, "disk_size")

    @disk_size.setter
    def disk_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_size", value)

    @property
    @pulumi.getter(name="diskType")
    def disk_type(self) -> Optional[pulumi.Input[str]]:
        """
        The disk type of Cassandra dataCenter-2. Valid values are `cloud_ssd`, `cloud_efficiency`, `local_hdd_pro`, `local_ssd_pro`, local_disk size is fixed.
        """
        return pulumi.get(self, "disk_type")

    @disk_type.setter
    def disk_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "disk_type", value)

    @property
    @pulumi.getter(name="enablePublic")
    def enable_public(self) -> Optional[pulumi.Input[bool]]:
        """
        Presumably toggles a public network endpoint for the data center — not documented upstream; confirm against provider docs.
        """
        return pulumi.get(self, "enable_public")

    @enable_public.setter
    def enable_public(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_public", value)

    @property
    @pulumi.getter
    def period(self) -> Optional[pulumi.Input[int]]:
        """
        Presumably the purchase duration for `Subscription` pay type — confirm against provider docs.
        """
        return pulumi.get(self, "period")

    @period.setter
    def period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "period", value)

    @property
    @pulumi.getter(name="periodUnit")
    def period_unit(self) -> Optional[pulumi.Input[str]]:
        """
        Presumably the unit of `period` — confirm against provider docs.
        """
        return pulumi.get(self, "period_unit")

    @period_unit.setter
    def period_unit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "period_unit", value)

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> Optional[pulumi.Input[str]]:
        """
        The Zone to launch the Cassandra dataCenter-2. If vswitch_id is not empty, this zone_id can be "" or consistent.
        """
        return pulumi.get(self, "zone_id")

    @zone_id.setter
    def zone_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zone_id", value)
@pulumi.input_type
class _DataCenterState:
    # NOTE(review): generated by the Pulumi Terraform Bridge (tfgen);
    # @pulumi.input_type introspects the property/setter structure below.
    # Unlike DataCenterArgs, every field here is optional, and this class also
    # carries provider-computed outputs (data_center_id, public_points, status)
    # so it can describe the full persisted state of an existing resource.
    def __init__(__self__, *,
                 auto_renew: Optional[pulumi.Input[bool]] = None,
                 auto_renew_period: Optional[pulumi.Input[int]] = None,
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 data_center_id: Optional[pulumi.Input[str]] = None,
                 data_center_name: Optional[pulumi.Input[str]] = None,
                 disk_size: Optional[pulumi.Input[int]] = None,
                 disk_type: Optional[pulumi.Input[str]] = None,
                 enable_public: Optional[pulumi.Input[bool]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 node_count: Optional[pulumi.Input[int]] = None,
                 pay_type: Optional[pulumi.Input[str]] = None,
                 period: Optional[pulumi.Input[int]] = None,
                 period_unit: Optional[pulumi.Input[str]] = None,
                 public_points: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 vswitch_id: Optional[pulumi.Input[str]] = None,
                 zone_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering DataCenter resources.
        :param pulumi.Input[bool] auto_renew: Auto renew of dataCenter-2, `true` or `false`. System default to `false`, valid when pay_type = Subscription.
        :param pulumi.Input[int] auto_renew_period: Period of dataCenter-2 auto renew, if auto renew is `true`, one of `1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36, 60`, valid when pay_type = Subscription. Unit: month.
        :param pulumi.Input[str] cluster_id: Cassandra cluster id of dataCenter-2 belongs to.
        :param pulumi.Input[str] data_center_id: presumably the provider-assigned id of the data center — confirm against provider docs.
        :param pulumi.Input[str] data_center_name: Cassandra dataCenter-2 name. Length must be 2~128 characters long. Only Chinese characters, English letters, numbers, period `.`, underline `_`, or dash `-` are permitted.
        :param pulumi.Input[int] disk_size: User-defined Cassandra dataCenter one core node's storage space. Unit: GB. Value range:
               - Custom storage space; value range: [160, 2000].
               - 80-GB increments.
        :param pulumi.Input[str] disk_type: The disk type of Cassandra dataCenter-2. Valid values are `cloud_ssd`, `cloud_efficiency`, `local_hdd_pro`, `local_ssd_pro`, local_disk size is fixed.
        :param pulumi.Input[str] instance_type: Instance specification. See [Instance specifications](https://help.aliyun.com/document_detail/157445.html). Or you can call describeInstanceType api.
        :param pulumi.Input[int] node_count: The node count of Cassandra dataCenter-2, default to 2.
        :param pulumi.Input[str] pay_type: The pay type of Cassandra dataCenter-2. Valid values are `Subscription`, `PayAsYouGo`. System default to `PayAsYouGo`.
        :param pulumi.Input[str] vswitch_id: The vswitch_id of dataCenter-2; must be different from the vswitch_id of dc-1 and cannot be empty.
        :param pulumi.Input[str] zone_id: The Zone to launch the Cassandra dataCenter-2. If vswitch_id is not empty, this zone_id can be "" or consistent.
        """
        # Every property is optional: only keys with supplied values are set,
        # so absent state fields stay absent from the bag.
        if auto_renew is not None:
            pulumi.set(__self__, "auto_renew", auto_renew)
        if auto_renew_period is not None:
            pulumi.set(__self__, "auto_renew_period", auto_renew_period)
        if cluster_id is not None:
            pulumi.set(__self__, "cluster_id", cluster_id)
        if data_center_id is not None:
            pulumi.set(__self__, "data_center_id", data_center_id)
        if data_center_name is not None:
            pulumi.set(__self__, "data_center_name", data_center_name)
        if disk_size is not None:
            pulumi.set(__self__, "disk_size", disk_size)
        if disk_type is not None:
            pulumi.set(__self__, "disk_type", disk_type)
        if enable_public is not None:
            pulumi.set(__self__, "enable_public", enable_public)
        if instance_type is not None:
            pulumi.set(__self__, "instance_type", instance_type)
        if node_count is not None:
            pulumi.set(__self__, "node_count", node_count)
        if pay_type is not None:
            pulumi.set(__self__, "pay_type", pay_type)
        if period is not None:
            pulumi.set(__self__, "period", period)
        if period_unit is not None:
            pulumi.set(__self__, "period_unit", period_unit)
        if public_points is not None:
            pulumi.set(__self__, "public_points", public_points)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if vswitch_id is not None:
            pulumi.set(__self__, "vswitch_id", vswitch_id)
        if zone_id is not None:
            pulumi.set(__self__, "zone_id", zone_id)

    @property
    @pulumi.getter(name="autoRenew")
    def auto_renew(self) -> Optional[pulumi.Input[bool]]:
        """
        Auto renew of dataCenter-2, `true` or `false`. System default to `false`, valid when pay_type = Subscription.
        """
        return pulumi.get(self, "auto_renew")

    @auto_renew.setter
    def auto_renew(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "auto_renew", value)

    @property
    @pulumi.getter(name="autoRenewPeriod")
    def auto_renew_period(self) -> Optional[pulumi.Input[int]]:
        """
        Period of dataCenter-2 auto renew, if auto renew is `true`, one of `1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36, 60`, valid when pay_type = Subscription. Unit: month.
        """
        return pulumi.get(self, "auto_renew_period")

    @auto_renew_period.setter
    def auto_renew_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "auto_renew_period", value)

    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> Optional[pulumi.Input[str]]:
        """
        Cassandra cluster id of dataCenter-2 belongs to.
        """
        return pulumi.get(self, "cluster_id")

    @cluster_id.setter
    def cluster_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cluster_id", value)

    @property
    @pulumi.getter(name="dataCenterId")
    def data_center_id(self) -> Optional[pulumi.Input[str]]:
        """
        Presumably the provider-assigned id of the data center — confirm against provider docs.
        """
        return pulumi.get(self, "data_center_id")

    @data_center_id.setter
    def data_center_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_center_id", value)

    @property
    @pulumi.getter(name="dataCenterName")
    def data_center_name(self) -> Optional[pulumi.Input[str]]:
        """
        Cassandra dataCenter-2 name. Length must be 2~128 characters long. Only Chinese characters, English letters, numbers, period `.`, underline `_`, or dash `-` are permitted.
        """
        return pulumi.get(self, "data_center_name")

    @data_center_name.setter
    def data_center_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_center_name", value)

    @property
    @pulumi.getter(name="diskSize")
    def disk_size(self) -> Optional[pulumi.Input[int]]:
        """
        User-defined Cassandra dataCenter one core node's storage space. Unit: GB. Value range:
        - Custom storage space; value range: [160, 2000].
        - 80-GB increments.
        """
        return pulumi.get(self, "disk_size")

    @disk_size.setter
    def disk_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_size", value)

    @property
    @pulumi.getter(name="diskType")
    def disk_type(self) -> Optional[pulumi.Input[str]]:
        """
        The disk type of Cassandra dataCenter-2. Valid values are `cloud_ssd`, `cloud_efficiency`, `local_hdd_pro`, `local_ssd_pro`, local_disk size is fixed.
        """
        return pulumi.get(self, "disk_type")

    @disk_type.setter
    def disk_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "disk_type", value)

    @property
    @pulumi.getter(name="enablePublic")
    def enable_public(self) -> Optional[pulumi.Input[bool]]:
        """
        Presumably toggles a public network endpoint for the data center — not documented upstream; confirm against provider docs.
        """
        return pulumi.get(self, "enable_public")

    @enable_public.setter
    def enable_public(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_public", value)

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> Optional[pulumi.Input[str]]:
        """
        Instance specification. See [Instance specifications](https://help.aliyun.com/document_detail/157445.html). Or you can call describeInstanceType api.
        """
        return pulumi.get(self, "instance_type")

    @instance_type.setter
    def instance_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_type", value)

    @property
    @pulumi.getter(name="nodeCount")
    def node_count(self) -> Optional[pulumi.Input[int]]:
        """
        The node count of Cassandra dataCenter-2, default to 2.
        """
        return pulumi.get(self, "node_count")

    @node_count.setter
    def node_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "node_count", value)

    @property
    @pulumi.getter(name="payType")
    def pay_type(self) -> Optional[pulumi.Input[str]]:
        """
        The pay type of Cassandra dataCenter-2. Valid values are `Subscription`, `PayAsYouGo`. System default to `PayAsYouGo`.
        """
        return pulumi.get(self, "pay_type")

    @pay_type.setter
    def pay_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pay_type", value)

    @property
    @pulumi.getter
    def period(self) -> Optional[pulumi.Input[int]]:
        """
        Presumably the purchase duration for `Subscription` pay type — confirm against provider docs.
        """
        return pulumi.get(self, "period")

    @period.setter
    def period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "period", value)

    @property
    @pulumi.getter(name="periodUnit")
    def period_unit(self) -> Optional[pulumi.Input[str]]:
        """
        Presumably the unit of `period` — confirm against provider docs.
        """
        return pulumi.get(self, "period_unit")

    @period_unit.setter
    def period_unit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "period_unit", value)

    @property
    @pulumi.getter(name="publicPoints")
    def public_points(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Presumably the public endpoint addresses of the data center (provider-computed) — confirm against provider docs.
        """
        return pulumi.get(self, "public_points")

    @public_points.setter
    def public_points(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "public_points", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        Presumably the lifecycle status of the data center (provider-computed) — confirm against provider docs.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter(name="vswitchId")
    def vswitch_id(self) -> Optional[pulumi.Input[str]]:
        """
        The vswitch_id of dataCenter-2; must be different from the vswitch_id of dc-1 and cannot be empty.
        """
        return pulumi.get(self, "vswitch_id")

    @vswitch_id.setter
    def vswitch_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vswitch_id", value)

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> Optional[pulumi.Input[str]]:
        """
        The Zone to launch the Cassandra dataCenter-2. If vswitch_id is not empty, this zone_id can be "" or consistent.
        """
        return pulumi.get(self, "zone_id")

    @zone_id.setter
    def zone_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zone_id", value)
class DataCenter(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 auto_renew: Optional[pulumi.Input[bool]] = None,
                 auto_renew_period: Optional[pulumi.Input[int]] = None,
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 data_center_name: Optional[pulumi.Input[str]] = None,
                 disk_size: Optional[pulumi.Input[int]] = None,
                 disk_type: Optional[pulumi.Input[str]] = None,
                 enable_public: Optional[pulumi.Input[bool]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 node_count: Optional[pulumi.Input[int]] = None,
                 pay_type: Optional[pulumi.Input[str]] = None,
                 period: Optional[pulumi.Input[int]] = None,
                 period_unit: Optional[pulumi.Input[str]] = None,
                 vswitch_id: Optional[pulumi.Input[str]] = None,
                 zone_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Typing-only overload: individual keyword arguments. The real
        # implementation is _internal_init, dispatched from __init__.
        """
        Provides a Cassandra dataCenter resource supports replica set dataCenters only. The Cassandra provides stable, reliable, and automatic scalable database services.
        It offers a full range of database solutions, such as disaster recovery, backup, recovery, monitoring, and alarms.
        You can see detail product introduction [here](https://www.alibabacloud.com/help/product/49055.htm).
        > **NOTE:** Available in 1.88.0+.
        > **NOTE:** Creating a cassandra dataCenter needs a clusterId, so create a cassandra cluster first.
        > **NOTE:** The following regions support create Vpc network Cassandra cluster.
        The official website mark more regions. Or you can call [DescribeRegions](https://help.aliyun.com/document_detail/157540.html).
        > **NOTE:** Creating a Cassandra dataCenter or changing dataCenter type and storage would cost 30 minutes. Please make full preparation.
        ## Example Usage
        ### Create a cassandra dataCenter
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default_cluster = alicloud.cassandra.Cluster("defaultCluster",
            cluster_name="cassandra-cluster-name-tf",
            data_center_name="dc-1",
            auto_renew=False,
            instance_type="cassandra.c.large",
            major_version="3.11",
            node_count=2,
            pay_type="PayAsYouGo",
            vswitch_id="vsw-xxxx1",
            disk_size=160,
            disk_type="cloud_ssd",
            maintain_start_time="18:00Z",
            maintain_end_time="20:00Z",
            ip_white="127.0.0.1")
        default_data_center = alicloud.cassandra.DataCenter("defaultDataCenter",
            cluster_id=default_cluster.id,
            data_center_name="dc-2",
            auto_renew=False,
            instance_type="cassandra.c.large",
            node_count=2,
            pay_type="PayAsYouGo",
            vswitch_id="vsw-xxxx2",
            disk_size=160,
            disk_type="cloud_ssd")
        ```
        This is an example for a class netType dataCenter. You can find more detail in the examples/cassandra_data_center dir.
        ## Import
        If you need full function, please import Cassandra cluster first. Cassandra dataCenter can be imported using the dcId:clusterId, e.g.
        ```sh
        $ pulumi import alicloud:cassandra/dataCenter:DataCenter dc_2 cn-shenxxxx-x:cds-wz933ryoaurxxxxx
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] auto_renew: Auto renew of dataCenter-2, `true` or `false`. System default to `false`, valid when pay_type = Subscription.
        :param pulumi.Input[int] auto_renew_period: Period of dataCenter-2 auto renew, if auto renew is `true`, one of `1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36, 60`, valid when pay_type = Subscription. Unit: month.
        :param pulumi.Input[str] cluster_id: Cassandra cluster id of dataCenter-2 belongs to.
        :param pulumi.Input[str] data_center_name: Cassandra dataCenter-2 name. Length must be 2~128 characters long. Only Chinese characters, English letters, numbers, period `.`, underline `_`, or dash `-` are permitted.
        :param pulumi.Input[int] disk_size: User-defined Cassandra dataCenter one core node's storage space. Unit: GB. Value range:
               - Custom storage space; value range: [160, 2000].
               - 80-GB increments.
        :param pulumi.Input[str] disk_type: The disk type of Cassandra dataCenter-2. Valid values are `cloud_ssd`, `cloud_efficiency`, `local_hdd_pro`, `local_ssd_pro`, local_disk size is fixed.
        :param pulumi.Input[str] instance_type: Instance specification. See [Instance specifications](https://help.aliyun.com/document_detail/157445.html). Or you can call describeInstanceType api.
        :param pulumi.Input[int] node_count: The node count of Cassandra dataCenter-2, default to 2.
        :param pulumi.Input[str] pay_type: The pay type of Cassandra dataCenter-2. Valid values are `Subscription`, `PayAsYouGo`. System default to `PayAsYouGo`.
        :param pulumi.Input[str] vswitch_id: The vswitch_id of dataCenter-2; must be different from the vswitch_id of dc-1 and cannot be empty.
        :param pulumi.Input[str] zone_id: The Zone to launch the Cassandra dataCenter-2. If vswitch_id is not empty, this zone_id can be "" or consistent.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DataCenterArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        # Typing-only overload: a single typed DataCenterArgs bag. The real
        # implementation is _internal_init, dispatched from __init__.
        """
        Provides a Cassandra dataCenter resource supports replica set dataCenters only. The Cassandra provides stable, reliable, and automatic scalable database services.
        It offers a full range of database solutions, such as disaster recovery, backup, recovery, monitoring, and alarms.
        You can see detail product introduction [here](https://www.alibabacloud.com/help/product/49055.htm).
        > **NOTE:** Available in 1.88.0+.
        > **NOTE:** Creating a cassandra dataCenter needs a clusterId, so create a cassandra cluster first.
        > **NOTE:** The following regions support create Vpc network Cassandra cluster.
        The official website mark more regions. Or you can call [DescribeRegions](https://help.aliyun.com/document_detail/157540.html).
        > **NOTE:** Creating a Cassandra dataCenter or changing dataCenter type and storage would cost 30 minutes. Please make full preparation.
        ## Example Usage
        ### Create a cassandra dataCenter
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default_cluster = alicloud.cassandra.Cluster("defaultCluster",
            cluster_name="cassandra-cluster-name-tf",
            data_center_name="dc-1",
            auto_renew=False,
            instance_type="cassandra.c.large",
            major_version="3.11",
            node_count=2,
            pay_type="PayAsYouGo",
            vswitch_id="vsw-xxxx1",
            disk_size=160,
            disk_type="cloud_ssd",
            maintain_start_time="18:00Z",
            maintain_end_time="20:00Z",
            ip_white="127.0.0.1")
        default_data_center = alicloud.cassandra.DataCenter("defaultDataCenter",
            cluster_id=default_cluster.id,
            data_center_name="dc-2",
            auto_renew=False,
            instance_type="cassandra.c.large",
            node_count=2,
            pay_type="PayAsYouGo",
            vswitch_id="vsw-xxxx2",
            disk_size=160,
            disk_type="cloud_ssd")
        ```
        This is an example for a class netType dataCenter. You can find more detail in the examples/cassandra_data_center dir.
        ## Import
        If you need full function, please import Cassandra cluster first. Cassandra dataCenter can be imported using the dcId:clusterId, e.g.
        ```sh
        $ pulumi import alicloud:cassandra/dataCenter:DataCenter dc_2 cn-shenxxxx-x:cds-wz933ryoaurxxxxx
        ```
        :param str resource_name: The name of the resource.
        :param DataCenterArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DataCenterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 auto_renew: Optional[pulumi.Input[bool]] = None,
                 auto_renew_period: Optional[pulumi.Input[int]] = None,
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 data_center_name: Optional[pulumi.Input[str]] = None,
                 disk_size: Optional[pulumi.Input[int]] = None,
                 disk_type: Optional[pulumi.Input[str]] = None,
                 enable_public: Optional[pulumi.Input[bool]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 node_count: Optional[pulumi.Input[int]] = None,
                 pay_type: Optional[pulumi.Input[str]] = None,
                 period: Optional[pulumi.Input[int]] = None,
                 period_unit: Optional[pulumi.Input[str]] = None,
                 vswitch_id: Optional[pulumi.Input[str]] = None,
                 zone_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, builds the property bag, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ must not be pre-supplied
            # (it is reserved for the get() lookup path, which sets opts.id).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = DataCenterArgs.__new__(DataCenterArgs)
            __props__.__dict__["auto_renew"] = auto_renew
            __props__.__dict__["auto_renew_period"] = auto_renew_period
            # Required properties are enforced here rather than by the
            # signature; `opts.urn` being set means the engine is rehydrating
            # an existing resource, so the check is skipped in that case.
            if cluster_id is None and not opts.urn:
                raise TypeError("Missing required property 'cluster_id'")
            __props__.__dict__["cluster_id"] = cluster_id
            __props__.__dict__["data_center_name"] = data_center_name
            __props__.__dict__["disk_size"] = disk_size
            __props__.__dict__["disk_type"] = disk_type
            __props__.__dict__["enable_public"] = enable_public
            if instance_type is None and not opts.urn:
                raise TypeError("Missing required property 'instance_type'")
            __props__.__dict__["instance_type"] = instance_type
            if node_count is None and not opts.urn:
                raise TypeError("Missing required property 'node_count'")
            __props__.__dict__["node_count"] = node_count
            if pay_type is None and not opts.urn:
                raise TypeError("Missing required property 'pay_type'")
            __props__.__dict__["pay_type"] = pay_type
            __props__.__dict__["period"] = period
            __props__.__dict__["period_unit"] = period_unit
            if vswitch_id is None and not opts.urn:
                raise TypeError("Missing required property 'vswitch_id'")
            __props__.__dict__["vswitch_id"] = vswitch_id
            __props__.__dict__["zone_id"] = zone_id
            # Output-only properties start as None; the provider fills them in.
            __props__.__dict__["data_center_id"] = None
            __props__.__dict__["public_points"] = None
            __props__.__dict__["status"] = None
        super(DataCenter, __self__).__init__(
            'alicloud:cassandra/dataCenter:DataCenter',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            auto_renew: Optional[pulumi.Input[bool]] = None,
            auto_renew_period: Optional[pulumi.Input[int]] = None,
            cluster_id: Optional[pulumi.Input[str]] = None,
            data_center_id: Optional[pulumi.Input[str]] = None,
            data_center_name: Optional[pulumi.Input[str]] = None,
            disk_size: Optional[pulumi.Input[int]] = None,
            disk_type: Optional[pulumi.Input[str]] = None,
            enable_public: Optional[pulumi.Input[bool]] = None,
            instance_type: Optional[pulumi.Input[str]] = None,
            node_count: Optional[pulumi.Input[int]] = None,
            pay_type: Optional[pulumi.Input[str]] = None,
            period: Optional[pulumi.Input[int]] = None,
            period_unit: Optional[pulumi.Input[str]] = None,
            public_points: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            status: Optional[pulumi.Input[str]] = None,
            vswitch_id: Optional[pulumi.Input[str]] = None,
            zone_id: Optional[pulumi.Input[str]] = None) -> 'DataCenter':
        """
        Get an existing DataCenter resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] auto_renew: Auto renew of dataCenter-2, `true` or `false`. System default to `false`, valid when pay_type = Subscription.
        :param pulumi.Input[int] auto_renew_period: Period of dataCenter-2 auto renew, if auto renew is `true`, one of `1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36, 60`, valid when pay_type = Subscription. Unit: month.
        :param pulumi.Input[str] cluster_id: Cassandra cluster id of dataCenter-2 belongs to.
        :param pulumi.Input[str] data_center_name: Cassandra dataCenter-2 name. Length must be 2~128 characters long. Only Chinese characters, English letters, numbers, period `.`, underline `_`, or dash `-` are permitted.
        :param pulumi.Input[int] disk_size: User-defined Cassandra dataCenter one core node's storage space. Unit: GB. Value range:
               - Custom storage space; value range: [160, 2000].
               - 80-GB increments.
        :param pulumi.Input[str] disk_type: The disk type of Cassandra dataCenter-2. Valid values are `cloud_ssd`, `cloud_efficiency`, `local_hdd_pro`, `local_ssd_pro`, local_disk size is fixed.
        :param pulumi.Input[str] instance_type: Instance specification. See [Instance specifications](https://help.aliyun.com/document_detail/157445.html). Or you can call describeInstanceType api.
        :param pulumi.Input[int] node_count: The node count of Cassandra dataCenter-2, default to 2.
        :param pulumi.Input[str] pay_type: The pay type of Cassandra dataCenter-2. Valid values are `Subscription`, `PayAsYouGo`. System default to `PayAsYouGo`.
        :param pulumi.Input[str] vswitch_id: The vswitch_id of dataCenter-2; must be different from the vswitch_id of dc-1 and cannot be empty.
        :param pulumi.Input[str] zone_id: The Zone to launch the Cassandra dataCenter-2. If vswitch_id is not empty, this zone_id can be "" or consistent.
        """
        # Merging id into opts routes _internal_init down its lookup path
        # (opts.id set), so the state bag below is used as-is.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Unlike the create path, all fields (including provider-computed
        # ones) are copied verbatim into a _DataCenterState bag.
        __props__ = _DataCenterState.__new__(_DataCenterState)
        __props__.__dict__["auto_renew"] = auto_renew
        __props__.__dict__["auto_renew_period"] = auto_renew_period
        __props__.__dict__["cluster_id"] = cluster_id
        __props__.__dict__["data_center_id"] = data_center_id
        __props__.__dict__["data_center_name"] = data_center_name
        __props__.__dict__["disk_size"] = disk_size
        __props__.__dict__["disk_type"] = disk_type
        __props__.__dict__["enable_public"] = enable_public
        __props__.__dict__["instance_type"] = instance_type
        __props__.__dict__["node_count"] = node_count
        __props__.__dict__["pay_type"] = pay_type
        __props__.__dict__["period"] = period
        __props__.__dict__["period_unit"] = period_unit
        __props__.__dict__["public_points"] = public_points
        __props__.__dict__["status"] = status
        __props__.__dict__["vswitch_id"] = vswitch_id
        __props__.__dict__["zone_id"] = zone_id
        return DataCenter(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="autoRenew")
def auto_renew(self) -> pulumi.Output[Optional[bool]]:
"""
Auto renew of dataCenter-2,`true` or `false`. System default to `false`, valid when pay_type = Subscription.
"""
return pulumi.get(self, "auto_renew")
@property
@pulumi.getter(name="autoRenewPeriod")
def auto_renew_period(self) -> pulumi.Output[Optional[int]]:
"""
Period of dataCenter-2 auto renew, if auto renew is `true`, one of `1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36, 60`, valid when pay_type = Subscription. Unit: month.
"""
return pulumi.get(self, "auto_renew_period")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[str]:
"""
Cassandra cluster id of dataCenter-2 belongs to.
"""
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="dataCenterId")
def data_center_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "data_center_id")
@property
@pulumi.getter(name="dataCenterName")
def data_center_name(self) -> pulumi.Output[Optional[str]]:
"""
Cassandra dataCenter-2 name. Length must be 2~128 characters long. Only Chinese characters, English letters, numbers, period `.`, underline `_`, or dash `-` are permitted.
"""
return pulumi.get(self, "data_center_name")
@property
@pulumi.getter(name="diskSize")
def disk_size(self) -> pulumi.Output[Optional[int]]:
"""
User-defined Cassandra dataCenter one core node's storage space.Unit: GB. Value range:
- Custom storage space; value range: [160, 2000].
- 80-GB increments.
"""
return pulumi.get(self, "disk_size")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> pulumi.Output[Optional[str]]:
"""
The disk type of Cassandra dataCenter-2. Valid values are `cloud_ssd`, `cloud_efficiency`, `local_hdd_pro`, `local_ssd_pro`, local_disk size is fixed.
"""
return pulumi.get(self, "disk_type")
@property
@pulumi.getter(name="enablePublic")
def enable_public(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "enable_public")
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> pulumi.Output[str]:
"""
Instance specification. See [Instance specifications](https://help.aliyun.com/document_detail/157445.html). Or you can call describeInstanceType api.
"""
return pulumi.get(self, "instance_type")
@property
@pulumi.getter(name="nodeCount")
def node_count(self) -> pulumi.Output[int]:
"""
The node count of Cassandra dataCenter-2, default to 2.
"""
return pulumi.get(self, "node_count")
@property
@pulumi.getter(name="payType")
def pay_type(self) -> pulumi.Output[str]:
"""
The pay type of Cassandra dataCenter-2. Valid values are `Subscription`, `PayAsYouGo`. System default to `PayAsYouGo`.
"""
return pulumi.get(self, "pay_type")
@property
@pulumi.getter
def period(self) -> pulumi.Output[Optional[int]]:
return pulumi.get(self, "period")
@property
@pulumi.getter(name="periodUnit")
def period_unit(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "period_unit")
@property
@pulumi.getter(name="publicPoints")
def public_points(self) -> pulumi.Output[Sequence[str]]:
return pulumi.get(self, "public_points")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
return pulumi.get(self, "status")
@property
@pulumi.getter(name="vswitchId")
def vswitch_id(self) -> pulumi.Output[str]:
"""
The vswitch_id of dataCenter-2, mast different of vswitch_id(dc-1), can not empty.
"""
return pulumi.get(self, "vswitch_id")
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> pulumi.Output[str]:
"""
The Zone to launch the Cassandra dataCenter-2. If vswitch_id is not empty, this zone_id can be "" or consistent.
"""
return pulumi.get(self, "zone_id")
| 47.15215
| 222
| 0.64807
| 5,395
| 42,767
| 4.909917
| 0.060797
| 0.078901
| 0.088225
| 0.056476
| 0.928536
| 0.912379
| 0.897958
| 0.8761
| 0.861414
| 0.840991
| 0
| 0.013213
| 0.239063
| 42,767
| 906
| 223
| 47.204194
| 0.800756
| 0.377511
| 0
| 0.766859
| 1
| 0
| 0.097594
| 0.001631
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165703
| false
| 0.001927
| 0.009634
| 0.028902
| 0.27553
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ab1286bc59f638f8c44cf1fbd29ab0caadce1723
| 14,727
|
py
|
Python
|
conans/test/unittests/client/generators/b2_test.py
|
matthiasng/conan
|
634eadc319da928084633a344d42785edccb8d6c
|
[
"MIT"
] | 2
|
2019-01-09T10:01:29.000Z
|
2019-01-09T10:01:31.000Z
|
conans/test/unittests/client/generators/b2_test.py
|
matthiasng/conan
|
634eadc319da928084633a344d42785edccb8d6c
|
[
"MIT"
] | 1
|
2019-01-09T10:09:41.000Z
|
2019-01-09T10:09:41.000Z
|
conans/test/unittests/client/generators/b2_test.py
|
matthiasng/conan
|
634eadc319da928084633a344d42785edccb8d6c
|
[
"MIT"
] | null | null | null |
import unittest
from conans.client.conf import get_default_settings_yml
from conans.client.generators.b2 import B2Generator
from conans.model.build_info import CppInfo
from conans.model.conan_file import ConanFile
from conans.model.env_info import EnvValues
from conans.model.ref import ConanFileReference
from conans.model.settings import Settings
from conans.test.utils.tools import TestBufferConanOutput
class B2GeneratorTest(unittest.TestCase):
def b2_test(self):
settings = Settings.loads(get_default_settings_yml())
settings.os = "Linux"
settings.compiler = "gcc"
settings.compiler.version = "6.3"
settings.arch = "x86"
settings.build_type = "Release"
settings.cppstd = "gnu17"
conanfile = ConanFile(TestBufferConanOutput(), None)
conanfile.initialize(Settings({}), EnvValues())
conanfile.settings = settings
ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder1")
cpp_info.defines = ["MYDEFINE1"]
cpp_info.cflags.append("-Flag1=23")
cpp_info.version = "1.3"
cpp_info.description = "My cool description"
cpp_info.libs = ["MyLib1"]
conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder2")
cpp_info.libs = ["MyLib2"]
cpp_info.defines = ["MYDEFINE2"]
cpp_info.version = "2.3"
cpp_info.exelinkflags = ["-exelinkflag"]
cpp_info.sharedlinkflags = ["-sharedlinkflag"]
cpp_info.cxxflags = ["-cxxflag"]
cpp_info.public_deps = ["MyPkg"]
cpp_info.lib_paths.extend(["Path\\with\\slashes", "regular/path/to/dir"])
cpp_info.include_paths.extend(["other\\Path\\with\\slashes", "other/regular/path/to/dir"])
conanfile.deps_cpp_info.add(ref.name, cpp_info)
generator = B2Generator(conanfile)
content = {
'conanbuildinfo.jam': _main_buildinfo_full,
'conanbuildinfo-316f2f0b155dc874a672d40d98d93f95.jam':
_variation_full,
}
for ck, cv in generator.content.items():
self.assertEqual(cv, content[ck])
def b2_empty_settings_test(self):
conanfile = ConanFile(TestBufferConanOutput(), None)
conanfile.initialize(Settings({}), EnvValues())
generator = B2Generator(conanfile)
content = {
'conanbuildinfo.jam': _main_buildinfo_empty,
'conanbuildinfo-d41d8cd98f00b204e9800998ecf8427e.jam':
_variation_empty,
}
for ck, cv in generator.content.items():
self.assertEqual(cv, content[ck])
_main_buildinfo_full = '''\
#|
B2 definitions for Conan packages. This is a generated file.
Edit the corresponding conanfile.txt instead.
|#
import path ;
import project ;
import modules ;
import feature ;
local base-project = [ project.current ] ;
local base-project-mod = [ $(base-project).project-module ] ;
local base-project-location = [ project.attribute $(base-project-mod) location ] ;
rule project-define ( id )
{
id = $(id:L) ;
local saved-project = [ modules.peek project : .base-project ] ;
local id-location = [ path.join $(base-project-location) $(id) ] ;
local id-mod = [ project.load $(id-location) : synthesize ] ;
project.initialize $(id-mod) : $(id-location) ;
project.inherit-attributes $(id-mod) : $(base-project-mod) ;
local attributes = [ project.attributes $(id-mod) ] ;
$(attributes).set parent-module : $(base-project-mod) : exact ;
modules.poke $(base-project-mod) : $(id)-mod : $(id-mod) ;
modules.poke [ CALLER_MODULE ] : $(id)-mod : $(id-mod) ;
modules.poke project : .base-project : $(saved-project) ;
IMPORT $(__name__)
: constant-if call-in-project
: $(id-mod)
: constant-if call-in-project ;
if [ project.is-jamroot-module $(base-project-mod) ]
{
use-project /$(id) : $(id) ;
}
return $(id-mod) ;
}
rule constant-if ( name : value * )
{
if $(__define_constants__) && $(value)
{
call-in-project : constant $(name) : $(value) ;
modules.poke $(__name__) : $(name) : [ modules.peek $(base-project-mod) : $(name) ] ;
}
}
rule call-in-project ( project-mod ? : rule-name args * : * )
{
project-mod ?= $(base-project-mod) ;
project.push-current [ project.target $(project-mod) ] ;
local result = [ modules.call-in $(project-mod) :
$(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) :
$(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) :
$(19) ] ;
project.pop-current ;
return $(result) ;
}
rule include-conanbuildinfo ( cbi )
{
include $(cbi) ;
}
IMPORT $(__name__)
: project-define constant-if call-in-project include-conanbuildinfo
: $(base-project-mod)
: project-define constant-if call-in-project include-conanbuildinfo ;
if ! ( relwithdebinfo in [ feature.values variant ] )
{
variant relwithdebinfo : : <optimization>speed <debug-symbols>on <inlining>full <runtime-debugging>off ;
}
if ! ( minsizerel in [ feature.values variant ] )
{
variant minsizerel : : <optimization>space <debug-symbols>off <inlining>full <runtime-debugging>off ;
}
local __conanbuildinfo__ = [ GLOB $(__file__:D) : conanbuildinfo-*.jam : downcase ] ;
{
local __define_constants__ = yes ;
for local __cbi__ in $(__conanbuildinfo__)
{
call-in-project : include-conanbuildinfo $(__cbi__) ;
}
}
# mypkg
project-define mypkg ;
# mypkg2
project-define mypkg2 ;
{
local __define_targets__ = yes ;
for local __cbi__ in $(__conanbuildinfo__)
{
call-in-project : include-conanbuildinfo $(__cbi__) ;
}
}
'''
_variation_full = '''\
#|
B2 definitions for Conan packages. This is a generated file.
Edit the corresponding conanfile.txt instead.
|#
# global
constant-if rootpath(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
""
;
constant-if includedirs(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
"other/Path/with/slashes"
"other/regular/path/to/dir"
;
constant-if libdirs(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
"Path/with/slashes"
"regular/path/to/dir"
;
constant-if defines(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
"MYDEFINE2"
"MYDEFINE1"
;
constant-if cppflags(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
"-cxxflag"
;
constant-if cflags(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
"-Flag1=23"
;
constant-if sharedlinkflags(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
"-sharedlinkflag"
;
constant-if exelinkflags(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
"-exelinkflag"
;
constant-if requirements(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
<address-model>32
<architecture>x86
<cxxstd>17
<cxxstd:dialect>gnu
<target-os>linux
<toolset>gcc-6.3
<variant>release
;
constant-if usage-requirements(conan,32,x86,17,gnu,linux,gcc-6.3,release) :
<include>$(includedirs(conan,32,x86,17,gnu,linux,gcc-6.3,release))
<define>$(defines(conan,32,x86,17,gnu,linux,gcc-6.3,release))
<cflags>$(cflags(conan,32,x86,17,gnu,linux,gcc-6.3,release))
<cxxflags>$(cppflags(conan,32,x86,17,gnu,linux,gcc-6.3,release))
<link>shared:<linkflags>$(sharedlinkflags(conan,32,x86,17,gnu,linux,gcc-6.3,release))
;
# mypkg
constant-if rootpath(mypkg,32,x86,17,gnu,linux,gcc-6.3,release) :
"dummy_root_folder1"
;
constant-if defines(mypkg,32,x86,17,gnu,linux,gcc-6.3,release) :
"MYDEFINE1"
;
constant-if cflags(mypkg,32,x86,17,gnu,linux,gcc-6.3,release) :
"-Flag1=23"
;
constant-if requirements(mypkg,32,x86,17,gnu,linux,gcc-6.3,release) :
<address-model>32
<architecture>x86
<cxxstd>17
<cxxstd:dialect>gnu
<target-os>linux
<toolset>gcc-6.3
<variant>release
;
constant-if usage-requirements(mypkg,32,x86,17,gnu,linux,gcc-6.3,release) :
<include>$(includedirs(mypkg,32,x86,17,gnu,linux,gcc-6.3,release))
<define>$(defines(mypkg,32,x86,17,gnu,linux,gcc-6.3,release))
<cflags>$(cflags(mypkg,32,x86,17,gnu,linux,gcc-6.3,release))
<cxxflags>$(cppflags(mypkg,32,x86,17,gnu,linux,gcc-6.3,release))
<link>shared:<linkflags>$(sharedlinkflags(mypkg,32,x86,17,gnu,linux,gcc-6.3,release))
;
# mypkg2
constant-if rootpath(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
"dummy_root_folder2"
;
constant-if includedirs(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
"other/Path/with/slashes"
"other/regular/path/to/dir"
;
constant-if libdirs(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
"Path/with/slashes"
"regular/path/to/dir"
;
constant-if defines(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
"MYDEFINE2"
;
constant-if cppflags(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
"-cxxflag"
;
constant-if sharedlinkflags(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
"-sharedlinkflag"
;
constant-if exelinkflags(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
"-exelinkflag"
;
constant-if requirements(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
<address-model>32
<architecture>x86
<cxxstd>17
<cxxstd:dialect>gnu
<target-os>linux
<toolset>gcc-6.3
<variant>release
;
constant-if usage-requirements(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release) :
<include>$(includedirs(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release))
<define>$(defines(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release))
<cflags>$(cflags(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release))
<cxxflags>$(cppflags(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release))
<link>shared:<linkflags>$(sharedlinkflags(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release))
;
# mypkg
if $(__define_targets__) {
call-in-project $(mypkg-mod) : lib MyLib1
: ''' + '''
: <name>MyLib1 <search>$(libdirs(mypkg,32,x86,17,gnu,linux,gcc-6.3,release)) $(requirements(mypkg,32,x86,17,gnu,linux,gcc-6.3,release))
:
: $(usage-requirements(mypkg,32,x86,17,gnu,linux,gcc-6.3,release)) ;
call-in-project $(mypkg-mod) : explicit MyLib1 ; }
if $(__define_targets__) {
call-in-project $(mypkg-mod) : alias libs
: MyLib1
: $(requirements(mypkg,32,x86,17,gnu,linux,gcc-6.3,release))
:
: $(usage-requirements(mypkg,32,x86,17,gnu,linux,gcc-6.3,release)) ;
call-in-project $(mypkg-mod) : explicit libs ; }
# mypkg2
if $(__define_targets__) {
call-in-project $(mypkg2-mod) : lib MyLib2
: /MyPkg//libs
: <name>MyLib2 <search>$(libdirs(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release)) $(requirements(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release))
:
: $(usage-requirements(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release)) ;
call-in-project $(mypkg2-mod) : explicit MyLib2 ; }
if $(__define_targets__) {
call-in-project $(mypkg2-mod) : alias libs
: /MyPkg//libs MyLib2
: $(requirements(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release))
:
: $(usage-requirements(mypkg2,32,x86,17,gnu,linux,gcc-6.3,release)) ;
call-in-project $(mypkg2-mod) : explicit libs ; }
'''
_main_buildinfo_empty = '''\
#|
B2 definitions for Conan packages. This is a generated file.
Edit the corresponding conanfile.txt instead.
|#
import path ;
import project ;
import modules ;
import feature ;
local base-project = [ project.current ] ;
local base-project-mod = [ $(base-project).project-module ] ;
local base-project-location = [ project.attribute $(base-project-mod) location ] ;
rule project-define ( id )
{
id = $(id:L) ;
local saved-project = [ modules.peek project : .base-project ] ;
local id-location = [ path.join $(base-project-location) $(id) ] ;
local id-mod = [ project.load $(id-location) : synthesize ] ;
project.initialize $(id-mod) : $(id-location) ;
project.inherit-attributes $(id-mod) : $(base-project-mod) ;
local attributes = [ project.attributes $(id-mod) ] ;
$(attributes).set parent-module : $(base-project-mod) : exact ;
modules.poke $(base-project-mod) : $(id)-mod : $(id-mod) ;
modules.poke [ CALLER_MODULE ] : $(id)-mod : $(id-mod) ;
modules.poke project : .base-project : $(saved-project) ;
IMPORT $(__name__)
: constant-if call-in-project
: $(id-mod)
: constant-if call-in-project ;
if [ project.is-jamroot-module $(base-project-mod) ]
{
use-project /$(id) : $(id) ;
}
return $(id-mod) ;
}
rule constant-if ( name : value * )
{
if $(__define_constants__) && $(value)
{
call-in-project : constant $(name) : $(value) ;
modules.poke $(__name__) : $(name) : [ modules.peek $(base-project-mod) : $(name) ] ;
}
}
rule call-in-project ( project-mod ? : rule-name args * : * )
{
project-mod ?= $(base-project-mod) ;
project.push-current [ project.target $(project-mod) ] ;
local result = [ modules.call-in $(project-mod) :
$(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) :
$(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) :
$(19) ] ;
project.pop-current ;
return $(result) ;
}
rule include-conanbuildinfo ( cbi )
{
include $(cbi) ;
}
IMPORT $(__name__)
: project-define constant-if call-in-project include-conanbuildinfo
: $(base-project-mod)
: project-define constant-if call-in-project include-conanbuildinfo ;
if ! ( relwithdebinfo in [ feature.values variant ] )
{
variant relwithdebinfo : : <optimization>speed <debug-symbols>on <inlining>full <runtime-debugging>off ;
}
if ! ( minsizerel in [ feature.values variant ] )
{
variant minsizerel : : <optimization>space <debug-symbols>off <inlining>full <runtime-debugging>off ;
}
local __conanbuildinfo__ = [ GLOB $(__file__:D) : conanbuildinfo-*.jam : downcase ] ;
{
local __define_constants__ = yes ;
for local __cbi__ in $(__conanbuildinfo__)
{
call-in-project : include-conanbuildinfo $(__cbi__) ;
}
}
{
local __define_targets__ = yes ;
for local __cbi__ in $(__conanbuildinfo__)
{
call-in-project : include-conanbuildinfo $(__cbi__) ;
}
}
'''
_variation_empty = '''\
#|
B2 definitions for Conan packages. This is a generated file.
Edit the corresponding conanfile.txt instead.
|#
# global
constant-if rootpath(conan,) :
""
;
constant-if usage-requirements(conan,) :
<include>$(includedirs(conan,))
<define>$(defines(conan,))
<cflags>$(cflags(conan,))
<cxxflags>$(cppflags(conan,))
<link>shared:<linkflags>$(sharedlinkflags(conan,))
;
'''
| 31.267516
| 145
| 0.642358
| 1,866
| 14,727
| 4.958735
| 0.116292
| 0.011456
| 0.028099
| 0.052956
| 0.830758
| 0.823084
| 0.823084
| 0.819734
| 0.772614
| 0.760186
| 0
| 0.050185
| 0.192232
| 14,727
| 470
| 146
| 31.334043
| 0.72764
| 0
| 0
| 0.513784
| 0
| 0.157895
| 0.833843
| 0.287295
| 0
| 0
| 0
| 0
| 0.005013
| 1
| 0.005013
| false
| 0
| 0.052632
| 0
| 0.070175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ab1b0246031169cf7ef010db313bb2fcda382563
| 27,036
|
py
|
Python
|
turbogears/i18n/data/uk.py
|
timmartin19/turbogears
|
b5420cb7e55757d418d8fadb512dbd7803c4279c
|
[
"MIT"
] | null | null | null |
turbogears/i18n/data/uk.py
|
timmartin19/turbogears
|
b5420cb7e55757d418d8fadb512dbd7803c4279c
|
[
"MIT"
] | 9
|
2015-01-27T19:13:56.000Z
|
2019-03-29T14:44:31.000Z
|
turbogears/i18n/data/uk.py
|
timmartin19/turbogears
|
b5420cb7e55757d418d8fadb512dbd7803c4279c
|
[
"MIT"
] | 13
|
2015-04-14T14:15:53.000Z
|
2020-03-18T01:05:46.000Z
|
# Formatting configuration for locale uk
languages={'gu': u'\u0413\u0443\u044f\u0440\u0430\u0442\u0456', 'gd': u'\u0413\u0430\u0435\u043b\u044c\u0441\u044c\u043a\u0430', 'ga': u'\u0406\u0440\u043b\u0430\u043d\u0434\u0441\u044c\u043a\u0430', 'gn': u'\u0413\u0443\u0430\u0440\u0430\u043d\u0456', 'gl': u'\u0413\u0430\u043b\u0456\u0441\u0456\u0439\u0441\u044c\u043a\u0430', 'la': u'\u041b\u0430\u0442\u0438\u043d\u0441\u044c\u043a\u0430', 'ln': u'\u041b\u0456\u043d\u0433\u0430\u043b\u0430', 'lo': u'\u041b\u0430\u043e\u0441\u044c\u043a\u0430', 'tt': u'\u0422\u0430\u0442\u0430\u0440\u0441\u044c\u043a\u0430', 'tr': u'\u0422\u0443\u0440\u0435\u0446\u044c\u043a\u0430', 'ts': u'\u0422\u0441\u043e\u043d\u0433\u043e', 'lv': u'\u041b\u0430\u0442\u0432\u0456\u0439\u0441\u044c\u043a\u0430', 'to': u'\u0422\u043e\u043d\u0433\u0430', 'lt': u'\u041b\u0438\u0442\u043e\u0432\u0441\u044c\u043a\u0430', 'tk': u'\u0422\u0443\u0440\u043a\u043c\u0435\u043d\u0441\u044c\u043a\u0430', 'th': u'\u0422\u0430\u0439\u0441\u044c\u043a\u0430', 'ti': u'\u0422\u0438\u0433\u0440\u0456\u043d\u0456', 'tg': u'\u0422\u0430\u0434\u0436\u0438\u0446\u044c\u043a\u0430', 'te': u'\u0422\u0435\u043b\u0443\u0433\u0443', 'ta': u'\u0422\u0430\u043c\u0456\u043b\u044c\u0441\u044c\u043a\u0430', 'yi': u'\u0406\u0434\u0438\u0448', 'yo': u'\u0419\u043e\u0440\u0443\u0431\u0430', 'de': u'\u041d\u0456\u043c\u0435\u0446\u044c\u043a\u0430', 'da': u'\u0414\u0430\u0442\u0441\u044c\u043a\u0430', 'dz': u'\u0411\u0445\u0443\u0442\u0430\u043d\u0456', 'st': u'\u0421\u0435\u0441\u043e\u0442\u0445\u043e', 'qu': u'\u041a\u0435\u0447\u0443\u0430', 'el': u'\u0413\u0440\u0435\u0446\u044c\u043a\u0430', 'eo': u'\u0415\u0441\u043f\u0435\u0440\u0430\u043d\u0442\u043e', 'en': u'\u0410\u043d\u0433\u043b\u0456\u0439\u0441\u044c\u043a\u0430', 'zh': u'\u041a\u0438\u0442\u0430\u0439\u0441\u044c\u043a\u0430', 'za': u'\u0417\u0443\u0430\u043d\u0433', 'uk': u'\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430', 'eu': u'\u0411\u0430\u0441\u043a\u0432\u0430', 'et': 
u'\u0415\u0441\u0442\u043e\u043d\u0441\u044c\u043a\u0430', 'es': u'\u0406\u0441\u043f\u0430\u043d\u0441\u044c\u043a\u0430', 'ru': u'\u0420\u043e\u0441\u0456\u0439\u0441\u044c\u043a\u0430', 'rw': u'\u041a\u0456\u043d\u0430\u0440\u0443\u0430\u043d\u0434\u0430', 'rm': u'\u0420\u0435\u0442\u043e\u0440\u043e\u043c\u0430\u043d\u0441\u044c\u043a\u0430', 'rn': u'\u041a\u0456\u0440\u0443\u043d\u0434\u0456\u0439\u0441\u044c\u043a\u0430', 'ro': u'\u0420\u0443\u043c\u0443\u043d\u0441\u044c\u043a\u0430', 'bn': u'\u0411\u0435\u043d\u0433\u0430\u043b\u044c\u0441\u044c\u043a\u0430', 'be': u'\u0411\u0456\u043b\u043e\u0440\u0443\u0441\u044c\u043a\u0430', 'bg': u'\u0411\u043e\u043b\u0433\u0430\u0440\u0441\u044c\u043a\u0430', 'ba': u'\u0411\u0430\u0448\u043a\u0438\u0440\u0441\u044c\u043a\u0430', 'wo': u'\u0412\u043e\u043b\u043e\u0444', 'jv': u'\u042f\u0432\u0430\u043d\u0441\u044c\u043a\u0430', 'bo': u'\u0422\u0456\u0431\u0435\u0442\u0441\u044c\u043a\u0430', 'bh': u'\u0411\u0456\u0445\u0430\u0440\u0456\u0439\u0441\u044c\u043a\u0430', 'bi': u'\u0411\u0456\u0441\u043b\u0430\u043c\u0456\u0439\u0441\u044c\u043a\u0430', 'br': u'\u0411\u0440\u0435\u0442\u043e\u043d\u0441\u044c\u043a\u0430', 'ja': u'\u042f\u043f\u043e\u043d\u0441\u044c\u043a\u0430', 'om': u'\u041e\u0440\u043e\u043c\u043e', 'oc': u'\u041e\u043a\u0438\u0442\u0430\u043d', 'tw': u'\u0422\u0432\u0456', 'or': u'\u041e\u0440\u0456\u044f', 'xh': u'\u041a\u0445\u043e\u0441\u0430', 'co': u'\u041a\u043e\u0440\u0441\u0438\u043a\u0430\u043d\u0441\u044c\u043a\u0430', 'ca': u'\u041a\u0430\u0442\u0430\u043b\u043e\u043d\u0441\u044c\u043a\u0430', 'cy': u'\u0412\u0430\u043b\u043b\u0456\u0439\u0441\u044c\u043a\u0430', 'cs': u'\u0427\u0435\u0441\u044c\u043a\u0430', 'ps': u'\u041f\u0430\u0448\u0442\u043e', 'pt': u'\u041f\u043e\u0440\u0442\u0443\u0433\u0430\u043b\u044c\u0441\u044c\u043a\u0430', 'tl': u'\u0422\u0430\u0433\u0430\u043b\u044c\u0441\u044c\u043a\u0430', 'pa': u'\u041f\u0430\u043d\u0434\u0436\u0430\u0431\u0456', 'vi': 
u"\u0412'\u0454\u0442\u043d\u0430\u043c\u0441\u044c\u043a\u0430", 'pl': u'\u041f\u043e\u043b\u044c\u0441\u044c\u043a\u0430', 'hy': u'\u0412\u0456\u0440\u043c\u0435\u043d\u0441\u044c\u043a\u0430', 'hr': u'\u0425\u043e\u0440\u0432\u0430\u0442\u0441\u044c\u043a\u0430', 'hu': u'\u0423\u0433\u043e\u0440\u0441\u044c\u043a\u0430', 'hi': u'\u0425\u0456\u043d\u0434\u0456', 'ha': u'\u0425\u0430\u0443\u0441\u0430', 'he': u'\u0406\u0432\u0440\u0438\u0442', 'mg': u'\u041c\u0430\u043b\u0430\u0433\u0430\u0441\u0456\u0439\u0441\u044c\u043a\u0430', 'uz': u'\u0423\u0437\u0431\u0435\u0446\u044c\u043a\u0430', 'ml': u'\u041c\u0430\u043b\u0430\u0439\u044f\u043b\u0430\u043c', 'mo': u'\u041c\u043e\u043b\u0434\u0430\u0432\u0441\u044c\u043a\u0430', 'mn': u'\u041c\u043e\u043d\u0433\u043e\u043b\u044c\u0441\u044c\u043a\u0430', 'mi': u'\u041c\u0430\u043e\u0440\u0456', 'ik': u'\u0406\u043d\u0443\u043f\u0456\u0430\u043a', 'mk': u'\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0441\u044c\u043a\u0430', 'ur': u'\u0423\u0440\u0434\u0443', 'mt': u'\u041c\u0430\u043b\u044c\u0442\u0456\u0439\u0441\u044c\u043a\u0430', 'ms': u'\u041c\u0430\u043b\u0430\u0439\u0441\u044c\u043a\u0430', 'mr': u'\u041c\u0430\u0440\u0430\u0442\u0445\u0456', 'ug': u'\u0423\u0439\u0433\u0443\u0440\u0441\u044c\u043a\u0430', 'my': u'\u0411\u0443\u0440\u043c\u0456\u0441\u0456\u0439\u0441\u044c\u043a\u0430', 'aa': u'\u0410\u0444\u0430\u0440\u0441\u044c\u043a\u0430', 'ab': u'\u0410\u0431\u0445\u0430\u0437\u044c\u043a\u0430', 'ss': u'\u0421\u0456\u0441\u0432\u0430\u0442\u0456', 'af': u'\u0410\u0444\u0440\u0438\u043a\u0430\u043d\u0441', 'tn': u'\u0421\u0435\u0442\u0441\u0432\u0430\u043d\u0441\u044c\u043a\u0430', 'sw': u'\u0421\u0443\u0430\u0445\u0456\u043b\u0456', 'is': u'\u0406\u0441\u043b\u0430\u043d\u0434\u0441\u044c\u043a\u0430', 'am': u'\u0410\u043c\u0445\u0430\u0440\u0456\u043a', 'it': u'\u0406\u0442\u0430\u043b\u0456\u0439\u0441\u044c\u043a\u0430', 'sv': u'\u0428\u0432\u0435\u0434\u0441\u044c\u043a\u0430', 'ia': 
u'\u0406\u043d\u0442\u0435\u0440\u043b\u0456\u043d\u0433\u0432\u0430', 'as': u'\u0410\u0441\u0441\u0430\u043c\u0441\u044c\u043a\u0430', 'ar': u'\u0410\u0440\u0430\u0431\u0441\u044c\u043a\u0430', 'su': u'\u0421\u0443\u0434\u0430\u043d\u0441\u044c\u043a\u0430', 'zu': u'\u0417\u0443\u043b\u0443\u0441\u044c\u043a\u0430', 'ay': u'\u0410\u0443\u043c\u0430\u0440\u0430', 'az': u'\u0410\u0437\u0435\u0440\u0431\u0430\u0439\u0434\u0436\u0430\u043d\u0441\u044c\u043a\u0430', 'ie': u'\u0406\u043d\u0442\u0435\u0440\u043b\u0456\u043d\u0433\u0432\u0430', 'id': u'\u0406\u043d\u0434\u043e\u043d\u0435\u0437\u0456\u0439\u0441\u044c\u043a\u0430', 'sk': u'\u0421\u043b\u043e\u0432\u0430\u0446\u044c\u043a\u0430', 'nl': u'\u0413\u043e\u043b\u043b\u0430\u043d\u0434\u0441\u044c\u043a\u0430', 'no': u'\u041d\u043e\u0440\u0432\u0435\u0437\u044c\u043a\u0430', 'na': u'\u041d\u0430\u0443\u0440\u0443', 'ne': u'\u041d\u0435\u043f\u0430\u043b\u044c\u0441\u044c\u043a\u0430', 'vo': u'\u0412\u043e\u043b\u0430\u043f\u0430\u043a', 'fr': u'\u0424\u0440\u0430\u043d\u0446\u0443\u0437\u044c\u043a\u0430', 'sm': u'\u0421\u0430\u043c\u043e\u0430\u043d\u0441\u044c\u043a\u0430', 'fy': u'\u0424\u0440\u0438\u0437\u044c\u043a\u0430', 'fa': u'\u041f\u0435\u0440\u0441\u044c\u043a\u0430', 'fi': u'\u0424\u0456\u043d\u0441\u044c\u043a\u0430', 'fj': u'\u0424\u0456\u0434\u0436\u0456', 'sa': u'\u0421\u0430\u043d\u0441\u043a\u0440\u0438\u0442', 'fo': u'\u0424\u0430\u0440\u0435\u0440\u0441\u044c\u043a\u0430', 'ka': u'\u0413\u0440\u0443\u0437\u0438\u043d\u0441\u044c\u043a\u0430', 'kk': u'\u041a\u0430\u0437\u0430\u0445\u0441\u044c\u043a\u0430', 'sr': u'\u0421\u0435\u0440\u0431\u0441\u044c\u043a\u0430', 'sq': u'\u0410\u043b\u0431\u0430\u043d\u0441\u044c\u043a\u0430', 'ko': u'\u041a\u043e\u0440\u0435\u0439\u0441\u044c\u043a\u0430', 'kn': u'\u0414\u0440\u0430\u0432\u0456\u0434\u0456\u0439\u0441\u044c\u043a\u0430', 'km': u'\u041a\u0430\u043c\u043f\u0443\u0447\u0456\u0439\u0441\u044c\u043a\u0430', 'kl': 
u'\u0413\u0440\u0438\u043d\u043b\u0430\u043d\u0434\u0456\u043a', 'ks': u'\u041a\u0430\u0448\u043c\u0456\u0440\u0441\u044c\u043a\u0430', 'si': u'\u0421\u0438\u043d\u0433\u0430\u043b\u044c\u0441\u044c\u043a\u0430', 'sh': u'\u0421\u0435\u0440\u0431\u0441\u044c\u043a\u043e-\u0445\u043e\u0440\u0432\u0430\u0442\u0441\u044c\u043a\u0430', 'so': u'\u0421\u043e\u043c\u0430\u043b\u0456', 'sn': u'\u0428\u043e\u043d\u0430', 'ku': u'\u041a\u0443\u0440\u0434\u0441\u044c\u043a\u0430', 'sl': u'\u0421\u043b\u043e\u0432\u0435\u043d\u0441\u044c\u043a\u0430', 'ky': u'\u041a\u0438\u0440\u0433\u0438\u0437\u044c\u043a\u0430', 'sg': u'\u0421\u0430\u043d\u0433\u0440\u043e', 'sd': u'\u0421\u0456\u043d\u0434\u0442\u0445\u0456'}
countries={'BD': u'\u0411\u0430\u043d\u0433\u043b\u0430\u0434\u0435\u0448', 'BE': u'\u0411\u0435\u043b\u044c\u0433\u0456\u044f', 'BF': u'\u0411\u0443\u0440\u043a\u0456\u043d\u0430-\u0424\u0430\u0441\u043e', 'BG': u'\u0411\u043e\u043b\u0433\u0430\u0440\u0456\u044f', 'BA': u'\u0411\u043e\u0441\u043d\u0456\u044f \u0456 \u0413\u0435\u0440\u0446\u0435\u0433\u043e\u0432\u0438\u043d\u0430', 'BB': u'\u0411\u0430\u0440\u0431\u0430\u0434\u043e\u0441', 'WF': u'\u0412\u0430\u043b\u043b\u0456\u0441 \u0456 \u0424\u0443\u0442\u0443\u043d\u0430, \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'BM': u'\u0411\u0435\u0440\u043c\u0443\u0434\u0438', 'BN': u'\u0411\u0440\u0443\u043d\u0435\u0439', 'BO': u'\u0411\u043e\u043b\u0456\u0432\u0456\u044f', 'BH': u'\u0411\u0430\u0445\u0440\u0435\u0439\u043d', 'BI': u'\u0411\u0443\u0440\u0443\u043d\u0434\u0456', 'BJ': u'\u0411\u0435\u043d\u0456\u043d', 'BT': u'\u0411\u0443\u0442\u0430\u043d', 'JM': u'\u042f\u043c\u0430\u0439\u043a\u0430', 'BV': u'\u0411\u0443\u0432\u0435, \u043e\u0441\u0442\u0440\u0456\u0432', 'BW': u'\u0411\u043e\u0442\u0441\u0432\u0430\u043d\u0430', 'WS': u'\u0421\u0430\u043c\u043e\u0430', 'BR': u'\u0411\u0440\u0430\u0437\u0438\u043b\u0456\u044f', 'BS': u'\u0411\u0430\u0433\u0430\u043c\u0438', 'BY': u'\u0411\u0456\u043b\u043e\u0440\u0443\u0441\u044c', 'BZ': u'\u0411\u0435\u043b\u0456\u0437', 'RU': u'\u0420\u043e\u0441\u0456\u044f', 'RW': u'\u0420\u0443\u0430\u043d\u0434\u0430', 'TL': u'\u0421\u0445\u0456\u0434\u043d\u0438\u0439 \u0422\u0438\u043c\u043e\u0440', 'RE': u'\u0420\u0435\u044e\u043d\u044c\u0439\u043e\u043d', 'TM': u'\u0422\u0443\u0440\u043a\u043c\u0435\u043d\u0438\u0441\u0442\u0430\u043d', 'TJ': u'\u0422\u0430\u0434\u0436\u0438\u043a\u0438\u0441\u0442\u0430\u043d', 'RO': u'\u0420\u0443\u043c\u0443\u043d\u0456\u044f', 'TK': u'\u0422\u043e\u043a\u0435\u043b\u0430\u0443', 'GW': u'\u0413\u0432\u0456\u043d\u0435\u044f-\u0411\u0456\u0441\u0441\u0430\u0443', 'GU': u'\u0413\u0443\u0430\u043c', 'GT': 
u'\u0413\u0432\u0430\u0442\u0435\u043c\u0430\u043b\u0430', 'GS': u'\u041f\u0456\u0432\u0434\u0435\u043d\u043d\u0430 \u0414\u0436\u043e\u0440\u0436\u0456\u044f \u0442\u0430 \u041e\u0441\u0442\u0440\u043e\u0432\u0438 \u041f\u0456\u0432\u0434\u0435\u043d\u043d\u0438\u0439 \u0421\u0430\u043d\u0434\u0432\u0456\u0447', 'GR': u'\u0413\u0440\u0435\u0446\u0456\u044f', 'GQ': u'\u0415\u043a\u0432\u0430\u0442\u043e\u0440\u0456\u0430\u043b\u044c\u043d\u0430 \u0413\u0432\u0456\u043d\u0435\u044f', 'GP': u'\u0413\u0432\u0430\u0434\u0435\u043b\u0443\u043f\u0430', 'JP': u'\u042f\u043f\u043e\u043d\u0456\u044f', 'GY': u'\u0413\u0443\u0430\u043d\u0430', 'GF': u'\u0424\u0440\u0430\u043d\u0446\u0443\u0437\u044c\u043a\u0430 \u0413\u0432\u0456\u0430\u043d\u0430', 'GE': u'\u0413\u0440\u0443\u0437\u0456\u044f', 'GD': u'\u0413\u0440\u0435\u043d\u0430\u0434\u0430', 'GB': u'\u0412\u0435\u043b\u0438\u043a\u043e\u0431\u0440\u0438\u0442\u0430\u043d\u0456\u044f', 'GA': u'\u0413\u0430\u0431\u043e\u043d', 'SV': u'\u0421\u0430\u043b\u044c\u0432\u0430\u0434\u043e\u0440', 'GN': u'\u0413\u0432\u0456\u043d\u0435\u044f', 'GM': u'\u0413\u0430\u043c\u0431\u0456\u044f', 'GL': u'\u0413\u0440\u0435\u043d\u043b\u0430\u043d\u0434\u0456\u044f', 'GI': u'\u0413\u0456\u0431\u0440\u0430\u043b\u0442\u0430\u0440', 'GH': u'\u0413\u0430\u043d\u0430', 'OM': u'\u041e\u043c\u0430\u043d', 'TN': u'\u0422\u0443\u043d\u0456\u0441', 'JO': u'\u0419\u043e\u0440\u0434\u0430\u043d\u0456\u044f', 'HR': u'\u0425\u043e\u0440\u0432\u0430\u0442\u0456\u044f', 'HT': u'\u0413\u0430\u0457\u0442\u0456', 'HU': u'\u0423\u0433\u043e\u0440\u0449\u0438\u043d\u0430', 'HK': u'\u0413\u043e\u043d\u043a\u043e\u043d\u0433', 'HN': u'\u0413\u043e\u043d\u0434\u0443\u0440\u0430\u0441', 'HM': u'\u041e\u0441\u0442\u0440\u043e\u0432\u0438 \u0425\u0435\u0440\u0434\u0430 \u0456 \u041c\u0430\u043a\u0434\u043e\u043d\u0430\u043b\u044c\u0434\u0441\u0430', 'VE': u'\u0412\u0435\u043d\u0435\u0441\u0443\u0435\u043b\u0430', 'PR': 
u'\u041f\u0443\u0435\u0440\u0442\u043e-\u0420\u0456\u043a\u043e', 'PS': u'\u041f\u0430\u043b\u0435\u0441\u0442\u0438\u043d\u0430', 'PW': u'\u041f\u0430\u043b\u0430\u0443', 'PT': u'\u041f\u043e\u0440\u0442\u0443\u0433\u0430\u043b\u0456\u044f', 'SJ': u'\u0421\u0432\u0430\u043b\u0431\u0430\u0440\u0434 \u0456 \u042f\u043d \u041c\u0430\u0439\u0454\u043d, \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'PY': u'\u041f\u0430\u0440\u0430\u0433\u0432\u0430\u0439', 'IQ': u'\u0406\u0440\u0430\u043a', 'PA': u'\u041f\u0430\u043d\u0430\u043c\u0430', 'PF': u'\u0424\u0440\u0430\u043d\u0446\u0443\u0437\u044c\u043a\u0430 \u041f\u043e\u043b\u0456\u043d\u0435\u0437\u0456\u044f', 'PG': u'\u041f\u0430\u043f\u0443\u0430 \u041d\u043e\u0432\u0430 \u0413\u0432\u0456\u043d\u0435\u044f', 'PE': u'\u041f\u0435\u0440\u0443', 'PK': u'\u041f\u0430\u043a\u0438\u0441\u0442\u0430\u043d', 'PH': u'\u0424\u0456\u043b\u0456\u043f\u043f\u0456\u043d\u0438', 'PN': u'\u041f\u0456\u0442\u043a\u0430\u0457\u0440\u043d', 'PL': u'\u041f\u043e\u043b\u044c\u0449\u0430', 'PM': u"\u0421\u0432. 
\u041f'\u0454\u0440 \u0456 \u041c\u0456\u043a\u0443\u043b\u043e\u043d", 'ZM': u'\u0417\u0430\u043c\u0431\u0456\u044f', 'EH': u'\u0417\u0430\u0445\u0456\u0434\u043d\u0430 \u0421\u0430\u0445\u0430\u0440\u0430', 'EE': u'\u0415\u0441\u0442\u043e\u043d\u0456\u044f', 'EG': u'\u0404\u0433\u0438\u043f\u0435\u0442', 'ZA': u'\u041f\u0410\u0420', 'EC': u'\u0415\u043a\u0432\u0430\u0434\u043e\u0440', 'IT': u'\u0406\u0442\u0430\u043b\u0456\u044f', 'VN': u"\u0412'\u0454\u0442\u043d\u0430\u043c", 'SB': u'\u0421\u043e\u043b\u043e\u043c\u043e\u043d\u043e\u0432\u0456 \u041e\u0441\u0442\u0440\u043e\u0432\u0438', 'ET': u'\u0415\u0444\u0456\u043e\u043f\u0456\u044f', 'SO': u'\u0421\u043e\u043c\u0430\u043b\u0456', 'ZW': u'\u0417\u0456\u043c\u0431\u0430\u0431\u0432\u0435', 'SA': u'\u0421\u0430\u0443\u0434\u0456\u0432\u0441\u044c\u043a\u0430 \u0410\u0440\u0430\u0432\u0456\u044f', 'ES': u'\u0406\u0441\u043f\u0430\u043d\u0456\u044f', 'ER': u'\u0415\u0440\u0456\u0442\u0440\u0435\u044f', 'MD': u'\u041c\u043e\u043b\u0434\u043e\u0432\u0430', 'MG': u'\u041c\u0430\u0434\u0430\u0433\u0430\u0441\u043a\u0430\u0440', 'MA': u'\u041c\u0430\u0440\u043e\u043a\u043a\u043e', 'MC': u'\u041c\u043e\u043d\u0430\u043a\u043e', 'UZ': u'\u0423\u0437\u0431\u0435\u043a\u0438\u0441\u0442\u0430\u043d', 'MM': u"\u041c'\u044f\u043d\u043c\u0430\u0440", 'ML': u'\u041c\u0430\u043b\u0456', 'MO': u'\u041c\u0430\u043a\u0430\u043e', 'MN': u'\u041c\u043e\u043d\u0433\u043e\u043b\u0456\u044f', 'MH': u'\u041c\u0430\u0440\u0448\u0430\u043b\u043e\u0432\u0456 \u041e\u0441\u0442\u0440\u043e\u0432\u0438', 'MK': u'\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0456\u044f', 'MU': u'\u041c\u0430\u0432\u0440\u0438\u043a\u0456\u0439', 'MT': u'\u041c\u0430\u043b\u044c\u0442\u0430', 'MW': u'\u041c\u0430\u043b\u0430\u0432\u0438', 'MV': u'\u041c\u0430\u043b\u044c\u0434\u0456\u0432\u0438', 'MQ': u'\u041c\u0430\u0440\u0442\u0438\u043d\u0456\u043a\u0430', 'MP': u'\u041f\u0456\u0432\u043d\u0456\u0447\u043d\u0430 
\u041c\u0430\u0440\u0456\u0430\u043d\u0430, \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'MS': u'\u041c\u043e\u043d\u0441\u0435\u0440\u0440\u0430\u0442', 'MR': u'\u041c\u0430\u0432\u0440\u0438\u0442\u0430\u043d\u0456\u044f', 'UG': u'\u0423\u0433\u0430\u043d\u0434\u0430', 'MY': u'\u041c\u0430\u043b\u0430\u0439\u0437\u0456\u044f', 'MX': u'\u041c\u0435\u043a\u0441\u0438\u043a\u0430', 'IL': u'\u0406\u0437\u0440\u0430\u0457\u043b\u044c', 'FR': u'\u0424\u0440\u0430\u043d\u0446\u0456\u044f', 'IO': u'\u0411\u0440\u0438\u0442\u0430\u043d\u0441\u044c\u043a\u0456 \u0442\u0435\u0440\u0438\u0442\u043e\u0440\u0456\u0457 \u0406\u043d\u0434\u0456\u0439\u0441\u044c\u043a\u043e\u0433\u043e \u043e\u043a\u0435\u0430\u043d\u0443', 'SH': u'\u0421\u0432. \u0404\u043b\u0435\u043d\u0430', 'FI': u'\u0424\u0456\u043d\u043b\u044f\u043d\u0434\u0456\u044f', 'FJ': u'\u0424\u0456\u0434\u0436\u0456', 'FK': u'\u0424\u043e\u043b\u043a\u043b\u0435\u043d\u0434\u0441\u044c\u043a\u0456 \u041e\u0441\u0442\u0440\u043e\u0432\u0438 (\u041c\u0430\u043b\u044c\u0432\u0456\u043d\u0438)', 'FM': u'\u041c\u0456\u043a\u0440\u043e\u043d\u0435\u0437\u0456\u044f', 'FO': u'\u0424\u0430\u0440\u043e, \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'NI': u'\u041d\u0456\u043a\u0430\u0440\u0430\u0433\u0443\u0430', 'NL': u'\u041d\u0456\u0434\u0435\u0440\u043b\u0430\u043d\u0434\u0438', 'NO': u'\u041d\u043e\u0440\u0432\u0435\u0433\u0456\u044f', 'NA': u'\u041d\u0430\u043c\u0456\u0431\u0456\u044f', 'VU': u'\u0412\u0430\u043d\u0443\u0430\u0442\u0443', 'NC': u'\u041d\u043e\u0432\u0430 \u041a\u0430\u043b\u0435\u0434\u043e\u043d\u0456\u044f', 'NE': u'\u041d\u0456\u0433\u0435\u0440\u0456\u044f', 'NF': u'\u041d\u043e\u0440\u0444\u043e\u043b\u044c\u043a\u0441\u044c\u043a\u0456 \u041e\u0441\u0442\u0440\u043e\u0432\u0438', 'NG': u'\u041d\u0456\u0433\u0435\u0440\u0456\u044f', 'NZ': u'\u041d\u043e\u0432\u0430 \u0417\u0435\u043b\u0430\u043d\u0434\u0456\u044f', 'NP': u'\u041d\u0435\u043f\u0430\u043b', 'NR': u'\u041d\u0430\u0443\u0440\u0443', 
'NU': u'\u041d\u0456\u044f', 'CK': u'\u041a\u0443\u043a\u0430, \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'CI': u"\u041a\u043e\u0442-\u0434'\u0406\u0432\u0443\u0430\u0440", 'CH': u'\u0428\u0432\u0435\u0439\u0446\u0430\u0440\u0456\u044f', 'CO': u'\u041a\u043e\u043b\u0443\u043c\u0431\u0456\u044f', 'CN': u'\u041a\u0438\u0442\u0430\u0439', 'CM': u'\u041a\u0430\u043c\u0435\u0440\u0443\u043d', 'CL': u'\u0427\u0438\u043b\u0456', 'CC': u'\u041a\u043e\u043a\u043e\u0441\u043e\u0432\u0456 \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'CA': u'\u041a\u0430\u043d\u0430\u0434\u0430', 'CG': u'\u041a\u043e\u043d\u0433\u043e', 'CF': u'\u0426\u0435\u043d\u0442\u0440\u0430\u043b\u044c\u043d\u043e-\u0410\u0444\u0440\u0438\u043a\u0430\u043d\u0441\u044c\u043a\u0430 \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0456\u043a\u0430', 'CD': u'\u041a\u043e\u043d\u0433\u043e', 'CZ': u'\u0427\u0435\u0445\u0456\u044f', 'CY': u'\u041a\u0456\u043f\u0440', 'CX': u'\u0420\u0456\u0437\u0434\u0432\u044f\u043d\u0456 \u041e\u0441\u0442\u0440\u043e\u0432\u0438', 'CR': u'\u041a\u043e\u0441\u0442\u0430-\u0420\u0438\u043a\u0430', 'CV': u'\u0417\u0435\u043b\u0435\u043d\u043e\u0433\u043e \u041c\u0438\u0441\u0443, \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'CU': u'\u041a\u0443\u0431\u0430', 'SZ': u'\u0421\u0432\u0430\u0437\u0456\u043b\u0435\u043d\u0434', 'SY': u'\u0421\u0438\u0440\u0456\u0439\u0441\u044c\u043a\u0430 \u0410\u0440\u0430\u0431\u0441\u044c\u043a\u0430 \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0456\u043a\u0430', 'KG': u'\u041a\u0438\u0440\u0433\u0438\u0437\u0441\u0442\u0430\u043d', 'KE': u'\u041a\u0435\u043d\u0456\u044f', 'SR': u'\u0421\u0443\u0440\u0456\u043d\u0430\u043c', 'KI': u'\u041a\u0456\u0440\u0438\u0431\u0430\u0442\u0456', 'KH': u'\u041a\u0430\u043c\u0431\u043e\u0434\u0436\u0430', 'KN': u'\u0421\u0432. 
\u041a\u0456\u0442\u0442\u0441 \u0456 \u041d\u0435\u0432\u0456\u0441', 'KM': u'\u041a\u043e\u043c\u043e\u0440\u043e\u0441', 'ST': u'\u0421\u0430\u043e \u0422\u043e\u043c \u0456 \u041f\u0440\u0456\u043d\u0441\u0456\u043f', 'SK': u'\u0421\u043b\u043e\u0432\u0430\u043a\u0456\u044f', 'KR': u'\u041a\u043e\u0440\u0435\u044f, \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0456\u043a\u0430', 'SI': u'\u0421\u043b\u043e\u0432\u0435\u043d\u0456\u044f', 'KP': u'\u041a\u043e\u0440\u0435\u044f, \u0414\u0435\u043c\u043e\u043a\u0440\u0430\u0442\u0438\u0447\u043d\u0430 \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0456\u043a\u0430', 'KW': u'\u041a\u0443\u0432\u0435\u0439\u0442', 'SN': u'\u0421\u0435\u043d\u0435\u0433\u0430\u043b', 'SM': u'\u0421\u0430\u043d-\u041c\u0430\u0440\u0456\u043d\u043e', 'SL': u'\u0421\u044c\u0454\u0440\u0440\u0430-\u041b\u0435\u043e\u043d\u0435', 'SC': u'\u0421\u0435\u0439\u0448\u0435\u043b\u0438', 'KZ': u'\u041a\u0430\u0437\u0430\u0445\u0441\u0442\u0430\u043d', 'KY': u'\u041a\u0430\u0439\u043c\u0430\u043d\u043e\u0432\u0456 \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'SG': u'\u0421\u0456\u043d\u0433\u0430\u043f\u0443\u0440', 'SE': u'\u0428\u0432\u0435\u0446\u0456\u044f', 'SD': u'\u0421\u0443\u0434\u0430\u043d', 'DO': u'\u0414\u043e\u043c\u0456\u043d\u0456\u043a\u0430\u043d\u0441\u044c\u043a\u0430 \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0456\u043a\u0430', 'DM': u'\u0414\u043e\u043c\u0456\u043d\u0456\u043a', 'DJ': u'\u0414\u0436\u0438\u0431\u0443\u0442\u0456', 'DK': u'\u0414\u0430\u043d\u0456\u044f', 'VG': u'\u0412\u0456\u0440\u0433\u0456\u043d\u0441\u044c\u043a\u0456 \u043e\u0441\u0442\u0440\u043e\u0432\u0438 (\u0411\u0440\u0438\u0442\u0430\u043d\u0456\u044f)', 'DE': u'\u041d\u0456\u043c\u0435\u0447\u0447\u0438\u043d\u0430', 'YE': u'\u0419\u0454\u043c\u0435\u043d', 'DZ': u'\u0410\u043b\u0436\u0438\u0440', 'US': u'\u0421\u0428\u0410', 'UY': u'\u0423\u0440\u0443\u0433\u0432\u0430\u0439', 'YU': u'\u042e\u0433\u043e\u0441\u043b\u0430\u0432\u0456\u044f', 'YT': 
u'\u041c\u0430\u0439\u043e\u0442', 'UM': u'\u0412\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0456 \u041e\u0441\u0442\u0440\u043e\u0432\u0438 \u0421\u0428\u0410', 'LB': u'\u041b\u0456\u0432\u0430\u043d', 'LC': u'\u0421\u0430\u043d\u0442\u0430 \u041b\u044e\u0447\u0456\u044f', 'LA': u'\u041b\u0430\u043e\u0441\u044c\u043a\u0430 \u041d\u0430\u0440\u043e\u0434\u043d\u043e-\u0414\u0435\u043c\u043e\u043a\u0440\u0430\u0442\u0438\u0447\u043d\u0430 \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0456\u043a\u0430', 'TV': u'\u0422\u0443\u0432\u0430\u043b\u0443', 'TW': u'\u0422\u0430\u0439\u0432\u0430\u043d\u044c', 'TT': u'\u0422\u0440\u0438\u043d\u0456\u0434\u0430\u0434 \u0456 \u0422\u0430\u0431\u0430\u0433\u043e', 'TR': u'\u0422\u0443\u0440\u0435\u0447\u0447\u0438\u043d\u0430', 'LK': u'\u0428\u0440\u0456-\u041b\u0430\u043d\u043a\u0430', 'LI': u'\u041b\u0456\u0445\u0442\u0435\u043d\u0448\u0442\u0435\u0439\u043d', 'LV': u'\u041b\u0430\u0442\u0432\u0456\u044f', 'TO': u'\u0422\u043e\u043d\u0433\u0430', 'LT': u'\u041b\u0438\u0442\u0432\u0430', 'LU': u'\u041b\u044e\u043a\u0441\u0435\u043c\u0431\u0443\u0440\u0433', 'LR': u'\u041b\u0456\u0431\u0435\u0440\u0456\u044f', 'LS': u'\u041b\u0435\u0441\u043e\u0442\u043e', 'TH': u'\u0422\u0430\u0439\u043b\u0430\u043d\u0434', 'TF': u'\u0424\u0440\u0430\u043d\u0446\u0443\u0437\u044c\u043a\u0456 \u041f\u0456\u0432\u0434\u0435\u043d\u043d\u0456 \u0422\u0435\u0440\u0438\u0442\u043e\u0440\u0456\u0457', 'TG': u'\u0422\u043e\u0433\u043e', 'TD': u'\u0427\u0430\u0434', 'TC': u'\u0422\u0443\u0440\u043e\u043a \u0442\u0430 \u041a\u0430\u043a\u0456\u043e\u0441, \u043e\u0441\u0442\u0440\u043e\u0432\u0438', 'LY': u'\u041b\u0456\u0432\u0456\u0439\u0441\u044c\u043a\u0430 \u0410\u0440\u0430\u0431\u0441\u044c\u043a\u0430 \u0414\u0436\u0430\u043c\u0430\u0445\u0456\u0440\u0456\u044f', 'VA': u'\u0412\u0430\u0442\u0438\u043a\u0430\u043d', 'VC': u'\u0421\u0432. 
\u0412\u0456\u043d\u0441\u0435\u043d\u0442 \u0456 \u0413\u0440\u0435\u043d\u0430\u0434\u0438\u043d\u0438', 'AE': u'\u0421\u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0456 \u0410\u0440\u0430\u0431\u0441\u044c\u043a\u0456 \u0415\u043c\u0456\u0440\u0430\u0442\u0438', 'AD': u'\u0410\u043d\u0434\u043e\u0440\u0440\u0430', 'AG': u'\u0410\u043d\u0442\u0438\u0433\u0443\u0430 \u0456 \u0411\u0430\u0440\u0431\u0443\u0434\u0430', 'AF': u'\u0410\u0444\u0433\u0430\u043d\u0456\u0441\u0442\u0430\u043d', 'AI': u'\u0410\u043d\u0433\u0456\u043b\u044c\u044f', 'VI': u'\u0412\u0456\u0440\u0433\u0456\u043d\u0441\u044c\u043a\u0456 \u043e\u0441\u0442\u0440\u043e\u0432\u0438 (\u0421\u0428\u0410)', 'IS': u'\u0406\u0441\u043b\u0430\u043d\u0434\u0456\u044f', 'IR': u'\u0406\u0440\u0430\u043d', 'AM': u'\u0412\u0456\u0440\u043c\u0435\u043d\u0456\u044f', 'AL': u'\u0410\u043b\u0431\u0430\u043d\u0456\u044f', 'AO': u'\u0410\u043d\u0433\u043e\u043b\u0430', 'AN': u'\u041d\u0456\u0434\u0435\u0440\u043b\u0430\u043d\u0434\u0441\u044c\u043a\u0456 \u0410\u043d\u0442\u0456\u043b\u0438', 'AQ': u'\u0410\u043d\u0442\u0430\u0440\u043a\u0442\u0438\u043a\u0430', 'AS': u'\u0410\u043c\u0435\u0440\u0438\u043a\u0430\u043d\u0441\u044c\u043a\u0456 \u0421\u0430\u043c\u043e\u0430', 'AR': u'\u0410\u0440\u0433\u0435\u043d\u0442\u0438\u043d\u0430', 'AU': u'\u0410\u0432\u0441\u0442\u0440\u0430\u043b\u0456\u044f', 'AT': u'\u0410\u0432\u0441\u0442\u0440\u0456\u044f', 'AW': u'\u0410\u0440\u0443\u0431\u0430', 'IN': u'\u0406\u043d\u0434\u0456\u044f', 'TZ': u"\u0422\u0430\u043d\u0437\u0430\u043d\u0456\u044f, \u041e\u0431'\u0454\u0434\u043d\u0430\u043d\u0430 \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0456\u043a\u0430", 'AZ': u'\u0410\u0437\u0435\u0440\u0431\u0430\u0439\u0434\u0436\u0430\u043d', 'IE': u'\u0406\u0440\u043b\u0430\u043d\u0434\u0456\u044f', 'ID': u'\u0406\u043d\u0434\u043e\u043d\u0435\u0437\u0456\u044f', 'UA': u'\u0423\u043a\u0440\u0430\u0457\u043d\u0430', 'QA': u'\u041a\u0430\u0442\u0430\u0440', 'MZ': 
u'\u041c\u043e\u0437\u0430\u043c\u0431\u0456\u043a'}
# Locale data for what appears to be the Ukrainian (uk) locale -- the Cyrillic
# strings decode to Ukrainian month/day names. TODO confirm against the
# module's locale id, which is outside this chunk.

# Full month names, January..December. These are in the genitive case
# (e.g. the first entry decodes to "sichnya"), the form used inside dates.
months=[u'\u0441\u0456\u0447\u043d\u044f', u'\u043b\u044e\u0442\u043e\u0433\u043e', u'\u0431\u0435\u0440\u0435\u0437\u043d\u044f', u'\u043a\u0432\u0456\u0442\u043d\u044f', u'\u0442\u0440\u0430\u0432\u043d\u044f', u'\u0447\u0435\u0440\u0432\u043d\u044f', u'\u043b\u0438\u043f\u043d\u044f', u'\u0441\u0435\u0440\u043f\u043d\u044f', u'\u0432\u0435\u0440\u0435\u0441\u043d\u044f', u'\u0436\u043e\u0432\u0442\u043d\u044f', u'\u043b\u0438\u0441\u0442\u043e\u043f\u0430\u0434\u0430', u'\u0433\u0440\u0443\u0434\u043d\u044f']
# Abbreviated month names, parallel to `months`.
abbrMonths=[u'\u0441\u0456\u0447', u'\u043b\u044e\u0442', u'\u0431\u0435\u0440', u'\u043a\u0432\u0456\u0442', u'\u0442\u0440\u0430\u0432', u'\u0447\u0435\u0440\u0432', u'\u043b\u0438\u043f', u'\u0441\u0435\u0440\u043f', u'\u0432\u0435\u0440', u'\u0436\u043e\u0432\u0442', u'\u043b\u0438\u0441\u0442', u'\u0433\u0440\u0443\u0434']
# Full weekday names, Monday..Sunday.
days=[u'\u041f\u043e\u043d\u0435\u0434\u0456\u043b\u043e\u043a', u'\u0412\u0456\u0432\u0442\u043e\u0440\u043e\u043a', u'\u0421\u0435\u0440\u0435\u0434\u0430', u'\u0427\u0435\u0442\u0432\u0435\u0440', u"\u041f'\u044f\u0442\u043d\u0438\u0446\u044f", u'\u0421\u0443\u0431\u043e\u0442\u0430', u'\u041d\u0435\u0434\u0456\u043b\u044f']
# Two-letter weekday abbreviations, parallel to `days`.
abbrDays=[u'\u041f\u043d', u'\u0412\u0442', u'\u0421\u0440', u'\u0427\u0442', u'\u041f\u0442', u'\u0421\u0431', u'\u041d\u0434']
# Date format patterns. The doubled %% escapes the named placeholder so it
# survives one round of %-formatting -- presumably filled in with entries from
# months/days in a second pass by the consumer; verify against the caller.
dateFormats={'medium': '%d %%(abbrmonthname)s %Y', 'full': u'%%(dayname)s, %d %%(monthname)s %Y \u0440.', 'long': '%d %%(monthname)s %Y', 'short': '%d.%m.%y'}
# Number formatting symbols: decimal comma, no-break space (U+00A0) as the
# thousands grouping separator, U+FFFD for NaN, U+221E for infinity.
numericSymbols={'group': u'\xa0', 'nativeZeroDigit': '0', 'exponential': 'E', 'perMille': u'\u2030', 'nan': u'\ufffd', 'decimal': ',', 'percentSign': '%', 'list': ';', 'patternDigit': '#', 'plusSign': '+', 'infinity': u'\u221e', 'minusSign': '-'}
| 1,502
| 16,662
| 0.734391
| 4,526
| 27,036
| 4.386876
| 0.086169
| 0.062453
| 0.075548
| 0.087635
| 0.489449
| 0.409872
| 0.29524
| 0.188013
| 0.126316
| 0.098716
| 0
| 0.516265
| 0.034657
| 27,036
| 17
| 16,663
| 1,590.352941
| 0.244492
| 0.001406
| 0
| 0
| 0
| 10.25
| 0.858794
| 0.799748
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ab4460e894b9d34a2e6e3ffdc554536712207320
| 170,044
|
py
|
Python
|
src/client_libraries/python/microsoft/dynamics/customerinsights/api/customer_insights.py
|
microsoft/Dynamics365-CustomerInsights-Client-Libraries
|
e00632f7972717b03e0fb1a9e2667e8f9444a0fe
|
[
"MIT"
] | null | null | null |
src/client_libraries/python/microsoft/dynamics/customerinsights/api/customer_insights.py
|
microsoft/Dynamics365-CustomerInsights-Client-Libraries
|
e00632f7972717b03e0fb1a9e2667e8f9444a0fe
|
[
"MIT"
] | null | null | null |
src/client_libraries/python/microsoft/dynamics/customerinsights/api/customer_insights.py
|
microsoft/Dynamics365-CustomerInsights-Client-Libraries
|
e00632f7972717b03e0fb1a9e2667e8f9444a0fe
|
[
"MIT"
] | 7
|
2021-02-11T19:48:57.000Z
|
2021-12-17T08:00:15.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import HttpOperationError
from . import models
class CustomerInsightsConfiguration(Configuration):
    """Configuration for the CustomerInsights client.

    Note that all parameters used to create this instance are saved as
    instance attributes.

    :param str base_url: Service URL
    """

    def __init__(
            self, base_url=None):
        # Fall back to the public service endpoint when none is supplied.
        service_url = base_url or 'https://api.ci.ai.dynamics.com/v1'
        super(CustomerInsightsConfiguration, self).__init__(service_url)
        self.add_user_agent('microsoft_customerinsights/{}'.format(VERSION))
class CustomerInsights(SDKClient):
"""API version 1.0
:ivar config: Configuration for client.
:vartype config: CustomerInsightsConfiguration
:param str base_url: Service URL
"""
def __init__(
        self, base_url=None):
    """Create the API client, wiring up configuration and (de)serializers."""
    self.config = CustomerInsightsConfiguration(base_url)
    super(CustomerInsights, self).__init__(None, self.config)
    self.api_version = 'v1'
    # Register every model class exposed by the generated models package so
    # the serializer/deserializer can resolve payload types by name.
    registered_models = {
        name: cls
        for name, cls in models.__dict__.items()
        if isinstance(cls, type)
    }
    self._serialize = Serializer(registered_models)
    self._deserialize = Deserializer(registered_models)
def get_an_attribute_profile(
        self, instance_id, qualified_entity_name, attribute_name, custom_headers=None, raw=False, **operation_config):
    """Gets the specific attribute profile for the entity.

    :param instance_id: Format - uuid. Customer Insights instance id.
    :type instance_id: str
    :param qualified_entity_name: Qualified Entity Name.
    :type qualified_entity_name: str
    :param attribute_name: Attribute Name.
    :type attribute_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: AttributeDataProfile or ClientRawResponse if raw=true
    :rtype:
     ~microsoft.dynamics.customerinsights.api.models.AttributeDataProfile
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Substitute the path parameters into the operation's URL template.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'qualifiedEntityName': self._serialize.url("qualified_entity_name", qualified_entity_name, 'str'),
        'attributeName': self._serialize.url("attribute_name", attribute_name, 'str'),
    }
    url = self._client.format_url(self.get_an_attribute_profile.metadata['url'], **path_args)

    # This operation takes no query parameters.
    query = {}

    # Default headers; caller-supplied headers win on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; any status outside the documented set raises.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    result = None
    response_headers = {}
    if response.status_code == 200:
        result = self._deserialize('AttributeDataProfile', response)
        response_headers = {
            'WWW-Authenticate': 'str',
        }

    if raw:
        raw_response = ClientRawResponse(result, response)
        raw_response.add_headers(response_headers)
        return raw_response
    return result
get_an_attribute_profile.metadata = {'url': '/instances/{instanceId}/dataprofile/{qualifiedEntityName}/{attributeName}'}
def get_all_data_sources(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Fetches a collection of DataSourceInfo configured for the Customer
    Insights instance.

    :param instance_id: Format - uuid. The instance id for which to fetch
     data source info.
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Substitute the path parameters into the operation's URL template.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
    }
    url = self._client.format_url(self.get_all_data_sources.metadata['url'], **path_args)

    # This operation takes no query parameters.
    query = {}

    # Default headers; caller-supplied headers win on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; any status outside the documented set raises.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Statuses that carry a payload, mapped to the model it deserializes into.
    body_types = {
        200: '[DataSourceInfo]',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    result = None
    response_headers = {}
    if response.status_code in body_types:
        result = self._deserialize(body_types[response.status_code], response)
        response_headers = {
            'WWW-Authenticate': 'str',
        }

    if raw:
        raw_response = ClientRawResponse(result, response)
        raw_response.add_headers(response_headers)
        return raw_response
    return result
get_all_data_sources.metadata = {'url': '/instances/{instanceId}/manage/datasources'}
def get_data_source(
        self, instance_id, data_source_id, custom_headers=None, raw=False, **operation_config):
    """Fetches a DataSourceInfo matching the dataSourceId configured for the
    Customer Insights instance.

    :param instance_id: Format - uuid. The instance id to fetch data
     source info for.
    :type instance_id: str
    :param data_source_id: Format - uuid. The data source id to fetch info
     for.
    :type data_source_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Substitute the path parameters into the operation's URL template.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'dataSourceId': self._serialize.url("data_source_id", data_source_id, 'str'),
    }
    url = self._client.format_url(self.get_data_source.metadata['url'], **path_args)

    # This operation takes no query parameters.
    query = {}

    # Default headers; caller-supplied headers win on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; any status outside the documented set raises.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Statuses that carry a payload, mapped to the model it deserializes into.
    body_types = {
        200: 'DataSourceInfo',
        500: 'ApiErrorResult',
    }
    result = None
    response_headers = {}
    if response.status_code in body_types:
        result = self._deserialize(body_types[response.status_code], response)
        response_headers = {
            'WWW-Authenticate': 'str',
        }

    if raw:
        raw_response = ClientRawResponse(result, response)
        raw_response.add_headers(response_headers)
        return raw_response
    return result
get_data_source.metadata = {'url': '/instances/{instanceId}/manage/datasources/{dataSourceId}'}
def delete_a_data_source(
        self, instance_id, data_source_id, custom_headers=None, raw=False, **operation_config):
    """Deletes a data source from the instance.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param data_source_id: Format - uuid. The data source id.
    :type data_source_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Substitute the path parameters into the operation's URL template.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'dataSourceId': self._serialize.url("data_source_id", data_source_id, 'str'),
    }
    url = self._client.format_url(self.delete_a_data_source.metadata['url'], **path_args)

    # This operation takes no query parameters.
    query = {}

    # Default headers; caller-supplied headers win on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the DELETE; any status outside the documented set raises.
    request = self._client.delete(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Statuses that carry a payload, mapped to the model it deserializes into.
    body_types = {
        200: 'DeletionResponse',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    result = None
    response_headers = {}
    if response.status_code in body_types:
        result = self._deserialize(body_types[response.status_code], response)
        response_headers = {
            'WWW-Authenticate': 'str',
        }

    if raw:
        raw_response = ClientRawResponse(result, response)
        raw_response.add_headers(response_headers)
        return raw_response
    return result
delete_a_data_source.metadata = {'url': '/instances/{instanceId}/manage/datasources/{dataSourceId}'}
def create_an_entity(
        self, instance_id, entity_name, body=None, valid_until=None, caller=None, custom_headers=None, raw=False, **operation_config):
    """Writes an entity instance into the store, e.g. an activity entity.
    (Preview).

    :param instance_id: Format - uuid. Customer Insights instance Id.
    :type instance_id: str
    :param entity_name: Fully qualified entity name, consisting of
     'DataSource_EntityName' e.g. 'PoS_posPurchases', or 'UnifiedActivity'.
    :type entity_name: str
    :param body: JSON document representing the entity. The schema must be
     consistent with the entity metadata. Use GET action of this resource
     to obtain an example.
    :type body: object
    :param valid_until: Format - date-time (as date-time in RFC3339).
     Expiration time of the change; ISO8601; optional. The value can be
     maximum 30 days in the future. If the datasource for this entity
     doesn't contain the changes after this time, the update disappears
     from the store.
    :type valid_until: str
    :param caller: String to identify the caller; optional.
    :type caller: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Construct URL: substitute instanceId/entityName into the URL template.
    url = self.create_an_entity.metadata['url']
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'entityName': self._serialize.url("entity_name", entity_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct query parameters; both are optional and omitted when None.
    query_parameters = {}
    if valid_until is not None:
        query_parameters['validUntil'] = self._serialize.query("valid_until", valid_until, 'str')
    if caller is not None:
        query_parameters['caller'] = self._serialize.query("caller", caller, 'str')
    # Construct headers; caller-supplied custom headers override the defaults.
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json-patch+json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct body: serialize the JSON document only when one was supplied.
    if body is not None:
        body_content = self._serialize.body(body, 'object')
    else:
        body_content = None
    # Construct and send request; any status outside the documented set raises.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [201, 202, 400, 401, 409, 429, 500, 503]:
        raise HttpOperationError(self._deserialize, response)
    deserialized = None
    header_dict = {}
    # 201/202 carry a success payload; 400/409 carry an OData error payload.
    # Other accepted statuses (401, 429, 500, 503) return None.
    if response.status_code == 201:
        deserialized = self._deserialize('CreatedResult', response)
        header_dict = {
            'WWW-Authenticate': 'str',
            'Retry-After': 'int',
        }
    if response.status_code == 202:
        deserialized = self._deserialize('AcceptedResult', response)
        header_dict = {
            'WWW-Authenticate': 'str',
            'Retry-After': 'int',
        }
    if response.status_code == 400:
        deserialized = self._deserialize('ODataError', response)
        header_dict = {
            'WWW-Authenticate': 'str',
            'Retry-After': 'int',
        }
    if response.status_code == 409:
        deserialized = self._deserialize('ODataError', response)
        header_dict = {
            'WWW-Authenticate': 'str',
            'Retry-After': 'int',
        }
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
create_an_entity.metadata = {'url': '/instances/{instanceId}/data/{entityName}'}
def update_an_entity(
        self, instance_id, entity_name, entity_id, body=None, valid_until=None, caller=None, custom_headers=None, raw=False, **operation_config):
    """Updates an entity instance in the store, e.g. a Customer entity.
    (Preview).

    :param instance_id: Format - uuid. Customer Insights instance Id.
    :type instance_id: str
    :param entity_name: Fully qualified entity name, e.g. 'Customer'.
    :type entity_name: str
    :param entity_id: Id of the entity to update, e.g. 'CustomerId' of a
     Customer entity.
    :type entity_id: str
    :param body: JSON document with set of changes to apply on the entity.
     Each change must be consistent with the entity metadata. Use GET
     action of this resource to obtain an example.
    :type body: object
    :param valid_until: Format - date-time (as date-time in RFC3339).
     Expiration time of the change; ISO8601; optional. The value can be
     maximum 30 days in the future. If the datasource for this entity
     doesn't contain the changes after this time, the update disappears
     from the store.
    :type valid_until: str
    :param caller: String to identify the caller; optional.
    :type caller: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Construct URL: substitute instanceId/entityName/entityId into the template.
    url = self.update_an_entity.metadata['url']
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'entityName': self._serialize.url("entity_name", entity_name, 'str'),
        'entityId': self._serialize.url("entity_id", entity_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct query parameters; both are optional and omitted when None.
    query_parameters = {}
    if valid_until is not None:
        query_parameters['validUntil'] = self._serialize.query("valid_until", valid_until, 'str')
    if caller is not None:
        query_parameters['caller'] = self._serialize.query("caller", caller, 'str')
    # Construct headers; caller-supplied custom headers override the defaults.
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json-patch+json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct body: serialize the change document only when one was supplied.
    if body is not None:
        body_content = self._serialize.body(body, 'object')
    else:
        body_content = None
    # Construct and send request (PATCH); any status outside the set raises.
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [204, 400, 401, 404, 429, 500, 503]:
        raise HttpOperationError(self._deserialize, response)
    deserialized = None
    header_dict = {}
    # 204 carries a success payload; 400/404 carry an OData error payload.
    # Other accepted statuses (401, 429, 500, 503) return None.
    if response.status_code == 204:
        deserialized = self._deserialize('NoContentResult', response)
        header_dict = {
            'WWW-Authenticate': 'str',
            'Retry-After': 'int',
        }
    if response.status_code == 400:
        deserialized = self._deserialize('ODataError', response)
        header_dict = {
            'WWW-Authenticate': 'str',
            'Retry-After': 'int',
        }
    if response.status_code == 404:
        deserialized = self._deserialize('ODataError', response)
        header_dict = {
            'WWW-Authenticate': 'str',
            'Retry-After': 'int',
        }
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
update_an_entity.metadata = {'url': '/instances/{instanceId}/data/{entityName}/{entityId}'}
def get_entities_with_odata_query_parameters(
        self, instance_id, relative_path=None, force_search=None, proxy=None, search=None, select=None, skip_token=None, filter=None, order_by=None, expand=None, top=None, skip=None, skip_null_filter_parameters=None, custom_headers=None, raw=False, **operation_config):
    """Submits an OData request to the service.

    :param instance_id: Format - uuid. Customer Insights instance id.
    :type instance_id: str
    :param relative_path: Relative OData path. See
     https://www.odata.org/getting-started/basic-tutorial/ for info.
    :type relative_path: str
    :param force_search: Whether force use search to support the query.
    :type force_search: bool
    :param proxy: Whether or not we are requesting data by proxy.
    :type proxy: bool
    :param search:
    :type search: str
    :param select:
    :type select: str
    :param skip_token:
    :type skip_token: str
    :param filter:
    :type filter: str
    :param order_by:
    :type order_by: str
    :param expand:
    :type expand: str
    :param top: Format - int32.
    :type top: int
    :param skip: Format - int32.
    :type skip: int
    :param skip_null_filter_parameters:
    :type skip_null_filter_parameters: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the {instanceId} placeholder in the operation's URL template.
    url = self._client.format_url(
        self.get_entities_with_odata_query_parameters.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))
    # Serialize every optional query option the caller supplied.
    # relativePath is special-cased: it must not be percent-quoted.
    query_parameters = {}
    if relative_path is not None:
        query_parameters['relativePath'] = self._serialize.query("relative_path", relative_path, 'str', skip_quote=True)
    optional_query_options = [
        ('forceSearch', "force_search", force_search, 'bool'),
        ('proxy', "proxy", proxy, 'bool'),
        ('Search', "search", search, 'str'),
        ('Select', "select", select, 'str'),
        ('SkipToken', "skip_token", skip_token, 'str'),
        ('Filter', "filter", filter, 'str'),
        ('OrderBy', "order_by", order_by, 'str'),
        ('Expand', "expand", expand, 'str'),
        ('Top', "top", top, 'int'),
        ('Skip', "skip", skip, 'int'),
        ('SkipNullFilterParameters', "skip_null_filter_parameters", skip_null_filter_parameters, 'bool'),
    ]
    for wire_name, py_name, value, data_type in optional_query_options:
        if value is not None:
            query_parameters[wire_name] = self._serialize.query(py_name, value, data_type)
    # Request headers; caller-supplied headers may override defaults.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    # Issue the GET request.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Deserialize the payload only for status codes that carry a body
    # (401 is allowed above but has no documented body model).
    response_models = {
        200: 'ODataEntityPayload',
        400: 'ODataError',
        404: 'ODataError',
        500: 'ODataError',
        503: 'ODataError',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
get_entities_with_odata_query_parameters.metadata = {'url': '/instances/{instanceId}/data'}
def get_all_entity_metadata(
        self, instance_id, attributes_annotations=False, include_quarantined=False, custom_headers=None, raw=False, **operation_config):
    """Retrieves the flattened entity model for the provided instanceId.

    :param instance_id: Format - uuid. Customer Insights instance id.
    :type instance_id: str
    :param attributes_annotations: Indicates if extra annotations like
     'ReadOnly' or 'Mandatory' should be included.
    :type attributes_annotations: bool
    :param include_quarantined: Indicates if quarantined entities should
     be included in the output entity model.
    :type include_quarantined: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Build the URL from the operation metadata template.
    url = self._client.format_url(
        self.get_all_entity_metadata.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))
    # Both flags default to False (not None), so they are normally sent.
    query_parameters = {}
    if attributes_annotations is not None:
        query_parameters['attributesAnnotations'] = self._serialize.query("attributes_annotations", attributes_annotations, 'bool')
    if include_quarantined is not None:
        query_parameters['includeQuarantined'] = self._serialize.query("include_quarantined", include_quarantined, 'bool')
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/500 carry none here).
    response_models = {
        200: 'IC360EntityModel',
        400: 'ApiError',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
get_all_entity_metadata.metadata = {'url': '/instances/{instanceId}/manage/entities'}
def get_entity_metadata(
        self, instance_id, entity_name, attributes_annotations=False, custom_headers=None, raw=False, **operation_config):
    """Retrieves the entity metadata for the provided instanceId and
    entityName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param entity_name: Entity name
    :type entity_name: str
    :param attributes_annotations: Indicates if extra annotations like
     'ReadOnly' or 'Mandatory' should be included.
    :type attributes_annotations: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Build the URL from the operation metadata template.
    url = self._client.format_url(
        self.get_entity_metadata.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        entityName=self._serialize.url("entity_name", entity_name, 'str'))
    # attributes_annotations defaults to False, so it is normally sent.
    query_parameters = {}
    if attributes_annotations is not None:
        query_parameters['attributesAnnotations'] = self._serialize.query("attributes_annotations", attributes_annotations, 'bool')
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/500 carry none here).
    response_models = {
        200: 'IEntityMetadata',
        400: 'ApiError',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
get_entity_metadata.metadata = {'url': '/instances/{instanceId}/manage/entities/{entityName}'}
def get_entity_size(
        self, instance_id, entity_name, custom_headers=None, raw=False, **operation_config):
    """Retrieves the entity size for the provided instanceId and entityName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param entity_name: Entity name
    :type entity_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Build the URL from the operation metadata template.
    url = self._client.format_url(
        self.get_entity_size.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        entityName=self._serialize.url("entity_name", entity_name, 'str'))
    query_parameters = {}
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401 carries none here).
    response_models = {
        200: 'EntitySize',
        404: 'ApiError',
        500: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
get_entity_size.metadata = {'url': '/instances/{instanceId}/manage/entities/{entityName}/entitysize'}
def reset_an_instance(
        self, instance_id, instance_management_operation_scope=None, custom_headers=None, raw=False, **operation_config):
    """Reset scopes in the given instance. Provide optional management
    operation scope to reset only that scope. (Preview).

    :param instance_id: Format - uuid. The instance Id.
    :type instance_id: str
    :param instance_management_operation_scope: The management operation
     scope for reset.
    :type instance_management_operation_scope: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Build the URL from the operation metadata template.
    url = self._client.format_url(
        self.reset_an_instance.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))
    # Scope is optional; when omitted, the whole instance is reset.
    query_parameters = {}
    if instance_management_operation_scope is not None:
        query_parameters['instanceManagementOperationScope'] = self._serialize.query("instance_management_operation_scope", instance_management_operation_scope, 'str')
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    # Reset is exposed as an HTTP DELETE on the manage/reset resource.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 403, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: 'bool',
        400: 'ApiErrorResult',
        403: 'ApiErrorResult',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
reset_an_instance.metadata = {'url': '/instances/{instanceId}/manage/reset'}
def reset_instance_for_scope(
        self, instance_id, instance_management_operation_scope, custom_headers=None, raw=False, **operation_config):
    """Reset scopes in the given instance. Provide optional management
    operation scope to reset only that scope. (Preview).

    :param instance_id: Format - uuid. The instance Id.
    :type instance_id: str
    :param instance_management_operation_scope: The management operation
     scope for reset.
    :type instance_management_operation_scope: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Unlike reset_an_instance, the scope is part of the URL path here.
    url = self._client.format_url(
        self.reset_instance_for_scope.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        instanceManagementOperationScope=self._serialize.url("instance_management_operation_scope", instance_management_operation_scope, 'str'))
    query_parameters = {}
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    # Reset is exposed as an HTTP DELETE on the manage/reset resource.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 403, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: 'bool',
        400: 'ApiErrorResult',
        403: 'ApiErrorResult',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
reset_instance_for_scope.metadata = {'url': '/instances/{instanceId}/manage/reset/operationScope/{instanceManagementOperationScope}'}
def get_all_instances(
        self, custom_headers=None, raw=False, **operation_config):
    """Retrieves all instances of the current user.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # No path parameters: the metadata URL is used verbatim.
    url = self.get_all_instances.metadata['url']
    query_parameters = {}
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: '[InstanceInfo]',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
get_all_instances.metadata = {'url': '/instances'}
def get_all_instances_in_batches_by_instanceids(
        self, body=None, custom_headers=None, raw=False, **operation_config):
    """Retrieves instances based on instance ids, it can only accept batch of
    instances.

    :param body: Instance ids of instances to get.
    :type body: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # No path parameters: the metadata URL is used verbatim.
    url = self.get_all_instances_in_batches_by_instanceids.metadata['url']
    query_parameters = {}
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)
    # Serialize the instance-id list as the POST body when provided.
    body_content = self._serialize.body(body, '[str]') if body is not None else None
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: '[InstanceInfo]',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
get_all_instances_in_batches_by_instanceids.metadata = {'url': '/instances/batch'}
def get_instance_metadata(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Retrieves metadata for a Customer Insights instance based on its
    instanceId.

    :param instance_id: Format - uuid. Unique id for the Customer Insights
     instance.
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Build the URL from the operation metadata template.
    url = self._client.format_url(
        self.get_instance_metadata.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))
    query_parameters = {}
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: 'InstanceMetadata',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
get_instance_metadata.metadata = {'url': '/instances/{instanceId}'}
def delete_an_instance(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Delete an instance.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Build the URL from the operation metadata template.
    url = self._client.format_url(
        self.delete_an_instance.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))
    query_parameters = {}
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 403, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: 'bool',
        400: 'ApiErrorResult',
        403: 'ApiErrorResult',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
delete_an_instance.metadata = {'url': '/instances/{instanceId}'}
def create_an_instance(
        self, body=None, is_trial=False, custom_headers=None, raw=False, **operation_config):
    """Creates a new instance.

    :param body: The instance creation request.
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.InstanceCreationRequest
    :param is_trial: True if the new instance is a trial instance. False
     otherwise.
    :type is_trial: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # No path parameters: the metadata URL is used verbatim.
    url = self.create_an_instance.metadata['url']
    # is_trial defaults to False (not None), so it is normally sent.
    query_parameters = {}
    if is_trial is not None:
        query_parameters['isTrial'] = self._serialize.query("is_trial", is_trial, 'bool')
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)
    # Serialize the creation request as the POST body when provided.
    body_content = self._serialize.body(body, 'InstanceCreationRequest') if body is not None else None
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 403, 409, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: 'InstanceMetadata',
        400: 'ApiErrorResult',
        403: 'ApiErrorResult',
        409: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
create_an_instance.metadata = {'url': '/instances/V2'}
def update_an_instance(
        self, instance_id, body=None, custom_headers=None, raw=False, **operation_config):
    """Patches the Market Verticals, Display name, Domain Name, CDS
    environment and BYOSA secret to the instance.

    :param instance_id: Format - uuid.
    :type instance_id: str
    :param body:
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.InstanceCreationRequest
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Build the URL from the operation metadata template.
    url = self._client.format_url(
        self.update_an_instance.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))
    query_parameters = {}
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)
    # Serialize the update request as the PATCH body when provided.
    body_content = self._serialize.body(body, 'InstanceCreationRequest') if body is not None else None
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in (200, 400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, response)
    # Status codes with a documented body model (401/503 carry none here).
    response_models = {
        200: 'InstanceMetadata',
        400: 'ApiErrorResult',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response
    return deserialized
update_an_instance.metadata = {'url': '/instances/{instanceId}/V2'}
def copy_an_instance(
        self, body=None, is_trial=False, custom_headers=None, raw=False, **operation_config):
    """Create a new instance, copying metadata from an existing instance.

    :param body: The metadata to use to create the new instance.
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.InstanceCopyRequest
    :param is_trial: True if the new instance is a trial instance. False
     otherwise.
    :type is_trial: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # This operation has no path parameters; use the metadata URL directly.
    url = self.copy_an_instance.metadata['url']

    params = {}
    if is_trial is not None:
        params['isTrial'] = self._serialize.query("is_trial", is_trial, 'bool')

    hdrs = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        hdrs.update(custom_headers)

    # Serialize the optional request body.
    body_content = None if body is None else self._serialize.body(body, 'InstanceCopyRequest')

    # POST and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.post(url, params, hdrs, body_content),
        stream=False, **operation_config)
    if resp.status_code not in (200, 400, 401, 403, 409, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; other accepted codes (401/503)
    # deserialize nothing.
    models = {
        200: 'InstanceMetadata',
        400: 'ApiErrorResult',
        403: 'ApiErrorResult',
        409: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
copy_an_instance.metadata = {'url': '/instances/copy'}
def get_a_list_of_measures_metadata(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Retrieve the list of measures metadata for the provided instanceId.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_a_list_of_measures_metadata.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    params = {}
    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # Issue the GET and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; other accepted codes deserialize nothing.
    models = {200: '[MeasureMetadata]', 404: 'ApiError'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_a_list_of_measures_metadata.metadata = {'url': '/instances/{instanceId}/manage/measures'}
def create_a_measure(
        self, instance_id, body=None, custom_headers=None, raw=False, **operation_config):
    """Create new measure metadata with measureMetadata on instanceId.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param body: New Measure metadata to be created
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.MeasureMetadata
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.create_a_measure.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    params = {}
    hdrs = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        hdrs.update(custom_headers)

    # Serialize the optional request body.
    body_content = None if body is None else self._serialize.body(body, 'MeasureMetadata')

    # POST and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.post(url, params, hdrs, body_content),
        stream=False, **operation_config)
    if resp.status_code not in (200, 400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; 401/500/503 deserialize nothing.
    models = {200: 'MeasureMetadata', 400: 'ParsingError', 404: 'ApiError'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
create_a_measure.metadata = {'url': '/instances/{instanceId}/manage/measures'}
def get_metadata_for_a_measure(
        self, instance_id, measure_name, include_historic_stats=False, historic_stats_days=30, custom_headers=None, raw=False, **operation_config):
    """Retrieve the measure metadata for the provided instanceId and
    measureName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param measure_name: Name of the measure
    :type measure_name: str
    :param include_historic_stats: Boolean for historical stats
    :type include_historic_stats: bool
    :param historic_stats_days: Format - int32. Number of lookback days
    :type historic_stats_days: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_metadata_for_a_measure.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        measureName=self._serialize.url("measure_name", measure_name, 'str'))

    # Optional query parameters are omitted only when explicitly None.
    params = {}
    if include_historic_stats is not None:
        params['includeHistoricStats'] = self._serialize.query(
            "include_historic_stats", include_historic_stats, 'bool')
    if historic_stats_days is not None:
        params['historicStatsDays'] = self._serialize.query(
            "historic_stats_days", historic_stats_days, 'int')

    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # Issue the GET and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; other accepted codes deserialize nothing.
    models = {200: 'MeasureMetadata', 404: 'ApiError'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_metadata_for_a_measure.metadata = {'url': '/instances/{instanceId}/manage/measures/{measureName}'}
def update_a_measure(
        self, instance_id, measure_name, body=None, custom_headers=None, raw=False, **operation_config):
    """Update measures metadata for the provided instanceId and
    measureMetadata.

    The existing measure is retrieved using measureName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param measure_name: Name of the measure
    :type measure_name: str
    :param body: Update measure metadata
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.MeasureMetadata
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.update_a_measure.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        measureName=self._serialize.url("measure_name", measure_name, 'str'))

    params = {}
    hdrs = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        hdrs.update(custom_headers)

    # Serialize the optional request body.
    body_content = None if body is None else self._serialize.body(body, 'MeasureMetadata')

    # PUT and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.put(url, params, hdrs, body_content),
        stream=False, **operation_config)
    if resp.status_code not in (200, 400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; 401/500/503 deserialize nothing.
    models = {200: 'MeasureMetadata', 400: 'ParsingError', 404: 'ApiError'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
update_a_measure.metadata = {'url': '/instances/{instanceId}/manage/measures/{measureName}'}
def delete_a_measure(
        self, instance_id, measure_name, custom_headers=None, raw=False, **operation_config):
    """Delete the measure metadata for the provided instanceId using
    measureName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param measure_name: Name of the measure
    :type measure_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.delete_a_measure.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        measureName=self._serialize.url("measure_name", measure_name, 'str'))

    params = {}
    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # DELETE and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.delete(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; 401/500/503 deserialize nothing.
    models = {200: 'DeletionResponse', 400: 'DeletionResponse', 404: 'ApiError'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
delete_a_measure.metadata = {'url': '/instances/{instanceId}/manage/measures/{measureName}'}
def get_key_ring(
        self, instance_id, data_source_name, entity_name, key, custom_headers=None, raw=False, **operation_config):
    """Get the KeyRing (collection of all alternate keys) for the given
    instance by alternate key.

    :param instance_id: Format - uuid. Identifier for the instance
    :type instance_id: str
    :param data_source_name: The name of the datasource that contains the
     given entity.
    :type data_source_name: str
    :param entity_name: The name of the entity that contains the given
     key.
    :type entity_name: str
    :param key: The value of the alternate key provided.
    :type key: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ApiErrorResult or ClientRawResponse if raw=true
    :rtype: ~microsoft.dynamics.customerinsights.api.models.ApiErrorResult
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_key_ring.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    # All three query parameters are required by this operation.
    params = {
        'dataSourceName': self._serialize.query("data_source_name", data_source_name, 'str'),
        'entityName': self._serialize.query("entity_name", entity_name, 'str'),
        'key': self._serialize.query("key", key, 'str'),
    }

    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # NOTE(review): 200 is not in the accepted list, so a success response
    # raises HttpOperationError — matches the generated docstring
    # (ApiErrorResult return), but confirm against the service spec.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (400, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; 401/500/503 deserialize nothing.
    models = {400: 'ApiErrorResult', 404: 'ApiErrorResult'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_key_ring.metadata = {'url': '/instances/{instanceId}/profile/keyring'}
def get_profile_store_state(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Get the profile store state information.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_profile_store_state.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    params = {}
    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # Issue the GET and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; 401/500 deserialize nothing.
    models = {200: 'ProfileStoreStateInfo', 404: 'ApiError', 503: 'ApiError'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_profile_store_state.metadata = {'url': '/instances/{instanceId}/profilestore/stateinfo'}
def get_all_role_definitions(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Get all role definitions.

    :param instance_id:
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~microsoft.dynamics.customerinsights.api.models.RoleDefinition]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_all_role_definitions.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    params = {}
    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # Issue the GET and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Only 200 carries a body; other accepted codes deserialize nothing.
    deserialized = None
    header_dict = {}
    if resp.status_code == 200:
        deserialized = self._deserialize('[RoleDefinition]', resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_all_role_definitions.metadata = {'url': '/instances/{instanceId}/rbac/roles'}
def get_current_user_role(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Get the role of the current user.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RoleAssignment or ClientRawResponse if raw=true
    :rtype: ~microsoft.dynamics.customerinsights.api.models.RoleAssignment
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_current_user_role.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    params = {}
    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # Issue the GET and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Only 200 carries a body; other accepted codes deserialize nothing.
    deserialized = None
    header_dict = {}
    if resp.status_code == 200:
        deserialized = self._deserialize('RoleAssignment', resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_current_user_role.metadata = {'url': '/instances/{instanceId}/rbac/myrole'}
def update_a_role_assignment(
        self, instance_id, principal_id, body=None, custom_headers=None, raw=False, **operation_config):
    """Add or update a role assignment for a principal.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param principal_id: The principal id.
    :type principal_id: str
    :param body: The role assignment.
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.RoleAssignment
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~microsoft.dynamics.customerinsights.api.models.RoleAssignment]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.update_a_role_assignment.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        principalId=self._serialize.url("principal_id", principal_id, 'str'))

    params = {}
    hdrs = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        hdrs.update(custom_headers)

    # Serialize the optional request body.
    body_content = None if body is None else self._serialize.body(body, 'RoleAssignment')

    # PUT and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.put(url, params, hdrs, body_content),
        stream=False, **operation_config)
    if resp.status_code not in (200, 401, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Only 200 carries a body; other accepted codes deserialize nothing.
    deserialized = None
    header_dict = {}
    if resp.status_code == 200:
        deserialized = self._deserialize('[RoleAssignment]', resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
update_a_role_assignment.metadata = {'url': '/instances/{instanceId}/rbac/principals/{principalId}/assignment'}
def deletes_a_role_assignment(
        self, instance_id, principal_id, custom_headers=None, raw=False, **operation_config):
    """Delete a role assignment for the principal.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param principal_id: The principal id.
    :type principal_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.deletes_a_role_assignment.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'),
        principalId=self._serialize.url("principal_id", principal_id, 'str'))

    params = {}
    hdrs = {}
    if custom_headers:
        hdrs.update(custom_headers)

    # DELETE and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.delete(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # No body is deserialized; only the raw wrapper carries header metadata.
    if raw:
        wrapped = ClientRawResponse(None, resp)
        wrapped.add_headers({'WWW-Authenticate': 'str'})
        return wrapped
deletes_a_role_assignment.metadata = {'url': '/instances/{instanceId}/rbac/principals/{principalId}/assignment'}
def get_all_role_assignments(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Get all role assignments for the instance.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype:
     list[~microsoft.dynamics.customerinsights.api.models.RoleAssignment]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_all_role_assignments.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    params = {}
    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # Issue the GET and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Only 200 carries a body; other accepted codes deserialize nothing.
    deserialized = None
    header_dict = {}
    if resp.status_code == 200:
        deserialized = self._deserialize('[RoleAssignment]', resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_all_role_assignments.metadata = {'url': '/instances/{instanceId}/rbac/assignments'}
def get_all_relationships(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Get all relationship metadata for the provided instanceId.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Resolve the templated URL for this operation.
    url = self._client.format_url(
        self.get_all_relationships.metadata['url'],
        instanceId=self._serialize.url("instance_id", instance_id, 'str'))

    params = {}
    hdrs = {'Accept': 'application/json'}
    if custom_headers:
        hdrs.update(custom_headers)

    # Issue the GET and fail fast on any undocumented status code.
    resp = self._client.send(
        self._client.get(url, params, hdrs), stream=False, **operation_config)
    if resp.status_code not in (200, 401, 404, 500, 503):
        raise HttpOperationError(self._deserialize, resp)

    # Documented status -> response model; 401/500 deserialize nothing.
    models = {200: '[RelationshipMetadata]', 404: 'ApiError', 503: 'ApiError'}
    deserialized = None
    header_dict = {}
    if resp.status_code in models:
        deserialized = self._deserialize(models[resp.status_code], resp)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        wrapped = ClientRawResponse(deserialized, resp)
        wrapped.add_headers(header_dict)
        return wrapped
    return deserialized
get_all_relationships.metadata = {'url': '/instances/{instanceId}/manage/relationships'}
def create_a_relationship(
        self, instance_id, body=None, custom_headers=None, raw=False, **operation_config):
    """Creates new relationship metadata for the provided instanceId, using
    input.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param body: The updated relationship metadata
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.RelationshipMetadata
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str')
    }
    url = self._client.format_url(self.create_a_relationship.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the optional request body.
    body_content = self._serialize.body(body, 'RelationshipMetadata') if body is not None else None

    # Issue the POST request.
    request = self._client.post(url, {}, headers, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 400, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'RelationshipMetadata',
        400: 'ApiError',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
create_a_relationship.metadata = {'url': '/instances/{instanceId}/manage/relationships'}
def get_a_relationship(
        self, instance_id, relationship_name, custom_headers=None, raw=False, **operation_config):
    """Gets the relationship metadata for the provided instanceId and
    relationshipName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param relationship_name: Relationship name
    :type relationship_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'relationshipName': self._serialize.url("relationship_name", relationship_name, 'str')
    }
    url = self._client.format_url(self.get_a_relationship.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET request (no query parameters for this operation).
    request = self._client.get(url, {}, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'RelationshipMetadata',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_a_relationship.metadata = {'url': '/instances/{instanceId}/manage/relationships/{relationshipName}'}
def delete_a_relationship(
        self, instance_id, relationship_name, custom_headers=None, raw=False, **operation_config):
    """Deletes the relationship metadata for the provided instanceId and
    relationshipName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param relationship_name: Relationship name
    :type relationship_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'relationshipName': self._serialize.url("relationship_name", relationship_name, 'str')
    }
    url = self._client.format_url(self.delete_a_relationship.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the DELETE request (no query parameters for this operation).
    request = self._client.delete(url, {}, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'DeletionResponse',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
delete_a_relationship.metadata = {'url': '/instances/{instanceId}/manage/relationships/{relationshipName}'}
def update_a_relationship(
        self, instance_id, relationship_name, body=None, custom_headers=None, raw=False, **operation_config):
    """Updates the relationship metadata for the provided instanceId and
    relationshipName, using input.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param relationship_name: Relationship name
    :type relationship_name: str
    :param body: The updated relationship metadata
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.RelationshipMetadata
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'relationshipName': self._serialize.url("relationship_name", relationship_name, 'str')
    }
    url = self._client.format_url(self.update_a_relationship.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the optional request body.
    body_content = self._serialize.body(body, 'RelationshipMetadata') if body is not None else None

    # Issue the PUT request.
    request = self._client.put(url, {}, headers, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 400, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'RelationshipMetadata',
        400: 'ApiError',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
update_a_relationship.metadata = {'url': '/instances/{instanceId}/manage/relationships/{relationshipName}'}
def get_search_configuration(
        self, instance_id, custom_headers=None, raw=False, **operation_config):
    """Gets the search configuration for the instance.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str')
    }
    url = self._client.format_url(self.get_search_configuration.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET request (no query parameters for this operation).
    request = self._client.get(url, {}, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'InstanceSearchConfiguration',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_search_configuration.metadata = {'url': '/instances/{instanceId}/manage/search'}
def update_search_configuration(
        self, instance_id, body=None, custom_headers=None, raw=False, **operation_config):
    """Updates the search configuration for the instance.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param body: The search configuration for the instance.
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.InstanceSearchConfiguration
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str')
    }
    url = self._client.format_url(self.update_search_configuration.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the optional request body.
    body_content = self._serialize.body(body, 'InstanceSearchConfiguration') if body is not None else None

    # Issue the PUT request.
    request = self._client.put(url, {}, headers, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 400, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'InstanceSearchConfiguration',
        400: 'ApiError',
        404: 'ApiError',
        503: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
update_search_configuration.metadata = {'url': '/instances/{instanceId}/manage/search'}
def get_all_segments(
        self, instance_id, include_historic_stats=False, historic_stats_days=10, custom_headers=None, raw=False, **operation_config):
    """Retrieves a list of segment metadata for the provided instanceId.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param include_historic_stats: Optional parameter to retrieve
     evaluation history of all the segments in instanceId.
    :type include_historic_stats: bool
    :param historic_stats_days: Format - int32. Optional parameter to get
     number of days evaluation history.
    :type historic_stats_days: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str')
    }
    url = self._client.format_url(self.get_all_segments.metadata['url'], **path_args)

    # Optional query parameters are only sent when not None.
    query = {}
    if include_historic_stats is not None:
        query['includeHistoricStats'] = self._serialize.query("include_historic_stats", include_historic_stats, 'bool')
    if historic_stats_days is not None:
        query['historicStatsDays'] = self._serialize.query("historic_stats_days", historic_stats_days, 'int')

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET request.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: '[SegmentMetadata]',
        404: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_all_segments.metadata = {'url': '/instances/{instanceId}/manage/segments'}
def create_a_segment(
        self, instance_id, body=None, custom_headers=None, raw=False, **operation_config):
    """Create new segment metadata with segmentMetadata on instanceId.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param body: New Segment metadata to be created
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.SegmentMetadata
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str')
    }
    url = self._client.format_url(self.create_a_segment.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the optional request body.
    body_content = self._serialize.body(body, 'SegmentMetadata') if body is not None else None

    # Issue the POST request.
    request = self._client.post(url, {}, headers, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # Note: creation success is 201 (Created), not 200.
    if response.status_code not in [201, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        201: 'SegmentMetadata',
        404: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
create_a_segment.metadata = {'url': '/instances/{instanceId}/manage/segments'}
def activate_segment(
        self, instance_id, segment_name, custom_headers=None, raw=False, **operation_config):
    """Activate segment on instanceId with segmentName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param segment_name: Unique name of a segment
    :type segment_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'segmentName': self._serialize.url("segment_name", segment_name, 'str')
    }
    url = self._client.format_url(self.activate_segment.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT request (no body for this operation).
    request = self._client.put(url, {}, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'bool',
        404: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
activate_segment.metadata = {'url': '/instances/{instanceId}/manage/segments/{segmentName}/activate'}
def deactivate_segment(
        self, instance_id, segment_name, custom_headers=None, raw=False, **operation_config):
    """Deactivate segment on instanceId with segmentName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param segment_name: Unique name of a segment
    :type segment_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'segmentName': self._serialize.url("segment_name", segment_name, 'str')
    }
    url = self._client.format_url(self.deactivate_segment.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the PUT request (no body for this operation).
    request = self._client.put(url, {}, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'bool',
        404: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
deactivate_segment.metadata = {'url': '/instances/{instanceId}/manage/segments/{segmentName}/deactivate'}
def update_a_segment(
        self, instance_id, segment_name, body=None, custom_headers=None, raw=False, **operation_config):
    """Updates segment metadata for the provided instanceId and segmentName
    with segmentMetadata.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param segment_name: Unique name of a segment
    :type segment_name: str
    :param body: New Segment metadata to be updated
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.SegmentMetadata
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'segmentName': self._serialize.url("segment_name", segment_name, 'str')
    }
    url = self._client.format_url(self.update_a_segment.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the optional request body.
    body_content = self._serialize.body(body, 'SegmentMetadata') if body is not None else None

    # Issue the PUT request.
    request = self._client.put(url, {}, headers, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    payload_models = {
        200: 'SegmentMetadata',
        404: 'ApiError',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
update_a_segment.metadata = {'url': '/instances/{instanceId}/manage/segments/{segmentName}'}
def delete_segment(
        self, instance_id, segment_name, custom_headers=None, raw=False, **operation_config):
    """Deletes the segment metadata for the provided instanceId and
    segmentName.

    :param instance_id: Format - uuid. Customer Insights instance id
    :type instance_id: str
    :param segment_name: Unique name of a segment
    :type segment_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: DeletionResponse or ClientRawResponse if raw=true
    :rtype:
     ~microsoft.dynamics.customerinsights.api.models.DeletionResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'segmentName': self._serialize.url("segment_name", segment_name, 'str')
    }
    url = self._client.format_url(self.delete_segment.metadata['url'], **path_args)

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the DELETE request (no query parameters for this operation).
    request = self._client.delete(url, {}, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 400, 401, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    # Both 200 and 400 deserialize into DeletionResponse for this operation.
    payload_models = {
        200: 'DeletionResponse',
        400: 'DeletionResponse',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
delete_segment.metadata = {'url': '/instances/{instanceId}/manage/segments/{segmentName}'}
def get_a_workflow_job_information(
        self, instance_id, workflow_name, job_id, include_tasks=False, custom_headers=None, raw=False, **operation_config):
    """Retrieve information about a workflow job.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param workflow_name: The name of the workflow.
    :type workflow_name: str
    :param job_id: Format - uuid. The job id.
    :type job_id: str
    :param include_tasks: Flag to include tasks or not.
    :type include_tasks: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Expand the URL template with the serialized path arguments.
    path_args = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str'),
        'jobId': self._serialize.url("job_id", job_id, 'str')
    }
    url = self._client.format_url(self.get_a_workflow_job_information.metadata['url'], **path_args)

    # Optional query parameter is only sent when not None.
    query = {}
    if include_tasks is not None:
        query['includeTasks'] = self._serialize.query("include_tasks", include_tasks, 'bool')

    # Request headers; caller-supplied headers take precedence on conflict.
    headers = {'Accept': 'application/json'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET request.
    request = self._client.get(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Status codes that carry a deserializable payload (and response headers).
    # This operation uses ApiErrorResult (not ApiError) for error bodies.
    payload_models = {
        200: 'GraphJobInfo',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    deserialized = None
    header_dict = {}
    model = payload_models.get(response.status_code)
    if model is not None:
        deserialized = self._deserialize(model, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_a_workflow_job_information.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/jobs/{jobId}'}
def cancel_a_workflow_job(
        self, instance_id, workflow_name, job_id, custom_headers=None, raw=False, **operation_config):
    """Request cancellation of a single workflow job.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param workflow_name: The name of the workflow.
    :type workflow_name: str
    :param job_id: Format - uuid. The job id.
    :type job_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str'),
        'jobId': self._serialize.url("job_id", job_id, 'str')
    }
    url = self._client.format_url(
        self.cancel_a_workflow_job.metadata['url'], **path_format_arguments)

    # This operation carries no query string.
    query_parameters = {}

    # JSON response expected; caller-supplied headers take precedence.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the POST and reject any unexpected status code.
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: 'GraphJobInfo',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
cancel_a_workflow_job.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/jobs/{jobId}/cancel'}
def get_list_of_recent_workflow_jobs(
        self, instance_id, workflow_name, top=10, include_tasks=False, custom_headers=None, raw=False, **operation_config):
    """List recent job information entries for a workflow.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param workflow_name: The name of the workflow.
    :type workflow_name: str
    :param top: Format - int32. The number of job informations to
     retrieve.
    :type top: int
    :param include_tasks: Flag to include tasks or not.
    :type include_tasks: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(
        self.get_list_of_recent_workflow_jobs.metadata['url'], **path_format_arguments)

    # Optional paging/expansion query parameters.
    query_parameters = {}
    if top is not None:
        query_parameters['top'] = self._serialize.query("top", top, 'int')
    if include_tasks is not None:
        query_parameters['includeTasks'] = self._serialize.query("include_tasks", include_tasks, 'bool')

    # JSON response expected; caller-supplied headers take precedence.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET and reject any unexpected status code.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: '[GraphJobInfo]',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_list_of_recent_workflow_jobs.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/jobs'}
def submit_a_workflow_job(
        self, instance_id, workflow_name, body=None, operation_type=None, identifiers=None, force_run_requested=False, custom_headers=None, raw=False, **operation_config):
    """Submit a workflow job of the given operation type for an instance.

    Optionally takes a list of identifiers (only when operationType is
    not OperationType.All) and a flag indicating whether to force run.

    :param instance_id: Format - uuid. The Customer Insights instance id.
    :type instance_id: str
    :param workflow_name: The workflow name.
    :type workflow_name: str
    :param body: Job Creation Request.
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.OnDemandJobRequest
    :param operation_type: The workflow operation type.
    :type operation_type: str
    :param identifiers: A list of workflow identifiers.
    :type identifiers: list[str]
    :param force_run_requested: Whether to request a force run.
    :type force_run_requested: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(
        self.submit_a_workflow_job.metadata['url'], **path_format_arguments)

    # Optional query parameters; identifiers are sent comma-separated.
    query_parameters = {}
    if operation_type is not None:
        query_parameters['operationType'] = self._serialize.query("operation_type", operation_type, 'str')
    if identifiers is not None:
        query_parameters['identifiers'] = self._serialize.query("identifiers", identifiers, '[str]', div=',')
    if force_run_requested is not None:
        query_parameters['forceRunRequested'] = self._serialize.query("force_run_requested", force_run_requested, 'bool')

    # JSON in and out; caller-supplied headers take precedence.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the optional request payload.
    body_content = None if body is None else self._serialize.body(body, 'OnDemandJobRequest')

    # Issue the POST and reject any unexpected status code.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: 'GraphJobInfo',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
submit_a_workflow_job.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/jobs'}
def get_list_of_workflow_task_information_history(
        self, instance_id, workflow_name, top=10, custom_headers=None, raw=False, **operation_config):
    """List historic task information entries for a workflow.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param workflow_name: The name of the workflow.
    :type workflow_name: str
    :param top: Format - int32. The number of task informations to
     retrieve.
    :type top: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(
        self.get_list_of_workflow_task_information_history.metadata['url'],
        **path_format_arguments)

    # Optional paging query parameter.
    query_parameters = {}
    if top is not None:
        query_parameters['top'] = self._serialize.query("top", top, 'int')

    # JSON response expected; caller-supplied headers take precedence.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET and reject any unexpected status code.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: '[GraphTaskInfo]',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_list_of_workflow_task_information_history.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/history'}
def get_workflow_status(
        self, instance_id, workflow_name, custom_headers=None, raw=False, **operation_config):
    """Retrieve the current status of a workflow.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param workflow_name: The workflow name.
    :type workflow_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(
        self.get_workflow_status.metadata['url'], **path_format_arguments)

    # This operation carries no query string.
    query_parameters = {}

    # JSON response expected; caller-supplied headers take precedence.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET and reject any unexpected status code.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 404, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: '[GraphNodeInfo]',
        404: 'ApiErrorResult',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_workflow_status.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/status'}
def get_supported_timezones(
        self, workflow_name, instance_id, custom_headers=None, raw=False, **operation_config):
    """List timezones supported when creating workflow schedules.

    :param workflow_name: Any workflow name.
    :type workflow_name: str
    :param instance_id:
    :type instance_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str'),
        'instanceId': self._serialize.url("instance_id", instance_id, 'str')
    }
    url = self._client.format_url(
        self.get_supported_timezones.metadata['url'], **path_format_arguments)

    # This operation carries no query string.
    query_parameters = {}

    # JSON response expected; caller-supplied headers take precedence.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET and reject any unexpected status code.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: '[TimezoneDetail]',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_supported_timezones.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/schedules/supportedTimezones'}
def get_workflow_schedules(
        self, instance_id, workflow_name, custom_headers=None, raw=False, **operation_config):
    """Retrieve all refresh schedules defined for a workflow.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param workflow_name: Any workflow name.
    :type workflow_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(
        self.get_workflow_schedules.metadata['url'], **path_format_arguments)

    # This operation carries no query string.
    query_parameters = {}

    # JSON response expected; caller-supplied headers take precedence.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET and reject any unexpected status code.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: '[WorkflowRefreshSchedule]',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_workflow_schedules.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/schedules'}
def create_workflow_refresh_schedule(
        self, instance_id, workflow_name, body=None, custom_headers=None, raw=False, **operation_config):
    """Create a refresh schedule for a workflow.

    :param instance_id: Format - uuid. The instance id.
    :type instance_id: str
    :param workflow_name: Any workflow name.
    :type workflow_name: str
    :param body: A schedule object to create.
    :type body:
     ~microsoft.dynamics.customerinsights.api.models.WorkflowRefreshSchedule
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: object or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(
        self.create_workflow_refresh_schedule.metadata['url'], **path_format_arguments)

    # This operation carries no query string.
    query_parameters = {}

    # JSON in and out; caller-supplied headers take precedence.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json-patch+json; charset=utf-8',
    }
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the optional request payload.
    body_content = None if body is None else self._serialize.body(body, 'WorkflowRefreshSchedule')

    # Issue the POST and reject any unexpected status code.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Map the status code to the body model; 401/503 have no body model.
    deserialized = None
    header_dict = {}
    response_models = {
        200: 'WorkflowRefreshSchedule',
        500: 'ApiErrorResult',
    }
    model_name = response_models.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
create_workflow_refresh_schedule.metadata = {'url': '/instances/{instanceId}/workflows/{workflowName}/schedules'}
def get_an_entity_profile(
        self, instance_id, qualified_entity_name, custom_headers=None, raw=False, **operation_config):
    """Retrieve the data profile for an entity.

    :param instance_id: Format - uuid. Customer Insights instance id.
    :type instance_id: str
    :param qualified_entity_name: Qualified Entity Name.
    :type qualified_entity_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: EntityDataProfile or ClientRawResponse if raw=true
    :rtype:
     ~microsoft.dynamics.customerinsights.api.models.EntityDataProfile or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
    """
    # Fill the URL template attached to this operation's metadata.
    path_format_arguments = {
        'instanceId': self._serialize.url("instance_id", instance_id, 'str'),
        'qualifiedEntityName': self._serialize.url("qualified_entity_name", qualified_entity_name, 'str')
    }
    url = self._client.format_url(
        self.get_an_entity_profile.metadata['url'], **path_format_arguments)

    # This operation carries no query string.
    query_parameters = {}

    # JSON response expected; caller-supplied headers take precedence.
    header_parameters = {'Accept': 'application/json'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Issue the GET and reject any unexpected status code.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200, 401, 500, 503]:
        raise HttpOperationError(self._deserialize, response)

    # Only 200 carries a body model; other accepted codes return None.
    deserialized = None
    header_dict = {}
    if response.status_code == 200:
        deserialized = self._deserialize('EntityDataProfile', response)
        header_dict = {'WWW-Authenticate': 'str'}

    if raw:
        raw_result = ClientRawResponse(deserialized, response)
        raw_result.add_headers(header_dict)
        return raw_result
    return deserialized
get_an_entity_profile.metadata = {'url': '/instances/{instanceId}/dataprofile/{qualifiedEntityName}'}
| 40.88579
| 273
| 0.639593
| 17,224
| 170,044
| 6.110718
| 0.026068
| 0.028028
| 0.03162
| 0.039525
| 0.944181
| 0.930024
| 0.917464
| 0.900524
| 0.884058
| 0.873483
| 0
| 0.011164
| 0.273053
| 170,044
| 4,158
| 274
| 40.895623
| 0.840292
| 0.284385
| 0
| 0.781416
| 1
| 0
| 0.118451
| 0.031678
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024779
| false
| 0
| 0.002655
| 0
| 0.075664
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ab61eb8bd23879c527e16fda568049acded97860
| 9,020
|
py
|
Python
|
model.py
|
parkjh688/Real-time-GesRec
|
8e2fd272c5510a16e6dcf3eb3b73d62b86241b9a
|
[
"MIT"
] | 1
|
2020-02-16T01:39:47.000Z
|
2020-02-16T01:39:47.000Z
|
model.py
|
okankop/Real-time-GesRec
|
3a57a85f0f191deaffdaedafb9d4f33d1de269f7
|
[
"MIT"
] | null | null | null |
model.py
|
okankop/Real-time-GesRec
|
3a57a85f0f191deaffdaedafb9d4f33d1de269f7
|
[
"MIT"
] | 3
|
2019-06-17T16:02:44.000Z
|
2019-08-13T19:39:57.000Z
|
import torch
from torch import nn
import pdb
from models import resnet, resnext, resnetl, c3d
def generate_model(opt):
    """Build the 3D-CNN gesture model selected by *opt* and return it.

    Constructs one of resnet10 / resnetl10 / resnext101 / c3d_v1,
    optionally moves it to CUDA (wrapped in ``nn.DataParallel``),
    optionally loads pretrained weights, adapts the first conv layer to
    the requested input modality (RGB / Depth / RGB-D), and replaces the
    final fully-connected layer for fine-tuning.

    :param opt: parsed options namespace; reads model, model_depth,
     n_classes, resnet_shortcut, resnext_cardinality, sample_size,
     sample_duration, no_cuda, pretrain_path, arch, modality,
     n_finetune_classes, ft_begin_index.
    :return: ``(model, parameters)`` where *parameters* is either the
     fine-tuning parameter groups (when a pretrain path was given) or
     ``model.parameters()``.
    """
    assert opt.model in [
        'resnet', 'resnetl', 'resnext', 'c3d'
    ]
    # Instantiate the requested backbone; each branch also imports the
    # matching get_fine_tuning_parameters used after loading pretrained
    # weights.
    if opt.model == 'resnet':
        assert opt.model_depth in [10]
        from models.resnet import get_fine_tuning_parameters
        if opt.model_depth == 10:
            model = resnet.resnet10(
                num_classes=opt.n_classes,
                shortcut_type=opt.resnet_shortcut,
                sample_size=opt.sample_size,
                sample_duration=opt.sample_duration)
    elif opt.model == 'resnetl':
        assert opt.model_depth in [10]
        from models.resnetl import get_fine_tuning_parameters
        if opt.model_depth == 10:
            model = resnetl.resnetl10(
                num_classes=opt.n_classes,
                shortcut_type=opt.resnet_shortcut,
                sample_size=opt.sample_size,
                sample_duration=opt.sample_duration)
    elif opt.model == 'resnext':
        assert opt.model_depth in [101]
        from models.resnext import get_fine_tuning_parameters
        if opt.model_depth == 101:
            model = resnext.resnet101(
                num_classes=opt.n_classes,
                shortcut_type=opt.resnet_shortcut,
                cardinality=opt.resnext_cardinality,
                sample_size=opt.sample_size,
                sample_duration=opt.sample_duration)
    elif opt.model == 'c3d':
        assert opt.model_depth in [10]
        from models.c3d import get_fine_tuning_parameters
        if opt.model_depth == 10:
            model = c3d.c3d_v1(
                sample_size=opt.sample_size,
                sample_duration=opt.sample_duration,
                num_classes=opt.n_classes)

    if not opt.no_cuda:
        model = model.cuda()
        model = nn.DataParallel(model, device_ids=None)
        if opt.pretrain_path:
            print('loading pretrained model {}'.format(opt.pretrain_path))
            pretrain = torch.load(opt.pretrain_path)
            assert opt.arch == pretrain['arch']
            model.load_state_dict(pretrain['state_dict'])
            # Adapt the first conv layer to the input modality.
            if opt.modality == 'RGB' and opt.model != 'c3d':
                print("[INFO]: RGB model is used for init model")
                model = _modify_first_conv_layer(model, 3, 3)  # Check models trained (3,7,7) or (7,7,7)
            elif opt.modality == 'Depth':
                print("[INFO]: Converting the pretrained model to Depth init model")
                model = _construct_depth_model(model)
                # BUGFIX: message previously said "Flow model ready."
                print("[INFO]: Done. Depth model ready.")
            elif opt.modality == 'RGB-D':
                print("[INFO]: Converting the pretrained model to RGB+D init model")
                model = _construct_rgbdepth_model(model)
                print("[INFO]: Done. RGB-D model ready.")
            # If the first kernel's temporal extent exceeds the clip
            # length, shrink it to fit the sample duration.
            modules = list(model.modules())
            first_conv_idx = list(filter(lambda x: isinstance(modules[x], nn.Conv3d),
                                         list(range(len(modules)))))[0]
            conv_layer = modules[first_conv_idx]
            if conv_layer.kernel_size[0] > opt.sample_duration:
                print("[INFO]: RGB model is used for init model")
                model = _modify_first_conv_layer(model, int(opt.sample_duration / 2), 1)
            # Replace the classifier head for fine-tuning.
            if opt.model == 'c3d':  # CHECK HERE
                model.module.fc = nn.Linear(
                    model.module.fc[0].in_features, model.module.fc[0].out_features)
                model.module.fc = model.module.fc.cuda()
            else:
                model.module.fc = nn.Linear(model.module.fc.in_features,
                                            opt.n_finetune_classes)
                model.module.fc = model.module.fc.cuda()
            parameters = get_fine_tuning_parameters(model, opt.ft_begin_index)
            return model, parameters
    else:
        # CPU path: same pretrain/modality handling but without the
        # DataParallel wrapper (no .module indirection, no .cuda()).
        if opt.pretrain_path:
            print('loading pretrained model {}'.format(opt.pretrain_path))
            pretrain = torch.load(opt.pretrain_path)
            assert opt.arch == pretrain['arch']
            model.load_state_dict(pretrain['state_dict'])
            if opt.modality == 'RGB' and opt.model != 'c3d':
                print("[INFO]: RGB model is used for init model")
                model = _modify_first_conv_layer(model, 3, 3)
            elif opt.modality == 'Depth':
                print("[INFO]: Converting the pretrained model to Depth init model")
                model = _construct_depth_model(model)
                # BUGFIX: message previously read "Deoth model ready."
                print("[INFO]: Depth model ready.")
            elif opt.modality == 'RGB-D':
                print("[INFO]: Converting the pretrained model to RGB-D init model")
                model = _construct_rgbdepth_model(model)
                print("[INFO]: Done. RGB-D model ready.")
            modules = list(model.modules())
            first_conv_idx = list(filter(lambda x: isinstance(modules[x], nn.Conv3d),
                                         list(range(len(modules)))))[0]
            conv_layer = modules[first_conv_idx]
            if conv_layer.kernel_size[0] > opt.sample_duration:
                print("[INFO]: RGB model is used for init model")
                model = _modify_first_conv_layer(model, int(opt.sample_duration / 2), 1)
            if opt.model == 'c3d':  # CHECK HERE
                model.fc = nn.Linear(
                    model.fc[0].in_features, model.fc[0].out_features)
            else:
                model.fc = nn.Linear(model.fc.in_features,
                                     opt.n_finetune_classes)
            parameters = get_fine_tuning_parameters(model, opt.ft_begin_index)
            return model, parameters
    return model, model.parameters()
def _construct_depth_model(base_model):
# modify the first convolution kernels for Depth input
modules = list(base_model.modules())
first_conv_idx = list(filter(lambda x: isinstance(modules[x], nn.Conv3d),
list(range(len(modules)))))[0]
conv_layer = modules[first_conv_idx]
container = modules[first_conv_idx - 1]
# modify parameters, assume the first blob contains the convolution kernels
motion_length = 1
params = [x.clone() for x in conv_layer.parameters()]
kernel_size = params[0].size()
new_kernel_size = kernel_size[:1] + (1*motion_length, ) + kernel_size[2:]
new_kernels = params[0].data.mean(dim=1, keepdim=True).expand(new_kernel_size).contiguous()
new_conv = nn.Conv3d(1, conv_layer.out_channels, conv_layer.kernel_size, conv_layer.stride,
conv_layer.padding, bias=True if len(params) == 2 else False)
new_conv.weight.data = new_kernels
if len(params) == 2:
new_conv.bias.data = params[1].data # add bias if neccessary
layer_name = list(container.state_dict().keys())[0][:-7] # remove .weight suffix to get the layer name
# replace the first convlution layer
setattr(container, layer_name, new_conv)
return base_model
def _construct_rgbdepth_model(base_model):
# modify the first convolution kernels for RGB-D input
modules = list(base_model.modules())
first_conv_idx = list(filter(lambda x: isinstance(modules[x], nn.Conv3d),
list(range(len(modules)))))[0]
conv_layer = modules[first_conv_idx]
container = modules[first_conv_idx - 1]
# modify parameters, assume the first blob contains the convolution kernels
motion_length = 1
params = [x.clone() for x in conv_layer.parameters()]
kernel_size = params[0].size()
new_kernel_size = kernel_size[:1] + (1 * motion_length,) + kernel_size[2:]
new_kernels = torch.mul(torch.cat((params[0].data, params[0].data.mean(dim=1,keepdim=True).expand(new_kernel_size).contiguous()), 1), 0.6)
new_kernel_size = kernel_size[:1] + (3 + 1 * motion_length,) + kernel_size[2:]
new_conv = nn.Conv3d(4, conv_layer.out_channels, conv_layer.kernel_size, conv_layer.stride,
conv_layer.padding, bias=True if len(params) == 2 else False)
new_conv.weight.data = new_kernels
if len(params) == 2:
new_conv.bias.data = params[1].data # add bias if neccessary
layer_name = list(container.state_dict().keys())[0][:-7] # remove .weight suffix to get the layer name
# replace the first convolution layer
setattr(container, layer_name, new_conv)
return base_model
def _modify_first_conv_layer(base_model, new_kernel_size1, new_filter_num):
modules = list(base_model.modules())
first_conv_idx = list(filter(lambda x: isinstance(modules[x], nn.Conv3d),
list(range(len(modules)))))[0]
conv_layer = modules[first_conv_idx]
container = modules[first_conv_idx - 1]
new_conv = nn.Conv3d(new_filter_num, conv_layer.out_channels, kernel_size=(new_kernel_size1,7,7),
stride=(1,2,2), padding=(1,3,3), bias=False)
layer_name = list(container.state_dict().keys())[0][:-7]
setattr(container, layer_name, new_conv)
return base_model
| 40.630631
| 142
| 0.622727
| 1,168
| 9,020
| 4.600171
| 0.127568
| 0.038526
| 0.038712
| 0.045971
| 0.860599
| 0.846082
| 0.834729
| 0.80737
| 0.777219
| 0.735157
| 0
| 0.017903
| 0.26929
| 9,020
| 221
| 143
| 40.81448
| 0.797299
| 0.063193
| 0
| 0.699387
| 1
| 0
| 0.081039
| 0
| 0
| 0
| 0
| 0
| 0.042945
| 1
| 0.02454
| false
| 0
| 0.04908
| 0
| 0.110429
| 0.08589
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db41d69bc168d194eb75227ec828c572e72a241f
| 56
|
py
|
Python
|
tests/test_pyspark-delta-utility.py
|
AraiYuno/pyspark-delta-utility
|
1e9fd792f52fab17cb7059451e242e7675b481ea
|
[
"MIT"
] | 1
|
2022-03-26T07:06:06.000Z
|
2022-03-26T07:06:06.000Z
|
tests/test_pyspark-delta-utility.py
|
AraiYuno/pyspark-delta-utility
|
1e9fd792f52fab17cb7059451e242e7675b481ea
|
[
"MIT"
] | null | null | null |
tests/test_pyspark-delta-utility.py
|
AraiYuno/pyspark-delta-utility
|
1e9fd792f52fab17cb7059451e242e7675b481ea
|
[
"MIT"
] | null | null | null |
from pyspark_delta_utility import pyspark_delta_utility
| 28
| 55
| 0.928571
| 8
| 56
| 6
| 0.625
| 0.5
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 56
| 1
| 56
| 56
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db766565088e5ff1a2ec58be8e6c1f337e5373d1
| 355
|
py
|
Python
|
alexia/views/__init__.py
|
LaudateCorpus1/alexia-1
|
9c0d3c90c0ffe2237299a561b755b9c17905e354
|
[
"BSD-3-Clause"
] | 8
|
2015-06-29T20:01:22.000Z
|
2020-10-19T13:49:38.000Z
|
alexia/views/__init__.py
|
LaudateCorpus1/alexia-1
|
9c0d3c90c0ffe2237299a561b755b9c17905e354
|
[
"BSD-3-Clause"
] | 67
|
2015-10-05T16:57:14.000Z
|
2022-03-28T19:57:36.000Z
|
alexia/views/__init__.py
|
LaudateCorpus1/alexia-1
|
9c0d3c90c0ffe2237299a561b755b9c17905e354
|
[
"BSD-3-Clause"
] | 6
|
2015-10-05T13:54:34.000Z
|
2021-11-30T05:11:58.000Z
|
from .mixins import (
CreateViewForEvent, CreateViewForOrganization, EventOrganizerFilterMixin,
FixedValueCreateView, OrganizationFilterMixin, OrganizationFormMixin,
)
__all__ = [
'CreateViewForEvent', 'CreateViewForOrganization', 'EventOrganizerFilterMixin',
'FixedValueCreateView', 'OrganizationFilterMixin', 'OrganizationFormMixin',
]
| 35.5
| 83
| 0.808451
| 16
| 355
| 17.6875
| 0.625
| 0.303887
| 0.480565
| 0.621908
| 0.932862
| 0.932862
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107042
| 355
| 9
| 84
| 39.444444
| 0.892744
| 0
| 0
| 0
| 0
| 0
| 0.371831
| 0.264789
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dba547a363bab145b3785873673fbcf6492607d8
| 10,158
|
py
|
Python
|
16B/pipeline4.7.1_custom/EVLA_pipe_plotsummary.py
|
e-koch/VLA_Lband
|
8fca7b2de0b88ce5c5011b34bf3936c69338d0b0
|
[
"MIT"
] | 1
|
2021-03-08T23:19:12.000Z
|
2021-03-08T23:19:12.000Z
|
16B/pipeline4.7.1_custom/EVLA_pipe_plotsummary.py
|
e-koch/VLA_Lband
|
8fca7b2de0b88ce5c5011b34bf3936c69338d0b0
|
[
"MIT"
] | null | null | null |
16B/pipeline4.7.1_custom/EVLA_pipe_plotsummary.py
|
e-koch/VLA_Lband
|
8fca7b2de0b88ce5c5011b34bf3936c69338d0b0
|
[
"MIT"
] | null | null | null |
######################################################################
#
# Copyright (C) 2013
# Associated Universities, Inc. Washington DC, USA,
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Library General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public
# License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 675 Massachusetts Ave, Cambridge, MA 02139, USA.
#
# Correspondence concerning VLA Pipelines should be addressed as follows:
# Please register and submit helpdesk tickets via: https://help.nrao.edu
# Postal address:
# National Radio Astronomy Observatory
# VLA Pipeline Support Office
# PO Box O
# Socorro, NM, USA
#
######################################################################
#MAKING FINAL UV PLOTS ON ALL SOURCES
logprint ("Starting EVLA_pipe_plotsummary.py", logfileout='logs/plotsummary.log')
time_list=runtiming('plotsummary', 'start')
QA2_plotsummary='Pass'
logprint ("Making final UV plots", logfileout='logs/plotsummary.log')
# Make some plots of the calibrated data
default('plotms')
vis=ms_active
xaxis='time'
yaxis='phase'
ydatacolumn='corrected'
selectdata=True
field=calibrator_field_select_string
correlation=corrstring
averagedata=True
avgchannel=str(max(channels))
avgtime='1e8'
avgscan=False
transform=False
extendflag=False
iteraxis=''
coloraxis='antenna2'
plotrange=[]
title='Calibrated phase vs. time, all calibrators'
xlabel=''
ylabel=''
showmajorgrid=False
showminorgrid=False
plotfile='all_calibrators_phase_time.png'
overwrite=True
showgui=False
plotms()
#
# see whether we don't need this any more in CASA 4.2.2...
#
#mylogfile = casalog.logfile()
#countmax = 100
#countr = 0
#foundend=False
#while not foundend and countr<countmax:
# os.system('sleep 10s')
# f = os.popen('tail -n 10 '+mylogfile)
# fstrs = f.readlines()
# f.close()
# for fstr in fstrs:
# if fstr.count('End Task: plotms')>0:
# foundend=True
# print 'Found end of task plotms in logfile at count '+str(countr)
# countr+=1
#default('plotms')
#vis=ms_active
#xaxis='time'
#yaxis='amp'
#ydatacolumn='residual'
#selectdata=True
#field=calibrator_field_select_string
#correlation=corrstring
#averagedata=True
#avgchannel=str(max(channels))
#avgtime='1e8'
#avgscan=False
#transform=False
#extendflag=False
#iteraxis=''
#coloraxis='antenna2'
#plotrange=[]
#title='Corrected-model amp vs. time, all calibrators'
#xlabel=''
#ylabel=''
#showmajorgrid=False
#showminorgrid=False
#plotfile='all_calibrators_resid_amp_time.png'
#overwrite=True
#plotms()
##
#mylogfile = casalog.logfile()
#countmax = 100
#countr = 0
#foundend=False
#while not foundend and countr<countmax:
# os.system('sleep 10s')
# f = os.popen('tail -n 10 '+mylogfile)
# fstrs = f.readlines()
# f.close()
# for fstr in fstrs:
# if fstr.count('End Task: plotms')>0:
# foundend=True
# print 'Found end of task plotms in logfile at count '+str(countr)
# countr+=1
#for ii in range(0,len(calibrator_field_list)):
for ii in field_ids:
print ii
default('plotms')
vis=ms_active
xaxis='uvwave'
yaxis='amp'
ydatacolumn='corrected'
selectdata=True
# field=str(calibrator_field_list[ii])
field=str(field_ids[ii])
correlation=corrstring
averagedata=True
avgchannel=str(max(channels))
avgtime='1e8'
avgscan=False
transform=False
extendflag=False
iteraxis=''
coloraxis='spw'
plotrange=[]
title='Field '+field+', '+field_names[ii]
xlabel=''
ylabel=''
showmajorgrid=False
showminorgrid=False
plotfile='field'+field+'_amp_uvdist.png'
overwrite=True
showgui=False
plotms()
## Make Amp vs. Freq plots per field per baseband for newer data sets that have the band, baseband and spw info
## such as EVLA_X#A0C0#0
if '#' in spw_names[0]:
for ii in field_ids:
for iii in range(0,len(spws_info)):
BB=spws_info[iii]
band=BB[0]
bband=BB[1]
bbspw=BB[2]
if ((band=='X') and (len(bbspw)==1)):
print band, bband, bbspw
logprint("Seems to have encountered a reference poining spw; Amp vs. Freq plots will not be made for this spw", logfileout='logs/plotsummary.log')
else:
print band, bband, bbspw
default('plotms')
vis=ms_active
xaxis='freq'
yaxis='amp'
ydatacolumn='corrected'
selectdata=True
field=str(field_ids[ii])
correlation=corrstring
spw=str(bbspw).strip('[]')
averagedata=True
avgtime='1e8'
avgscan=True
avgantenna=True
transform=False
extendflag=False
iteraxis=''
coloraxis='antenna1'
plotrange=[0, 0, 0, 0]
title='Field '+field+', '+field_names[ii]+', '+band+'-Band '+bband+', spw='+str(bbspw)
xlabel=''
ylabel=''
showmajorgrid=False
showminorgrid=False
plotfile='field'+field+'_'+band+'-Band_'+bband+'_amp_freq.png'
overwrite=True
showgui=False
plotms()
else:
logprint("These are old EVLA data; will make one Amp vs. Freq plot per field with all available spectral windows", logfileout='logs/plotsummary.log')
for ii in field_ids:
default('plotms')
vis=ms_active
xaxis='freq'
yaxis='amp'
ydatacolumn='corrected'
selectdata=True
field=str(field_ids[ii])
correlation=corrstring
averagedata=True
avgtime='1e8'
avgscan=True
avgantenna=True
transform=False
extendflag=False
iteraxis=''
coloraxis='antenna1'
plotrange=[0, 0, 0, 0]
title='Field '+field+', '+field_names[ii]
xlabel=''
ylabel=''
showmajorgrid=False
showminorgrid=False
plotfile='field'+field+'_amp_freq.png'
overwrite=True
showgui=False
plotms()
## Make Phase vs. Freq plots per field per baseband for newer data sets that have the band, baseband and spw info
## such as EVLA_X#A0C0#0
if '#' in spw_names[0]:
for ii in field_ids:
for iii in range(0,len(spws_info)):
BB=spws_info[iii]
band=BB[0]
bband=BB[1]
bbspw=BB[2]
if ((band=='X') and (len(bbspw)==1)):
print band, bband, bbspw
logprint("Seems to have encountered a reference pointing spw; Phase vs. Freq plots will not be made for this spw", logfileout='logs/plotsummary.log')
else:
print band, bband, bbspw
default('plotms')
vis=ms_active
xaxis='freq'
yaxis='phase'
ydatacolumn='corrected'
selectdata=True
field=str(field_ids[ii])
correlation=corrstring
spw=str(bbspw).strip('[]')
averagedata=True
avgtime='1e8'
avgscan=True
avgantenna=True
transform=False
extendflag=False
iteraxis=''
coloraxis='antenna1'
plotrange=[0, 0, -180, 180]
title='Field '+field+', '+field_names[ii]+', '+band+'-Band '+bband+', spw='+str(bbspw)
xlabel=''
ylabel=''
showmajorgrid=False
showminorgrid=False
plotfile='field'+field+'_'+band+'-Band_'+bband+'_phase_freq.png'
overwrite=True
showgui=False
plotms()
else:
logprint("These are old EVLA data; will make one Phase vs. Freq plot per field with all available spectral windows", logfileout='logs/plotsummary.log')
for ii in field_ids:
default('plotms')
vis=ms_active
xaxis='freq'
yaxis='phase'
ydatacolumn='corrected'
selectdata=True
field=str(field_ids[ii])
correlation=corrstring
averagedata=True
avgtime='1e8'
avgscan=True
avgantenna=True
transform=False
extendflag=False
iteraxis=''
coloraxis='antenna1'
plotrange=[0, 0, -180, 180]
title='Field '+field+', '+field_names[ii]
xlabel=''
ylabel=''
showmajorgrid=False
showminorgrid=False
plotfile='field'+field+'_phase_freq.png'
overwrite=True
showgui=False
plotms()
# see if we can omit this now in CASA 4.2.2...
#
# mylogfile = casalog.logfile()
# countmax = 100
# countr = 0
# foundend=False
# while not foundend and countr<countmax:
# os.system('sleep 10s')
# f = os.popen('tail -n 10 '+mylogfile)
# fstrs = f.readlines()
# f.close()
# for fstr in fstrs:
# if fstr.count('End Task: plotms')>0:
# foundend=True
# print 'Found end of task plotms in logfile at count '+str(countr)
# countr+=1
#
#Remove plot with no points
blankstatus = checkblankplot(plotfile, maincasalog)
logprint ("QA2 score: "+QA2_plotsummary, logfileout='logs/plotsummary.log')
logprint ("Finished EVLA_pipe_plotsummary.py", logfileout='logs/plotsummary.log')
time_list=runtiming('plotsummary', 'end')
pipeline_save()
######################################################################
| 31.159509
| 165
| 0.595294
| 1,177
| 10,158
| 5.078165
| 0.229397
| 0.025096
| 0.033462
| 0.037477
| 0.782667
| 0.780157
| 0.769784
| 0.740171
| 0.704199
| 0.701188
| 0
| 0.015663
| 0.27722
| 10,158
| 325
| 166
| 31.255385
| 0.79842
| 0.326639
| 0
| 0.869347
| 0
| 0.020101
| 0.177853
| 0.01198
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.005025
| 0
| null | null | 0.065327
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dba88d1adbb2fa4423d0da1f44afb664e1d6ebc9
| 35,465
|
py
|
Python
|
fhir/resources/tests/test_auditevent.py
|
cstoltze/fhir.resources
|
52f99738935b7313089d89daf94d73ce7d167c9d
|
[
"BSD-3-Clause"
] | 144
|
2019-05-08T14:24:43.000Z
|
2022-03-30T02:37:11.000Z
|
fhir/resources/tests/test_auditevent.py
|
cstoltze/fhir.resources
|
52f99738935b7313089d89daf94d73ce7d167c9d
|
[
"BSD-3-Clause"
] | 82
|
2019-05-13T17:43:13.000Z
|
2022-03-30T16:45:17.000Z
|
fhir/resources/tests/test_auditevent.py
|
cstoltze/fhir.resources
|
52f99738935b7313089d89daf94d73ce7d167c9d
|
[
"BSD-3-Clause"
] | 48
|
2019-04-04T14:14:53.000Z
|
2022-03-30T06:07:31.000Z
|
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/AuditEvent
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
from pydantic.validators import bytes_validator # noqa: F401
from .. import fhirtypes # noqa: F401
from .. import auditevent
def impl_auditevent_1(inst):
assert inst.action == "E"
assert inst.agent[0].altId == "601847123"
assert inst.agent[0].name == "Grahame Grieve"
assert inst.agent[0].requestor is True
assert inst.agent[0].type.coding[0].code == "humanuser"
assert inst.agent[0].type.coding[0].display == "human user"
assert inst.agent[0].type.coding[0].system == (
"http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
)
assert inst.agent[0].who.identifier.value == "95"
assert inst.agent[1].altId == "6580"
assert inst.agent[1].network.address == "Workstation1.ehr.familyclinic.com"
assert inst.agent[1].network.type == "1"
assert inst.agent[1].requestor is False
assert inst.agent[1].type.coding[0].code == "110153"
assert inst.agent[1].type.coding[0].display == "Source Role ID"
assert (
inst.agent[1].type.coding[0].system
== "http://dicom.nema.org/resources/ontology/DCM"
)
assert inst.agent[1].who.identifier.system == "urn:oid:2.16.840.1.113883.4.2"
assert inst.agent[1].who.identifier.value == "2.16.840.1.113883.4.2"
assert inst.entity[0].query == bytes_validator(
(
"aHR0cDovL2ZoaXItZGV2LmhlYWx0aGludGVyc2VjdGlvbnMuY29tLmF1L29w"
"ZW4vRW5jb3VudGVyP3BhcnRpY2lwYW50PTEz"
)
)
assert inst.entity[0].role.code == "24"
assert inst.entity[0].role.display == "Query"
assert (
inst.entity[0].role.system
== "http://terminology.hl7.org/CodeSystem/object-role"
)
assert inst.entity[0].type.code == "2"
assert inst.entity[0].type.display == "System Object"
assert (
inst.entity[0].type.system
== "http://terminology.hl7.org/CodeSystem/audit-entity-type"
)
assert inst.id == "example-search"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.outcome == "0"
assert inst.recorded == fhirtypes.Instant.validate("2015-08-22T23:42:24Z")
assert inst.source.observer.display == "hl7connect.healthintersections.com.au"
assert inst.source.site == "Cloud"
assert inst.source.type[0].code == "3"
assert inst.source.type[0].display == "Web Server"
assert (
inst.source.type[0].system
== "http://terminology.hl7.org/CodeSystem/security-source-type"
)
assert inst.subtype[0].code == "search"
assert inst.subtype[0].display == "search"
assert inst.subtype[0].system == "http://hl7.org/fhir/restful-interaction"
assert inst.text.status == "generated"
assert inst.type.code == "rest"
assert inst.type.display == "Restful Operation"
assert inst.type.system == "http://terminology.hl7.org/CodeSystem/audit-event-type"
def test_auditevent_1(base_settings):
"""No. 1 tests collection for AuditEvent.
Test File: audit-event-example-search.json
"""
filename = base_settings["unittest_data_dir"] / "audit-event-example-search.json"
inst = auditevent.AuditEvent.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "AuditEvent" == inst.resource_type
impl_auditevent_1(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "AuditEvent" == data["resourceType"]
inst2 = auditevent.AuditEvent(**data)
impl_auditevent_1(inst2)
def impl_auditevent_2(inst):
assert inst.action == "E"
assert inst.agent[0].altId == "601847123"
assert inst.agent[0].name == "Grahame Grieve"
assert inst.agent[0].network.address == "127.0.0.1"
assert inst.agent[0].network.type == "2"
assert inst.agent[0].requestor is True
assert inst.agent[0].type.coding[0].code == "humanuser"
assert inst.agent[0].type.coding[0].display == "human user"
assert inst.agent[0].type.coding[0].system == (
"http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
)
assert inst.agent[0].who.identifier.value == "95"
assert inst.agent[1].altId == "6580"
assert inst.agent[1].network.address == "Workstation1.ehr.familyclinic.com"
assert inst.agent[1].network.type == "1"
assert inst.agent[1].requestor is False
assert inst.agent[1].type.coding[0].code == "110153"
assert inst.agent[1].type.coding[0].display == "Source Role ID"
assert (
inst.agent[1].type.coding[0].system
== "http://dicom.nema.org/resources/ontology/DCM"
)
assert inst.agent[1].who.identifier.system == "urn:oid:2.16.840.1.113883.4.2"
assert inst.agent[1].who.identifier.value == "2.16.840.1.113883.4.2"
assert inst.id == "example-logout"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.outcome == "0"
assert inst.recorded == fhirtypes.Instant.validate("2013-06-20T23:46:41Z")
assert (
inst.source.observer.identifier.value == "hl7connect.healthintersections.com.au"
)
assert inst.source.site == "Cloud"
assert inst.source.type[0].code == "3"
assert inst.source.type[0].display == "Web Server"
assert (
inst.source.type[0].system
== "http://terminology.hl7.org/CodeSystem/security-source-type"
)
assert inst.subtype[0].code == "110123"
assert inst.subtype[0].display == "Logout"
assert inst.subtype[0].system == "http://dicom.nema.org/resources/ontology/DCM"
assert inst.text.status == "generated"
assert inst.type.code == "110114"
assert inst.type.display == "User Authentication"
assert inst.type.system == "http://dicom.nema.org/resources/ontology/DCM"
def test_auditevent_2(base_settings):
"""No. 2 tests collection for AuditEvent.
Test File: audit-event-example-logout.json
"""
filename = base_settings["unittest_data_dir"] / "audit-event-example-logout.json"
inst = auditevent.AuditEvent.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "AuditEvent" == inst.resource_type
impl_auditevent_2(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "AuditEvent" == data["resourceType"]
inst2 = auditevent.AuditEvent(**data)
impl_auditevent_2(inst2)
def impl_auditevent_3(inst):
assert inst.action == "R"
assert inst.agent[0].altId == "601847123"
assert inst.agent[0].name == "Grahame Grieve"
assert inst.agent[0].requestor is True
assert inst.agent[0].type.coding[0].code == "humanuser"
assert inst.agent[0].type.coding[0].display == "human user"
assert inst.agent[0].type.coding[0].system == (
"http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
)
assert inst.agent[0].who.identifier.value == "95"
assert inst.agent[1].altId == "6580"
assert inst.agent[1].network.address == "Workstation1.ehr.familyclinic.com"
assert inst.agent[1].network.type == "1"
assert inst.agent[1].requestor is False
assert inst.agent[1].type.coding[0].code == "110153"
assert inst.agent[1].type.coding[0].display == "Source Role ID"
assert (
inst.agent[1].type.coding[0].system
== "http://dicom.nema.org/resources/ontology/DCM"
)
assert inst.agent[1].who.identifier.system == "urn:oid:2.16.840.1.113883.4.2"
assert inst.agent[1].who.identifier.value == "2.16.840.1.113883.4.2"
assert inst.entity[0].lifecycle.code == "6"
assert inst.entity[0].lifecycle.display == "Access / Use"
assert (
inst.entity[0].lifecycle.system
== "http://terminology.hl7.org/CodeSystem/dicom-audit-lifecycle"
)
assert inst.entity[0].type.code == "2"
assert inst.entity[0].type.display == "System Object"
assert (
inst.entity[0].type.system
== "http://terminology.hl7.org/CodeSystem/audit-entity-type"
)
assert inst.entity[0].what.reference == "Patient/example/_history/1"
assert inst.id == "example-rest"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.outcome == "0"
assert inst.recorded == fhirtypes.Instant.validate("2013-06-20T23:42:24Z")
assert (
inst.source.observer.identifier.value == "hl7connect.healthintersections.com.au"
)
assert inst.source.site == "Cloud"
assert inst.source.type[0].code == "3"
assert inst.source.type[0].display == "Web Server"
assert (
inst.source.type[0].system
== "http://terminology.hl7.org/CodeSystem/security-source-type"
)
assert inst.subtype[0].code == "vread"
assert inst.subtype[0].display == "vread"
assert inst.subtype[0].system == "http://hl7.org/fhir/restful-interaction"
assert inst.text.status == "generated"
assert inst.type.code == "rest"
assert inst.type.display == "Restful Operation"
assert inst.type.system == "http://terminology.hl7.org/CodeSystem/audit-event-type"
def test_auditevent_3(base_settings):
"""No. 3 tests collection for AuditEvent.
Test File: audit-event-example-vread.json
"""
filename = base_settings["unittest_data_dir"] / "audit-event-example-vread.json"
inst = auditevent.AuditEvent.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "AuditEvent" == inst.resource_type
impl_auditevent_3(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "AuditEvent" == data["resourceType"]
inst2 = auditevent.AuditEvent(**data)
impl_auditevent_3(inst2)
def impl_auditevent_4(inst):
assert inst.action == "R"
assert inst.agent[0].requestor is False
assert inst.agent[0].type.coding[0].code == "110153"
assert inst.agent[0].type.coding[0].display == "Source Role ID"
assert (
inst.agent[0].type.coding[0].system
== "http://dicom.nema.org/resources/ontology/DCM"
)
assert inst.agent[0].who.display == "ExportToMedia.app"
assert inst.agent[1].altId == "601847123"
assert inst.agent[1].name == "Grahame Grieve"
assert inst.agent[1].requestor is True
assert inst.agent[1].type.coding[0].code == "humanuser"
assert inst.agent[1].type.coding[0].display == "human user"
assert inst.agent[1].type.coding[0].system == (
"http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
)
assert inst.agent[1].who.identifier.value == "95"
assert inst.agent[2].media.code == "110033"
assert inst.agent[2].media.display == "DVD"
assert inst.agent[2].media.system == "http://dicom.nema.org/resources/ontology/DCM"
assert inst.agent[2].name == "Media title: Hello World"
assert inst.agent[2].requestor is False
assert inst.agent[2].type.coding[0].code == "110154"
assert inst.agent[2].type.coding[0].display == "Destination Media"
assert (
inst.agent[2].type.coding[0].system
== "http://dicom.nema.org/resources/ontology/DCM"
)
assert inst.entity[0].role.code == "1"
assert inst.entity[0].role.display == "Patient"
assert (
inst.entity[0].role.system
== "http://terminology.hl7.org/CodeSystem/object-role"
)
assert inst.entity[0].type.code == "1"
assert inst.entity[0].type.display == "Person"
assert (
inst.entity[0].type.system
== "http://terminology.hl7.org/CodeSystem/audit-entity-type"
)
assert (
inst.entity[0].what.identifier.value
== "e3cdfc81a0d24bd^^^&2.16.840.1.113883.4.2&ISO"
)
assert inst.entity[1].role.code == "20"
assert inst.entity[1].role.display == "Job"
assert (
inst.entity[1].role.system
== "http://terminology.hl7.org/CodeSystem/object-role"
)
assert inst.entity[1].type.code == "2"
assert inst.entity[1].type.display == "System Object"
assert (
inst.entity[1].type.system
== "http://terminology.hl7.org/CodeSystem/audit-entity-type"
)
assert inst.entity[1].what.identifier.type.coding[0].code == "IHE XDS Metadata"
assert (
inst.entity[1].what.identifier.type.coding[0].display
== "submission set classificationNode"
)
assert (
inst.entity[1].what.identifier.type.coding[0].system
== "urn:uuid:a54d6aa5-d40d-43f9-88c5-b4633d873bdd"
)
assert (
inst.entity[1].what.identifier.value
== "e3cdfc81a0d24bd^^^&2.16.840.1.113883.4.2&ISO"
)
assert inst.entity[2].type.code == "2"
assert inst.entity[2].type.display == "System Object"
assert (
inst.entity[2].type.system
== "http://terminology.hl7.org/CodeSystem/audit-entity-type"
)
assert inst.entity[2].what.reference == "DocumentManifest/example"
assert inst.id == "example-media"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.outcome == "0"
assert inst.recorded == fhirtypes.Instant.validate("2015-08-27T23:42:24Z")
assert inst.source.observer.display == "hl7connect.healthintersections.com.au"
assert inst.subtype[0].code == "ITI-32"
assert inst.subtype[0].display == "Distribute Document Set on Media"
assert inst.subtype[0].system == "urn:oid:1.3.6.1.4.1.19376.1.2"
assert inst.text.status == "generated"
assert inst.type.code == "110106"
assert inst.type.display == "Export"
assert inst.type.system == "http://dicom.nema.org/resources/ontology/DCM"
def test_auditevent_4(base_settings):
"""No. 4 tests collection for AuditEvent.
Test File: audit-event-example-media.json
"""
filename = base_settings["unittest_data_dir"] / "audit-event-example-media.json"
inst = auditevent.AuditEvent.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "AuditEvent" == inst.resource_type
impl_auditevent_4(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "AuditEvent" == data["resourceType"]
inst2 = auditevent.AuditEvent(**data)
impl_auditevent_4(inst2)
def impl_auditevent_5(inst):
assert inst.action == "E"
assert inst.agent[0].altId == "601847123"
assert inst.agent[0].name == "Grahame Grieve"
assert inst.agent[0].network.address == "127.0.0.1"
assert inst.agent[0].network.type == "2"
assert inst.agent[0].requestor is True
assert inst.agent[0].type.coding[0].code == "humanuser"
assert inst.agent[0].type.coding[0].display == "human user"
assert inst.agent[0].type.coding[0].system == (
"http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
)
assert inst.agent[0].who.identifier.value == "95"
assert inst.agent[1].altId == "6580"
assert inst.agent[1].network.address == "Workstation1.ehr.familyclinic.com"
assert inst.agent[1].network.type == "1"
assert inst.agent[1].requestor is False
assert inst.agent[1].type.coding[0].code == "110153"
assert inst.agent[1].type.coding[0].display == "Source Role ID"
assert (
inst.agent[1].type.coding[0].system
== "http://dicom.nema.org/resources/ontology/DCM"
)
assert inst.agent[1].who.identifier.system == "urn:oid:2.16.840.1.113883.4.2"
assert inst.agent[1].who.identifier.value == "2.16.840.1.113883.4.2"
assert inst.id == "example-login"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.outcome == "0"
assert inst.recorded == fhirtypes.Instant.validate("2013-06-20T23:41:23Z")
assert (
inst.source.observer.identifier.value == "hl7connect.healthintersections.com.au"
)
assert inst.source.site == "Cloud"
assert inst.source.type[0].code == "3"
assert inst.source.type[0].display == "Web Server"
assert (
inst.source.type[0].system
== "http://terminology.hl7.org/CodeSystem/security-source-type"
)
assert inst.subtype[0].code == "110122"
assert inst.subtype[0].display == "Login"
assert inst.subtype[0].system == "http://dicom.nema.org/resources/ontology/DCM"
assert inst.text.status == "generated"
assert inst.type.code == "110114"
assert inst.type.display == "User Authentication"
assert inst.type.system == "http://dicom.nema.org/resources/ontology/DCM"
def test_auditevent_5(base_settings):
"""No. 5 tests collection for AuditEvent.
Test File: audit-event-example-login.json
"""
filename = base_settings["unittest_data_dir"] / "audit-event-example-login.json"
inst = auditevent.AuditEvent.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "AuditEvent" == inst.resource_type
impl_auditevent_5(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "AuditEvent" == data["resourceType"]
inst2 = auditevent.AuditEvent(**data)
impl_auditevent_5(inst2)
def impl_auditevent_6(inst):
assert inst.action == "E"
assert inst.agent[0].altId == "6580"
assert inst.agent[0].network.address == "Workstation1.ehr.familyclinic.com"
assert inst.agent[0].network.type == "1"
assert inst.agent[0].requestor is False
assert inst.agent[0].type.coding[0].code == "110153"
assert inst.agent[0].type.coding[0].display == "Source Role ID"
assert (
inst.agent[0].type.coding[0].system
== "http://dicom.nema.org/resources/ontology/DCM"
)
assert inst.agent[0].who.identifier.system == "urn:oid:2.16.840.1.113883.4.2"
assert inst.agent[0].who.identifier.value == "2.16.840.1.113883.4.2"
assert inst.agent[1].altId == "601847123"
assert inst.agent[1].name == "Grahame Grieve"
assert inst.agent[1].requestor is True
assert inst.agent[1].type.coding[0].code == "humanuser"
assert inst.agent[1].type.coding[0].display == "human user"
assert inst.agent[1].type.coding[0].system == (
"http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
)
assert inst.agent[1].who.identifier.value == "95"
assert inst.entity[0].role.code == "1"
assert inst.entity[0].role.display == "Patient"
assert (
inst.entity[0].role.system
== "http://terminology.hl7.org/CodeSystem/object-role"
)
assert inst.entity[0].type.code == "1"
assert inst.entity[0].type.display == "Person"
assert (
inst.entity[0].type.system
== "http://terminology.hl7.org/CodeSystem/audit-entity-type"
)
assert (
inst.entity[0].what.identifier.value
== "e3cdfc81a0d24bd^^^&2.16.840.1.113883.4.2&ISO"
)
assert inst.entity[1].detail[0].type == "MSH-10"
assert inst.entity[1].detail[0].valueBase64Binary == bytes_validator(
"MS4yLjg0MC4xMTQzNTAuMS4xMy4wLjEuNy4xLjE="
)
assert inst.entity[1].role.code == "24"
assert inst.entity[1].role.display == "Query"
assert (
inst.entity[1].role.system
== "http://terminology.hl7.org/CodeSystem/object-role"
)
assert inst.entity[1].type.code == "2"
assert inst.entity[1].type.display == "System Object"
assert (
inst.entity[1].type.system
== "http://terminology.hl7.org/CodeSystem/audit-entity-type"
)
assert inst.id == "example-pixQuery"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.outcome == "0"
assert inst.recorded == fhirtypes.Instant.validate("2015-08-26T23:42:24Z")
assert inst.source.observer.display == "hl7connect.healthintersections.com.au"
assert inst.subtype[0].code == "ITI-9"
assert inst.subtype[0].display == "PIX Query"
assert inst.subtype[0].system == "urn:oid:1.3.6.1.4.1.19376.1.2"
assert inst.text.status == "generated"
assert inst.type.code == "110112"
assert inst.type.display == "Query"
assert inst.type.system == "http://dicom.nema.org/resources/ontology/DCM"
def test_auditevent_6(base_settings):
"""No. 6 tests collection for AuditEvent.
Test File: audit-event-example-pixQuery.json
"""
filename = base_settings["unittest_data_dir"] / "audit-event-example-pixQuery.json"
inst = auditevent.AuditEvent.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "AuditEvent" == inst.resource_type
impl_auditevent_6(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "AuditEvent" == data["resourceType"]
inst2 = auditevent.AuditEvent(**data)
impl_auditevent_6(inst2)
def impl_auditevent_7(inst):
    """Assert the field values of the AuditEvent parsed from
    auditevent-example.json (an application-start audit record).

    Called twice per test: once on the instance parsed from the JSON file
    and once on the instance rebuilt from its own ``dict()`` output.
    """
    assert inst.action == "E"
    assert inst.agent[0].network.address == "127.0.0.1"
    assert inst.agent[0].network.type == "2"
    assert inst.agent[0].requestor is False
    assert inst.agent[0].role[0].text == "Service User (Logon)"
    assert inst.agent[0].type.coding[0].code == "humanuser"
    assert inst.agent[0].type.coding[0].display == "human user"
    assert inst.agent[0].type.coding[0].system == (
        "http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
    )
    assert inst.agent[0].who.identifier.value == "Grahame"
    assert inst.agent[1].altId == "6580"
    assert inst.agent[1].network.address == "Workstation1.ehr.familyclinic.com"
    assert inst.agent[1].network.type == "1"
    assert inst.agent[1].requestor is False
    assert inst.agent[1].type.coding[0].code == "110153"
    assert inst.agent[1].type.coding[0].display == "Source Role ID"
    assert (
        inst.agent[1].type.coding[0].system
        == "http://dicom.nema.org/resources/ontology/DCM"
    )
    assert inst.agent[1].who.identifier.system == "urn:oid:2.16.840.1.113883.4.2"
    assert inst.agent[1].who.identifier.value == "2.16.840.1.113883.4.2"
    assert inst.entity[0].lifecycle.code == "6"
    assert inst.entity[0].lifecycle.display == "Access / Use"
    assert (
        inst.entity[0].lifecycle.system
        == "http://terminology.hl7.org/CodeSystem/dicom-audit-lifecycle"
    )
    assert inst.entity[0].name == "Grahame's Laptop"
    assert inst.entity[0].role.code == "4"
    assert inst.entity[0].role.display == "Domain Resource"
    assert (
        inst.entity[0].role.system
        == "http://terminology.hl7.org/CodeSystem/object-role"
    )
    assert inst.entity[0].type.code == "4"
    assert inst.entity[0].type.display == "Other"
    assert (
        inst.entity[0].type.system
        == "http://terminology.hl7.org/CodeSystem/audit-entity-type"
    )
    assert inst.entity[0].what.identifier.type.coding[0].code == "SNO"
    assert (
        inst.entity[0].what.identifier.type.coding[0].system
        == "http://terminology.hl7.org/CodeSystem/v2-0203"
    )
    assert inst.entity[0].what.identifier.type.text == "Dell Serial Number"
    assert inst.entity[0].what.identifier.value == "ABCDEF"
    assert inst.id == "example"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.outcome == "0"
    assert inst.recorded == fhirtypes.Instant.validate("2012-10-25T22:04:27+11:00")
    assert inst.source.observer.display == "Grahame's Laptop"
    assert inst.source.site == "Development"
    assert inst.source.type[0].code == "110122"
    assert inst.source.type[0].display == "Login"
    assert inst.source.type[0].system == "http://dicom.nema.org/resources/ontology/DCM"
    assert inst.subtype[0].code == "110120"
    assert inst.subtype[0].display == "Application Start"
    assert inst.subtype[0].system == "http://dicom.nema.org/resources/ontology/DCM"
    # FIX: the quotes around "Grahame" were raw double quotes inside a
    # double-quoted literal (a SyntaxError).  The xhtml narrative escapes
    # them as &quot;, so the expected string must contain the entities.
    assert inst.text.div == (
        '<div xmlns="http://www.w3.org/1999/xhtml">Application '
        "Start for under service login &quot;Grahame&quot; (id: "
        "Grahame's Test HL7Connect)</div>"
    )
    assert inst.text.status == "generated"
    assert inst.type.code == "110100"
    assert inst.type.display == "Application Activity"
    assert inst.type.system == "http://dicom.nema.org/resources/ontology/DCM"
def test_auditevent_7(base_settings):
    """No. 7 tests collection for AuditEvent.
    Test File: auditevent-example.json
    """
    source_path = base_settings["unittest_data_dir"] / "auditevent-example.json"
    # Parse the JSON example into an AuditEvent resource and validate it.
    resource = auditevent.AuditEvent.parse_file(
        source_path, encoding="utf-8", content_type="application/json"
    )
    assert resource.resource_type == "AuditEvent"
    impl_auditevent_7(resource)
    # Round-trip: serialize the parsed resource, rebuild from the dict,
    # and validate the rebuilt instance the same way.
    payload = resource.dict()
    assert payload["resourceType"] == "AuditEvent"
    rebuilt = auditevent.AuditEvent(**payload)
    impl_auditevent_7(rebuilt)
def impl_auditevent_8(inst):
    """Assert the field values of the AuditEvent parsed from
    auditevent-example-disclosure.json (a HIPAA disclosure audit record).

    Runs against both the file-parsed instance and the instance rebuilt
    from its own ``dict()`` output.
    """
    assert inst.action == "R"
    assert inst.agent[0].altId == "notMe"
    assert inst.agent[0].location.reference == "Location/1"
    assert inst.agent[0].name == "That guy everyone wishes would be caught"
    assert inst.agent[0].network.address == "custodian.net"
    assert inst.agent[0].network.type == "1"
    assert inst.agent[0].policy[0] == "http://consent.com/yes"
    assert inst.agent[0].requestor is True
    assert inst.agent[0].type.coding[0].code == "110153"
    assert inst.agent[0].type.coding[0].display == "Source Role ID"
    assert (
        inst.agent[0].type.coding[0].system
        == "http://dicom.nema.org/resources/ontology/DCM"
    )
    assert inst.agent[0].who.identifier.value == "SomeIdiot@nowhere"
    assert inst.agent[1].network.address == "marketing.land"
    assert inst.agent[1].network.type == "1"
    assert inst.agent[1].purposeOfUse[0].coding[0].code == "HMARKT"
    assert inst.agent[1].purposeOfUse[0].coding[0].display == "healthcare marketing"
    assert (
        inst.agent[1].purposeOfUse[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.agent[1].requestor is False
    assert inst.agent[1].type.coding[0].code == "110152"
    assert inst.agent[1].type.coding[0].display == "Destination Role ID"
    assert (
        inst.agent[1].type.coding[0].system
        == "http://dicom.nema.org/resources/ontology/DCM"
    )
    assert inst.agent[1].who.display == "Where"
    assert inst.agent[1].who.reference == "Practitioner/example"
    assert inst.entity[0].role.code == "1"
    assert inst.entity[0].role.display == "Patient"
    assert (
        inst.entity[0].role.system
        == "http://terminology.hl7.org/CodeSystem/object-role"
    )
    assert inst.entity[0].type.code == "1"
    assert inst.entity[0].type.display == "Person"
    assert (
        inst.entity[0].type.system
        == "http://terminology.hl7.org/CodeSystem/audit-entity-type"
    )
    assert inst.entity[0].what.reference == "Patient/example"
    # "Everthing" / "Namne" below are [sic] — they mirror typos in the
    # example JSON fixture itself and must not be "corrected" here.
    assert inst.entity[1].description == "data about Everthing important"
    assert inst.entity[1].lifecycle.code == "11"
    assert inst.entity[1].lifecycle.display == "Disclosure"
    assert (
        inst.entity[1].lifecycle.system
        == "http://terminology.hl7.org/CodeSystem/dicom-audit-lifecycle"
    )
    assert inst.entity[1].name == "Namne of What"
    assert inst.entity[1].role.code == "4"
    assert inst.entity[1].role.display == "Domain Resource"
    assert (
        inst.entity[1].role.system
        == "http://terminology.hl7.org/CodeSystem/object-role"
    )
    assert inst.entity[1].securityLabel[0].code == "V"
    assert inst.entity[1].securityLabel[0].display == "very restricted"
    assert (
        inst.entity[1].securityLabel[0].system
        == "http://terminology.hl7.org/CodeSystem/v3-Confidentiality"
    )
    assert inst.entity[1].securityLabel[1].code == "STD"
    assert (
        inst.entity[1].securityLabel[1].display
        == "sexually transmitted disease information sensitivity"
    )
    assert (
        inst.entity[1].securityLabel[1].system
        == "http://terminology.hl7.org/CodeSystem/v3-ActCode"
    )
    assert inst.entity[1].securityLabel[2].code == "DELAU"
    assert inst.entity[1].securityLabel[2].display == "delete after use"
    assert (
        inst.entity[1].securityLabel[2].system
        == "http://terminology.hl7.org/CodeSystem/v3-ActCode"
    )
    assert inst.entity[1].type.code == "2"
    assert inst.entity[1].type.display == "System Object"
    assert (
        inst.entity[1].type.system
        == "http://terminology.hl7.org/CodeSystem/audit-entity-type"
    )
    assert inst.entity[1].what.identifier.value == "What.id"
    assert inst.entity[1].what.reference == "Patient/example/_history/1"
    assert inst.id == "example-disclosure"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.outcome == "0"
    assert inst.outcomeDesc == "Successful Disclosure"
    assert inst.purposeOfEvent[0].coding[0].code == "HMARKT"
    assert inst.purposeOfEvent[0].coding[0].display == "healthcare marketing"
    assert (
        inst.purposeOfEvent[0].coding[0].system
        == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.recorded == fhirtypes.Instant.validate("2013-09-22T00:08:00Z")
    assert (
        inst.source.observer.display == "Watchers Accounting of Disclosures Application"
    )
    assert inst.source.site == "Watcher"
    assert inst.source.type[0].code == "4"
    assert inst.source.type[0].display == "Application Server"
    assert (
        inst.source.type[0].system
        == "http://terminology.hl7.org/CodeSystem/security-source-type"
    )
    assert inst.subtype[0].code == "Disclosure"
    assert inst.subtype[0].display == "HIPAA disclosure"
    assert inst.text.div == (
        '<div xmlns="http://www.w3.org/1999/xhtml">Disclosure by '
        "some idiot, for marketing reasons, to places unknown, of a "
        "Poor Sap, data about Everthing important.</div>"
    )
    assert inst.text.status == "generated"
    assert inst.type.code == "110106"
    assert inst.type.display == "Export"
    assert inst.type.system == "http://dicom.nema.org/resources/ontology/DCM"
def test_auditevent_8(base_settings):
    """No. 8 tests collection for AuditEvent.
    Test File: auditevent-example-disclosure.json
    """
    source_path = base_settings["unittest_data_dir"] / "auditevent-example-disclosure.json"
    # Parse the JSON example into an AuditEvent resource and validate it.
    resource = auditevent.AuditEvent.parse_file(
        source_path, encoding="utf-8", content_type="application/json"
    )
    assert resource.resource_type == "AuditEvent"
    impl_auditevent_8(resource)
    # Round-trip: serialize the parsed resource, rebuild from the dict,
    # and validate the rebuilt instance the same way.
    payload = resource.dict()
    assert payload["resourceType"] == "AuditEvent"
    rebuilt = auditevent.AuditEvent(**payload)
    impl_auditevent_8(rebuilt)
def impl_auditevent_9(inst):
    """Assert the field values of the AuditEvent parsed from
    auditevent-example-error.json (a failed RESTful create operation).

    Runs against both the file-parsed instance and the instance rebuilt
    from its own ``dict()`` output.
    """
    assert inst.action == "C"
    assert inst.agent[0].altId == "601847123"
    assert inst.agent[0].name == "Grahame Grieve"
    assert inst.agent[0].requestor is True
    assert inst.agent[0].type.coding[0].code == "humanuser"
    assert inst.agent[0].type.coding[0].display == "human user"
    assert inst.agent[0].type.coding[0].system == (
        "http://terminology.hl7.org/CodeSystem/extra-security-role-" "type"
    )
    assert inst.agent[0].who.identifier.value == "95"
    assert inst.agent[1].altId == "6580"
    assert inst.agent[1].network.address == "Workstation1.ehr.familyclinic.com"
    assert inst.agent[1].network.type == "1"
    assert inst.agent[1].requestor is False
    assert inst.agent[1].type.coding[0].code == "110153"
    assert inst.agent[1].type.coding[0].display == "Source Role ID"
    assert (
        inst.agent[1].type.coding[0].system
        == "http://dicom.nema.org/resources/ontology/DCM"
    )
    assert inst.agent[1].who.identifier.system == "urn:oid:2.16.840.1.113883.4.2"
    assert inst.agent[1].who.identifier.value == "2.16.840.1.113883.4.2"
    # The failure detail lives in a contained OperationOutcome ("#o1").
    assert inst.contained[0].id == "o1"
    assert inst.entity[0].detail[0].type == "requested transaction"
    assert inst.entity[0].detail[0].valueString == "http POST ..... "
    assert inst.entity[0].type.code == "2"
    assert inst.entity[0].type.display == "System Object"
    assert (
        inst.entity[0].type.system
        == "http://terminology.hl7.org/CodeSystem/audit-entity-type"
    )
    assert inst.entity[1].description == "transaction failed"
    assert inst.entity[1].type.code == "OperationOutcome"
    assert inst.entity[1].type.display == "OperationOutcome"
    assert inst.entity[1].type.system == "http://hl7.org/fhir/resource-types"
    assert inst.entity[1].what.reference == "#o1"
    assert inst.id == "example-error"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.outcome == "8"
    assert inst.outcomeDesc == (
        "Invalid request to create an Operation resource on the " "Patient endpoint."
    )
    assert inst.recorded == fhirtypes.Instant.validate("2017-09-07T23:42:24Z")
    assert (
        inst.source.observer.identifier.value == "hl7connect.healthintersections.com.au"
    )
    assert inst.source.site == "Cloud"
    assert inst.source.type[0].code == "3"
    assert inst.source.type[0].display == "Web Server"
    assert (
        inst.source.type[0].system
        == "http://terminology.hl7.org/CodeSystem/security-source-type"
    )
    assert inst.subtype[0].code == "create"
    assert inst.subtype[0].display == "create"
    assert inst.subtype[0].system == "http://hl7.org/fhir/restful-interaction"
    assert inst.text.status == "generated"
    assert inst.type.code == "rest"
    assert inst.type.display == "Restful Operation"
    assert inst.type.system == "http://terminology.hl7.org/CodeSystem/audit-event-type"
def test_auditevent_9(base_settings):
    """No. 9 tests collection for AuditEvent.
    Test File: auditevent-example-error.json
    """
    source_path = base_settings["unittest_data_dir"] / "auditevent-example-error.json"
    # Parse the JSON example into an AuditEvent resource and validate it.
    resource = auditevent.AuditEvent.parse_file(
        source_path, encoding="utf-8", content_type="application/json"
    )
    assert resource.resource_type == "AuditEvent"
    impl_auditevent_9(resource)
    # Round-trip: serialize the parsed resource, rebuild from the dict,
    # and validate the rebuilt instance the same way.
    payload = resource.dict()
    assert payload["resourceType"] == "AuditEvent"
    rebuilt = auditevent.AuditEvent(**payload)
    impl_auditevent_9(rebuilt)
| 40.952656
| 88
| 0.662174
| 4,722
| 35,465
| 4.943668
| 0.073486
| 0.185058
| 0.102168
| 0.054147
| 0.898689
| 0.868489
| 0.823209
| 0.806588
| 0.79001
| 0.757582
| 0
| 0.049108
| 0.178345
| 35,465
| 865
| 89
| 41
| 0.75199
| 0.042605
| 0
| 0.615588
| 0
| 0.002642
| 0.272945
| 0.046822
| 0
| 0
| 0
| 0
| 0.594452
| 1
| 0.023778
| false
| 0
| 0.006605
| 0
| 0.030383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbbb10584049f01536360706fd04b900f56830c3
| 1,256
|
py
|
Python
|
3d_printing/test1/test1.py
|
CoffeeAddict93/braille_translation
|
30d5514fa0a6c010df5ad053d6e69298dba836ab
|
[
"MIT"
] | 1
|
2021-11-24T03:51:06.000Z
|
2021-11-24T03:51:06.000Z
|
3d_printing/test1/test1.py
|
CoffeeAddict93/braille_translation
|
30d5514fa0a6c010df5ad053d6e69298dba836ab
|
[
"MIT"
] | null | null | null |
3d_printing/test1/test1.py
|
CoffeeAddict93/braille_translation
|
30d5514fa0a6c010df5ad053d6e69298dba836ab
|
[
"MIT"
] | null | null | null |
import bpy

# Base plate: start from Blender's default cube and scale it into a flat
# slab for the braille dots to sit on.
bpy.ops.mesh.primitive_cube_add()
bpy.ops.transform.resize(value=(10, 5, 1))

# Dot grid: the original script placed 18 spheres with hand-duplicated
# calls (3 rows x 6 columns).  The coordinates and creation order are
# identical, but generated from one nested loop so the layout can be
# tweaked in a single place.
DOT_COLUMNS = (-8, -5.5, -1.5, 1, 5, 7.5)  # x coordinates, left to right
DOT_ROWS = (3, 0, -3)                      # y coordinates: 1st, 2nd, 3rd row
DOT_HEIGHT = 1                             # z: dots rest on top of the slab

for row_y in DOT_ROWS:
    for col_x in DOT_COLUMNS:
        bpy.ops.mesh.primitive_uv_sphere_add(location=(col_x, row_y, DOT_HEIGHT))
| 40.516129
| 59
| 0.754777
| 243
| 1,256
| 3.670782
| 0.127572
| 0.134529
| 0.213004
| 0.404709
| 0.845291
| 0.845291
| 0.845291
| 0.845291
| 0.845291
| 0.845291
| 0
| 0.059574
| 0.06449
| 1,256
| 31
| 60
| 40.516129
| 0.699574
| 0.08121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.047619
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
dbcaaecafc30b16c560287e07e395352311aeff3
| 1,789
|
py
|
Python
|
tests/test_mutations.py
|
sgalella/NQueensGA
|
f95514d7d128f29580fba095e7c486ea89c78a97
|
[
"MIT"
] | null | null | null |
tests/test_mutations.py
|
sgalella/NQueensGA
|
f95514d7d128f29580fba095e7c486ea89c78a97
|
[
"MIT"
] | null | null | null |
tests/test_mutations.py
|
sgalella/NQueensGA
|
f95514d7d128f29580fba095e7c486ea89c78a97
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from genetic_nqueens import mutation
class TestMutations(unittest.TestCase):
    """Unit tests for the mutation operators in ``genetic_nqueens.mutation``."""

    def test_swap(self):
        """Swapping indices i and j exchanges exactly those two genes."""
        for genes, i, j, expected in (
            ([1, 2, 3, 4, 5], 0, 1, [2, 1, 3, 4, 5]),
            ([1, 2, 3, 4, 5, 6], 1, 0, [2, 1, 3, 4, 5, 6]),
            ([1, 2, 3, 4, 5, 6, 7], 3, 6, [1, 2, 3, 7, 5, 6, 4]),
        ):
            np.testing.assert_array_equal(
                mutation.swap(np.array(genes), i, j), np.array(expected)
            )

    def test_insert(self):
        """Inserting moves the gene at the second index next to the first."""
        for genes, i, j, expected in (
            ([1, 2, 3, 4, 5], 0, 2, [1, 3, 2, 4, 5]),
            ([1, 2, 3, 4, 5, 6], 0, 5, [1, 6, 2, 3, 4, 5]),
            ([1, 2, 3, 4, 5, 6, 7], 3, 5, [1, 2, 3, 4, 6, 5, 7]),
        ):
            np.testing.assert_array_equal(
                mutation.insert(np.array(genes), i, j), np.array(expected)
            )

    def test_scramble(self):
        """Scrambling a segment must produce a chromosome != the input."""
        for genes, start, end in (
            ([1, 2, 3, 4, 5], 0, 2),
            ([1, 2, 3, 4, 5, 6], 0, 6),
            ([1, 2, 3, 4, 5, 6, 7], 3, 5),
        ):
            original = np.array(genes)
            mutated = mutation.scramble(np.array(genes), start, end)
            # Same check as before: at least one position changed.
            np.testing.assert_equal(all(np.equal(mutated, original)), False)

    def test_inversion(self):
        """Inversion reverses the segment between the two indices."""
        for genes, start, end, expected in (
            ([1, 2, 3, 4, 5], 0, 2, [3, 2, 1, 4, 5]),
            ([1, 2, 3, 4, 5, 6], 2, 4, [1, 2, 5, 4, 3, 6]),
            ([1, 2, 3, 4, 5, 6, 7], 5, 6, [1, 2, 3, 4, 5, 7, 6]),
        ):
            np.testing.assert_array_equal(
                mutation.inversion(np.array(genes), start, end), np.array(expected)
            )
# Allow running the suite directly: ``python tests/test_mutations.py``.
if __name__ == "__main__":
    unittest.main()
| 54.212121
| 144
| 0.595305
| 345
| 1,789
| 2.988406
| 0.104348
| 0.162949
| 0.162949
| 0.165858
| 0.782735
| 0.782735
| 0.772066
| 0.734239
| 0.705141
| 0.695441
| 0
| 0.114519
| 0.179989
| 1,789
| 32
| 145
| 55.90625
| 0.588275
| 0
| 0
| 0
| 0
| 0
| 0.004472
| 0
| 0
| 0
| 0
| 0
| 0.545455
| 1
| 0.181818
| false
| 0
| 0.136364
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91872b51a47703f361ea28266b22a24b9f7b24a5
| 74,664
|
py
|
Python
|
zun/tests/unit/compute/test_compute_manager.py
|
hualingson/zun
|
4fc4e9e0e0f5478d749215c7ba0679a8502f7737
|
[
"Apache-2.0"
] | null | null | null |
zun/tests/unit/compute/test_compute_manager.py
|
hualingson/zun
|
4fc4e9e0e0f5478d749215c7ba0679a8502f7737
|
[
"Apache-2.0"
] | null | null | null |
zun/tests/unit/compute/test_compute_manager.py
|
hualingson/zun
|
4fc4e9e0e0f5478d749215c7ba0679a8502f7737
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import uuidutils
from six import StringIO
from zun.common import consts
from zun.common import exception
from zun.compute import claims
from zun.compute import manager
import zun.conf
from zun import objects
from zun.objects.container import Container
from zun.objects.container_action import ContainerAction
from zun.objects.container_action import ContainerActionEvent
from zun.objects.exec_instance import ExecInstance
from zun.objects.image import Image
from zun.objects.network import Network
from zun.objects.volume_mapping import VolumeMapping
from zun.tests import base
from zun.tests.unit.container.fake_driver import FakeDriver as fake_driver
from zun.tests.unit.db import utils
class FakeResourceTracker(object):
    """Stand-in resource tracker whose claims always succeed as no-ops."""

    def __init__(self, *args, **kwargs):
        # Tests only poke at compute_node, so a plain MagicMock suffices.
        self.compute_node = mock.MagicMock()

    def container_claim(self, context, container, pci_requests, limits):
        """Grant every container claim unconditionally."""
        return claims.NopClaim()

    def container_update_claim(self, context, container, old_container,
                               limits):
        """Grant every update claim unconditionally."""
        return claims.NopClaim()

    def remove_usage_from_container(self, contxt, context, is_remmoved=True):
        """Accept and ignore usage removal.

        NOTE(review): parameter names ("contxt", "is_remmoved") look like
        typos but are kept verbatim to preserve the call interface.
        """
        return None
class FakeVolumeMapping(object):
    """In-memory stand-in for VolumeMapping used by the manager tests.

    Tracks created mappings on a class-level ``volumes`` list so the
    ``list_by_*``/``count`` classmethods can serve as ``side_effect``
    replacements for the real VolumeMapping queries.
    """

    def __init__(self):
        # NOTE: re-binds the *class-level* list, so constructing a new
        # instance discards any volumes tracked by earlier instances.
        self.__class__.volumes = []
        self.volume_provider = 'fake_provider'
        self.container_path = 'fake_path'
        self.container_uuid = 'fake-cid'
        self.cinder_volume_id = 'fake-vid'
        self.volume_id = 123
        self.connection_info = None
        self.auto_remove = False
        self.uuid = 'fake-uuid'
        self.volume = mock.Mock()
        self.volume.refresh.side_effect = self._volume_refresh

    def _volume_refresh(self):
        # Simulates the real refresh() repopulating connection_info.
        self.connection_info = 'refresh-info'

    def create(self, context):
        # Register this mapping on the shared class-level list.
        self.__class__.volumes.append(self)

    def destroy(self):
        self.__class__.volumes.remove(self)

    def obj_attr_is_set(self, name):
        # Reports "set" whenever any volume is tracked, regardless of name.
        return bool(self.__class__.volumes)

    @classmethod
    def list_by_container(cls, context, container_id):
        # Ignores container_id: returns every tracked volume.
        return cls.volumes

    @classmethod
    def list_by_cinder_volume(cls, context, volume_id):
        # Ignores volume_id: returns every tracked volume.
        return cls.volumes

    @classmethod
    def count(cls, context, **filters):
        return len(cls.volumes)
class TestManager(base.TestCase):
    def setUp(self):
        """Point the compute manager at the fake driver and fake tracker."""
        super(TestManager, self).setUp()
        zun.conf.CONF.set_override(
            'container_driver',
            'zun.tests.unit.container.fake_driver.FakeDriver')
        self.compute_manager = manager.Manager()
        self.compute_manager._resource_tracker = FakeResourceTracker()
    @mock.patch.object(Container, 'save')
    def test_init_container_sets_creating_error(self, mock_save):
        """A container found in CREATING at init is moved to ERROR."""
        container = Container(self.context, **utils.get_test_container())
        container.status = consts.CREATING
        self.compute_manager._init_container(context=self.context,
                                             container=container)
        self.assertEqual(consts.ERROR, container.status)
        self.assertIsNone(container.task_state)
    @mock.patch.object(Container, 'save')
    def test_init_container_sets_creating_tasks_error(self, mock_save):
        """Creating/pulling task states are also reset to ERROR at init."""
        tasks = [consts.CONTAINER_CREATING, consts.IMAGE_PULLING]
        container = Container(self.context, **utils.get_test_container())
        for task in tasks:
            container.task_state = task
            self.compute_manager._init_container(context=self.context,
                                                 container=container)
            self.assertEqual(consts.ERROR, container.status)
            self.assertIsNone(container.task_state)
    @mock.patch.object(manager.Manager, 'container_reboot')
    @mock.patch.object(Container, 'save')
    def test_init_container_retries_reboot(self, mock_save,
                                           mock_container_reboot):
        """An interrupted reboot is retried (60s timeout) at init."""
        container = Container(self.context, **utils.get_test_container())
        container.task_state = consts.CONTAINER_REBOOTING
        self.compute_manager._init_container(self.context, container)
        mock_container_reboot.assert_called_once_with(self.context,
                                                      container, 60)
    @mock.patch.object(manager.Manager, 'container_start')
    @mock.patch.object(Container, 'save')
    def test_init_container_retries_start(self, mock_save,
                                          mock_container_start):
        """An interrupted start on a STOPPED container is retried at init."""
        container = Container(self.context, **utils.get_test_container())
        container.task_state = consts.CONTAINER_STARTING
        container.status = consts.STOPPED
        self.compute_manager._init_container(self.context, container)
        mock_container_start.assert_called_once_with(self.context,
                                                     container)
    @mock.patch.object(manager.Manager, 'container_start')
    @mock.patch.object(Container, 'save')
    def test_container_reboot_after_host_reboot(self, mock_save,
                                                mock_container_start):
        """A previously-RUNNING container found STOPPED is restarted."""
        container_1 = Container(self.context, **utils.get_test_container())
        container_1.status = consts.RUNNING
        self.compute_manager.restore_running_container(self.context,
                                                       container_1,
                                                       consts.STOPPED)
        mock_container_start.assert_called_once_with(self.context,
                                                     container_1)
    @mock.patch.object(manager.Manager, 'container_stop')
    @mock.patch.object(Container, 'save')
    def test_init_container_retries_stop(self, mock_save,
                                         mock_container_stop):
        """An interrupted stop is retried (60s timeout) at init."""
        container = Container(self.context, **utils.get_test_container())
        container.task_state = consts.CONTAINER_STOPPING
        self.compute_manager._init_container(self.context, container)
        mock_container_stop.assert_called_once_with(self.context,
                                                    container, 60)
    @mock.patch.object(manager.Manager, 'container_delete')
    @mock.patch.object(Container, 'save')
    def test_init_container_retries_deleting(self, mock_save,
                                             mock_container_delete):
        """A container stuck in DELETING status is force-deleted at init."""
        kw = {'status': consts.DELETING,
              'task_state': None}
        container = Container(self.context, **utils.get_test_container(**kw))
        self.compute_manager._init_container(self.context, container)
        mock_container_delete.assert_called_once_with(self.context, container,
                                                      force=True)
    @mock.patch.object(manager.Manager, 'container_delete')
    @mock.patch.object(Container, 'save')
    def test_init_container_retries_container_delete_task(
            self, mock_save, mock_container_delete):
        """A container stuck in the DELETING task state is force-deleted."""
        container = Container(self.context, **utils.get_test_container())
        container.task_state = consts.CONTAINER_DELETING
        self.compute_manager._init_container(self.context, container)
        mock_container_delete.assert_called_once_with(self.context, container,
                                                      force=True)
    @mock.patch.object(Container, 'save')
    def test_fail_container(self, mock_save):
        """_fail_container sets ERROR, records the reason, clears the task."""
        container = Container(self.context, **utils.get_test_container())
        self.compute_manager._fail_container(self.context, container,
                                             "Creation Failed")
        self.assertEqual(consts.ERROR, container.status)
        self.assertEqual("Creation Failed", container.status_reason)
        self.assertIsNone(container.task_state)
    # mock.patch decorators apply bottom-up, so the mock arguments below
    # arrive in reverse decorator order (mock_create first).
    @mock.patch.object(ContainerActionEvent, 'event_start')
    @mock.patch.object(ContainerActionEvent, 'event_finish')
    @mock.patch.object(Container, 'save')
    @mock.patch.object(fake_driver, 'pull_image')
    @mock.patch.object(fake_driver, 'create')
    def test_container_create(self, mock_create, mock_pull, mock_save,
                              mock_event_finish, mock_event_start):
        """Happy path: pull then create, with a clean action event trail."""
        container = Container(self.context, **utils.get_test_container())
        image = {'image': 'repo', 'path': 'out_path', 'driver': 'glance'}
        mock_pull.return_value = image, False
        self.compute_manager._resource_tracker = FakeResourceTracker()
        networks = []
        volumes = []
        self.compute_manager._do_container_create(self.context, container,
                                                  networks, volumes)
        mock_save.assert_called_with(self.context)
        mock_pull.assert_any_call(self.context, container.image, '',
                                  'always', 'glance', registry=None)
        mock_create.assert_called_once_with(self.context, container, image,
                                            networks, volumes)
        mock_event_start.assert_called_once()
        mock_event_finish.assert_called_once()
        self.assertEqual(
            (self.context, container.uuid, 'compute__do_container_create'),
            mock_event_finish.call_args[0])
        # No exception info recorded on success.
        self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
        self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
    @mock.patch.object(ContainerActionEvent, 'event_start')
    @mock.patch.object(ContainerActionEvent, 'event_finish')
    @mock.patch.object(Container, 'save')
    @mock.patch.object(fake_driver, 'pull_image')
    @mock.patch.object(manager.Manager, '_fail_container')
    def test_container_create_pull_image_failed_docker_error(
            self, mock_fail, mock_pull, mock_save, mock_event_finish,
            mock_event_start):
        """A DockerError from pull re-raises, fails the container, and
        records exception info on the finish event."""
        container = Container(self.context, **utils.get_test_container())
        mock_pull.side_effect = exception.DockerError("Pull Failed")
        networks = []
        volumes = []
        self.assertRaises(exception.DockerError,
                          self.compute_manager._do_container_create,
                          self.context, container, networks, volumes)
        mock_fail.assert_called_once_with(self.context,
                                          container, "Pull Failed")
        mock_event_start.assert_called_once()
        mock_event_finish.assert_called_once()
        self.assertEqual(
            (self.context, container.uuid, 'compute__do_container_create'),
            mock_event_finish.call_args[0])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
    @mock.patch.object(ContainerActionEvent, 'event_start')
    @mock.patch.object(ContainerActionEvent, 'event_finish')
    @mock.patch.object(Container, 'save')
    @mock.patch.object(fake_driver, 'pull_image')
    @mock.patch.object(manager.Manager, '_fail_container')
    def test_container_create_pull_image_failed_image_not_found(
            self, mock_fail, mock_pull, mock_save, mock_event_finish,
            mock_event_start):
        """ImageNotFound from pull re-raises and fails the container."""
        container = Container(self.context, **utils.get_test_container())
        mock_pull.side_effect = exception.ImageNotFound("Image Not Found")
        networks = []
        volumes = []
        self.assertRaises(exception.ImageNotFound,
                          self.compute_manager._do_container_create,
                          self.context, container, networks, volumes)
        mock_fail.assert_called_once_with(self.context,
                                          container, "Image Not Found")
        mock_event_start.assert_called_once()
        mock_event_finish.assert_called_once()
        self.assertEqual(
            (self.context, container.uuid, 'compute__do_container_create'),
            mock_event_finish.call_args[0])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
    @mock.patch.object(ContainerActionEvent, 'event_start')
    @mock.patch.object(ContainerActionEvent, 'event_finish')
    @mock.patch.object(Container, 'save')
    @mock.patch.object(fake_driver, 'pull_image')
    @mock.patch.object(manager.Manager, '_fail_container')
    def test_container_create_pull_image_failed_zun_exception(
            self, mock_fail, mock_pull, mock_save, mock_event_finish,
            mock_event_start):
        """A generic ZunException from pull re-raises and fails the
        container with the exception message."""
        container = Container(self.context, **utils.get_test_container())
        mock_pull.side_effect = exception.ZunException(
            message="Image Not Found")
        networks = []
        volumes = []
        self.assertRaises(
            exception.ZunException,
            self.compute_manager._do_container_create,
            self.context, container, networks, volumes)
        mock_fail.assert_called_once_with(self.context,
                                          container, "Image Not Found")
        mock_event_start.assert_called_once()
        mock_event_finish.assert_called_once()
        self.assertEqual(
            (self.context, container.uuid, 'compute__do_container_create'),
            mock_event_finish.call_args[0])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
    @mock.patch.object(ContainerActionEvent, 'event_start')
    @mock.patch.object(ContainerActionEvent, 'event_finish')
    @mock.patch.object(Container, 'save')
    @mock.patch.object(fake_driver, 'pull_image')
    @mock.patch.object(fake_driver, 'create')
    @mock.patch.object(manager.Manager, '_fail_container')
    def test_container_create_docker_create_failed(
            self, mock_fail, mock_create, mock_pull, mock_save,
            mock_event_finish, mock_event_start):
        """A DockerError from the driver create fails the container and,
        unlike pull failures, unsets its host (unset_host=True)."""
        container = Container(self.context, **utils.get_test_container())
        image = {'image': 'repo', 'path': 'out_path', 'driver': 'glance',
                 'repo': 'test', 'tag': 'testtag'}
        mock_pull.return_value = image, False
        mock_create.side_effect = exception.DockerError("Creation Failed")
        self.compute_manager._resource_tracker = FakeResourceTracker()
        networks = []
        volumes = []
        self.assertRaises(exception.DockerError,
                          self.compute_manager._do_container_create,
                          self.context, container, networks, volumes)
        mock_fail.assert_called_once_with(
            self.context, container, "Creation Failed", unset_host=True)
        mock_event_start.assert_called_once()
        mock_event_finish.assert_called_once()
        self.assertEqual(
            (self.context, container.uuid, 'compute__do_container_create'),
            mock_event_finish.call_args[0])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
        self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.common.utils.spawn_n')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'count',
side_effect=FakeVolumeMapping.count)
@mock.patch.object(VolumeMapping, 'list_by_container',
side_effect=FakeVolumeMapping.list_by_container)
@mock.patch.object(fake_driver, 'pull_image')
@mock.patch.object(fake_driver, 'detach_volume')
@mock.patch.object(fake_driver, 'attach_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(fake_driver, 'create')
@mock.patch.object(fake_driver, 'start')
def test_container_run(
self, mock_start, mock_create,
mock_is_volume_available, mock_attach_volume,
mock_detach_volume, mock_pull, mock_list_by_container, mock_count,
mock_save, mock_spawn_n, mock_action_finish, mock_event_finish,
mock_event_start):
container = Container(self.context, **utils.get_test_container())
image = {'image': 'repo', 'path': 'out_path', 'driver': 'glance'}
mock_create.return_value = container
mock_pull.return_value = image, False
mock_is_volume_available.return_value = True, False
mock_spawn_n.side_effect = lambda f, *x, **y: f(*x, **y)
container.status = 'Stopped'
self.compute_manager._resource_tracker = FakeResourceTracker()
networks = []
volumes = {container.uuid: [FakeVolumeMapping()]}
self.compute_manager.container_create(
self.context,
requested_networks=networks,
requested_volumes=volumes,
container=container,
limits=None, run=True)
mock_save.assert_called_with(self.context)
mock_pull.assert_any_call(self.context, container.image, '',
'always', 'glance', registry=None)
mock_create.assert_called_once_with(self.context, container, image,
networks, volumes)
mock_start.assert_called_once_with(self.context, container)
mock_attach_volume.assert_called_once()
mock_detach_volume.assert_not_called()
mock_is_volume_available.assert_called_once()
self.assertEqual(1, len(FakeVolumeMapping.volumes))
@mock.patch.object(fake_driver, 'delete_volume')
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.common.utils.spawn_n')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'count',
                   side_effect=FakeVolumeMapping.count)
@mock.patch.object(VolumeMapping, 'list_by_cinder_volume',
                   side_effect=FakeVolumeMapping.list_by_cinder_volume)
@mock.patch.object(VolumeMapping, 'list_by_container',
                   side_effect=FakeVolumeMapping.list_by_container)
@mock.patch.object(fake_driver, 'pull_image')
@mock.patch.object(fake_driver, 'detach_volume')
@mock.patch.object(fake_driver, 'attach_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(fake_driver, 'create')
@mock.patch.object(fake_driver, 'start')
def test_container_run_driver_attach_failed(
        self, mock_start, mock_create,
        mock_is_volume_available, mock_attach_volume,
        mock_detach_volume, mock_pull, mock_list_by_container,
        mock_list_by_volume, mock_count, mock_save,
        mock_spawn_n, mock_action_finish, mock_event_finish,
        mock_event_start, mock_delete_volume):
    """Attach of the second volume fails: the run aborts before any
    pull/create/start, and the first (already-attached) volume is
    detached and removed from the fake mapping registry.
    """
    mock_is_volume_available.return_value = True, False
    # First attach succeeds, second raises -> triggers volume rollback.
    mock_attach_volume.side_effect = [None, base.TestingException("fake")]
    container = Container(self.context, **utils.get_test_container())
    vol = FakeVolumeMapping()
    vol.auto_remove = True
    vol2 = FakeVolumeMapping()
    vol2.auto_remove = True
    image = {'image': 'repo', 'path': 'out_path', 'driver': 'glance'}
    mock_create.return_value = container
    mock_pull.return_value = image, False
    # Run the spawned workload synchronously so assertions see its effects.
    mock_spawn_n.side_effect = lambda f, *x, **y: f(*x, **y)
    container.status = 'Stopped'
    self.compute_manager._resource_tracker = FakeResourceTracker()
    networks = []
    volumes = {container.uuid: [vol, vol2]}
    self.assertRaises(
        base.TestingException,
        self.compute_manager.container_create,
        self.context,
        requested_networks=networks,
        requested_volumes=volumes,
        container=container,
        limits=None, run=True)
    mock_save.assert_called_with(self.context)
    # Failure happens before the image/creation phase is reached.
    mock_pull.assert_not_called()
    mock_create.assert_not_called()
    mock_start.assert_not_called()
    mock_attach_volume.assert_has_calls([
        mock.call(mock.ANY, vol), mock.call(mock.ANY, vol2)])
    mock_detach_volume.assert_has_calls([
        mock.call(mock.ANY, vol)])
    self.assertEqual(0, len(FakeVolumeMapping.volumes))
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.common.utils.spawn_n')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'count',
                   side_effect=FakeVolumeMapping.count)
@mock.patch.object(VolumeMapping, 'list_by_cinder_volume',
                   side_effect=FakeVolumeMapping.list_by_cinder_volume)
@mock.patch.object(VolumeMapping, 'list_by_container',
                   side_effect=FakeVolumeMapping.list_by_container)
@mock.patch.object(fake_driver, 'detach_volume')
@mock.patch.object(fake_driver, 'attach_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(fake_driver, 'pull_image')
def test_container_run_image_not_found(
        self, mock_pull, mock_is_volume_available,
        mock_attach_volume, mock_detach_volume,
        mock_list_by_container, mock_list_by_volume, mock_count,
        mock_save, mock_spawn_n, mock_action_finish, mock_event_finish,
        mock_event_start):
    """ImageNotFound during pull: the container goes to 'Error' with the
    exception message as status_reason, and the attached volume is
    detached again (rollback).
    """
    container_dict = utils.get_test_container(
        image='test:latest', image_driver='docker',
        image_pull_policy='ifnotpresent')
    container = Container(self.context, **container_dict)
    mock_is_volume_available.return_value = True, False
    mock_pull.side_effect = exception.ImageNotFound(
        message="Image Not Found")
    # Run the spawned workload synchronously so assertions see its effects.
    mock_spawn_n.side_effect = lambda f, *x, **y: f(*x, **y)
    networks = []
    volumes = {container.uuid: [FakeVolumeMapping()]}
    self.assertRaises(
        exception.ImageNotFound,
        self.compute_manager.container_create,
        self.context,
        requested_networks=networks,
        requested_volumes=volumes,
        container=container,
        limits=None, run=True)
    mock_save.assert_called_with(self.context)
    self.assertEqual('Error', container.status)
    self.assertEqual('Image Not Found', container.status_reason)
    # 'test:latest' must be split into repo and tag for the pull call.
    mock_pull.assert_called_once_with(self.context, 'test', 'latest',
                                      'ifnotpresent', 'docker',
                                      registry=None)
    mock_attach_volume.assert_called_once()
    mock_detach_volume.assert_called_once()
    mock_is_volume_available.assert_called_once()
    self.assertEqual(0, len(FakeVolumeMapping.volumes))
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.common.utils.spawn_n')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'count',
                   side_effect=FakeVolumeMapping.count)
@mock.patch.object(VolumeMapping, 'list_by_cinder_volume',
                   side_effect=FakeVolumeMapping.list_by_cinder_volume)
@mock.patch.object(VolumeMapping, 'list_by_container',
                   side_effect=FakeVolumeMapping.list_by_container)
@mock.patch.object(fake_driver, 'detach_volume')
@mock.patch.object(fake_driver, 'attach_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(fake_driver, 'pull_image')
def test_container_run_image_pull_exception_raised(
        self, mock_pull, mock_is_volume_available,
        mock_attach_volume, mock_detach_volume,
        mock_list_by_container, mock_list_by_volume, mock_count,
        mock_save, mock_spawn_n, mock_action_finish, mock_event_finish,
        mock_event_start):
    """Generic ZunException during pull: same Error/rollback handling as
    the ImageNotFound case — status 'Error', volume detached again.
    """
    container_dict = utils.get_test_container(
        image='test:latest', image_driver='docker',
        image_pull_policy='ifnotpresent')
    container = Container(self.context, **container_dict)
    mock_is_volume_available.return_value = True, False
    mock_pull.side_effect = exception.ZunException(
        message="Image Not Found")
    # Run the spawned workload synchronously so assertions see its effects.
    mock_spawn_n.side_effect = lambda f, *x, **y: f(*x, **y)
    networks = []
    volumes = {container.uuid: [FakeVolumeMapping()]}
    self.assertRaises(
        exception.ZunException,
        self.compute_manager.container_create,
        self.context,
        requested_networks=networks,
        requested_volumes=volumes,
        container=container,
        limits=None, run=True)
    mock_save.assert_called_with(self.context)
    self.assertEqual('Error', container.status)
    self.assertEqual('Image Not Found', container.status_reason)
    mock_pull.assert_called_once_with(self.context, 'test', 'latest',
                                      'ifnotpresent', 'docker',
                                      registry=None)
    mock_attach_volume.assert_called_once()
    mock_detach_volume.assert_called_once()
    mock_is_volume_available.assert_called_once()
    self.assertEqual(0, len(FakeVolumeMapping.volumes))
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.common.utils.spawn_n')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'count',
                   side_effect=FakeVolumeMapping.count)
@mock.patch.object(VolumeMapping, 'list_by_cinder_volume',
                   side_effect=FakeVolumeMapping.list_by_cinder_volume)
@mock.patch.object(VolumeMapping, 'list_by_container',
                   side_effect=FakeVolumeMapping.list_by_container)
@mock.patch.object(fake_driver, 'detach_volume')
@mock.patch.object(fake_driver, 'attach_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(fake_driver, 'pull_image')
def test_container_run_image_pull_docker_error(
        self, mock_pull, mock_is_volume_available,
        mock_attach_volume, mock_detach_volume,
        mock_list_by_container, mock_list_by_volume, mock_count,
        mock_save, mock_spawn_n, mock_action_finish, mock_event_finish,
        mock_event_start):
    """DockerError during pull: container goes to 'Error' with the docker
    message as status_reason and the attached volume is rolled back.
    """
    container_dict = utils.get_test_container(
        image='test:latest', image_driver='docker',
        image_pull_policy='ifnotpresent')
    container = Container(self.context, **container_dict)
    mock_is_volume_available.return_value = True, False
    mock_pull.side_effect = exception.DockerError(
        message="Docker Error occurred")
    # Run the spawned workload synchronously so assertions see its effects.
    mock_spawn_n.side_effect = lambda f, *x, **y: f(*x, **y)
    networks = []
    volumes = {container.uuid: [FakeVolumeMapping()]}
    self.assertRaises(
        exception.DockerError,
        self.compute_manager.container_create,
        self.context,
        requested_networks=networks,
        requested_volumes=volumes,
        container=container,
        limits=None, run=True)
    mock_save.assert_called_with(self.context)
    self.assertEqual('Error', container.status)
    self.assertEqual('Docker Error occurred', container.status_reason)
    mock_pull.assert_called_once_with(self.context, 'test', 'latest',
                                      'ifnotpresent', 'docker',
                                      registry=None)
    mock_attach_volume.assert_called_once()
    mock_detach_volume.assert_called_once()
    mock_is_volume_available.assert_called_once()
    self.assertEqual(0, len(FakeVolumeMapping.volumes))
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.common.utils.spawn_n')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'count',
                   side_effect=FakeVolumeMapping.count)
@mock.patch.object(VolumeMapping, 'list_by_cinder_volume',
                   side_effect=FakeVolumeMapping.list_by_cinder_volume)
@mock.patch.object(VolumeMapping, 'list_by_container',
                   side_effect=FakeVolumeMapping.list_by_container)
@mock.patch.object(fake_driver, 'detach_volume')
@mock.patch.object(fake_driver, 'attach_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(fake_driver, 'pull_image')
@mock.patch.object(fake_driver, 'create')
def test_container_run_create_raises_docker_error(
        self, mock_create, mock_pull, mock_is_volume_available,
        mock_attach_volume, mock_detach_volume,
        mock_list_by_container, mock_list_by_volume, mock_count,
        mock_save, mock_spawn_n, mock_action_finish,
        mock_event_finish, mock_event_start):
    """Pull succeeds but driver create() raises DockerError: container is
    set to 'Error' and the attached volume is detached (rollback).
    """
    container = Container(self.context, **utils.get_test_container())
    image = {'image': 'repo', 'path': 'out_path', 'driver': 'glance',
             'repo': 'test', 'tag': 'testtag'}
    mock_pull.return_value = image, True
    mock_is_volume_available.return_value = True, False
    mock_create.side_effect = exception.DockerError(
        message="Docker Error occurred")
    # Run the spawned workload synchronously so assertions see its effects.
    mock_spawn_n.side_effect = lambda f, *x, **y: f(*x, **y)
    self.compute_manager._resource_tracker = FakeResourceTracker()
    networks = []
    volumes = {container.uuid: [FakeVolumeMapping()]}
    self.assertRaises(
        exception.DockerError,
        self.compute_manager.container_create,
        self.context,
        requested_networks=networks,
        requested_volumes=volumes,
        container=container,
        limits=None, run=True)
    mock_save.assert_called_with(self.context)
    self.assertEqual('Error', container.status)
    self.assertEqual('Docker Error occurred', container.status_reason)
    mock_pull.assert_any_call(self.context, container.image, '',
                              'always', 'glance', registry=None)
    mock_create.assert_called_once_with(
        self.context, container, image, networks, volumes)
    mock_attach_volume.assert_called_once()
    mock_detach_volume.assert_called_once()
    mock_is_volume_available.assert_called_once()
    self.assertEqual(0, len(FakeVolumeMapping.volumes))
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.common.utils.spawn_n')
@mock.patch.object(objects.Capsule, 'save')
@mock.patch.object(objects.CapsuleContainer, 'list_by_capsule_id')
@mock.patch.object(objects.CapsuleInitContainer, 'list_by_capsule_id')
@mock.patch.object(VolumeMapping, 'count',
                   side_effect=FakeVolumeMapping.count)
@mock.patch.object(VolumeMapping, 'list_by_container',
                   side_effect=FakeVolumeMapping.list_by_container)
@mock.patch.object(fake_driver, 'pull_image')
@mock.patch.object(fake_driver, 'detach_volume')
@mock.patch.object(fake_driver, 'attach_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(fake_driver, 'create_capsule')
@mock.patch.object(fake_driver, 'start')
def test_capsule_create(
        self, mock_start, mock_create,
        mock_is_volume_available, mock_attach_volume,
        mock_detach_volume, mock_pull, mock_list_by_container, mock_count,
        mock_init_container_list, mock_capsule_container_list,
        mock_save, mock_spawn_n, mock_action_finish, mock_event_finish,
        mock_event_start):
    """Capsule creation: the capsule with one regular and one init
    container has its volumes attached and is created via the driver's
    create_capsule() and then started.
    """
    capsule = objects.Capsule(self.context, **utils.get_test_container())
    image = {'image': 'repo', 'path': 'out_path', 'driver': 'glance'}
    mock_create.return_value = capsule
    container_uuid = uuidutils.generate_uuid()
    container = objects.CapsuleContainer(
        self.context, **utils.get_test_container(uuid=container_uuid))
    mock_capsule_container_list.return_value = [container]
    container_uuid = uuidutils.generate_uuid()
    init_container = objects.CapsuleInitContainer(
        self.context, **utils.get_test_container(uuid=container_uuid))
    mock_init_container_list.return_value = [init_container]
    mock_pull.return_value = image, False
    mock_is_volume_available.return_value = True, False
    # Run the spawned workload synchronously so assertions see its effects.
    mock_spawn_n.side_effect = lambda f, *x, **y: f(*x, **y)
    capsule.status = 'Running'
    self.compute_manager._resource_tracker = FakeResourceTracker()
    networks = []
    # One volume mapping per (init-)container in the capsule.
    volmap1 = FakeVolumeMapping()
    volmap1.uuid = 'fake-uuid-1'
    volmap2 = FakeVolumeMapping()
    volmap2.uuid = 'fake-uuid-2'
    volumes = {container.uuid: [volmap1],
               init_container.uuid: [volmap2]}
    # Mimic a real driver attach by filling in connection_info in place.
    def attach_volume(context, volmap):
        volmap.connection_info = 'fake-info'
    mock_attach_volume.side_effect = attach_volume
    self.compute_manager.container_create(
        self.context,
        requested_networks=networks,
        requested_volumes=volumes,
        container=capsule,
        limits=None, run=True)
    mock_save.assert_called_with(self.context)
    mock_pull.assert_any_call(self.context, capsule.image, '',
                              'always', 'glance', registry=None)
    mock_create.assert_called_once_with(self.context, capsule, image,
                                        networks, volumes)
    mock_start.assert_called_once_with(self.context, capsule)
    mock_attach_volume.assert_called_once()
    mock_detach_volume.assert_not_called()
    self.assertEqual(2, mock_is_volume_available.call_count)
    self.assertEqual(2, len(FakeVolumeMapping.volumes))
@mock.patch.object(FakeResourceTracker,
                   'remove_usage_from_container')
@mock.patch.object(Container, 'destroy')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'list_by_container')
@mock.patch.object(fake_driver, 'delete')
def test_container_delete(
        self, mock_delete, mock_list_by_container, mock_save,
        mock_cnt_destroy, mock_remove_usage):
    """Successful delete: driver delete, DB destroy and resource-usage
    removal are each invoked exactly once with force=False semantics.
    """
    mock_list_by_container.return_value = []
    container = Container(self.context, **utils.get_test_container())
    # Fixed typo: 'self. context' (stray space after the dot) had been
    # relying on Python tolerating whitespace around the attribute dot.
    self.compute_manager._do_container_delete(self.context, container,
                                              False)
    mock_save.assert_called_with(self.context)
    mock_delete.assert_called_once_with(self.context, container, False)
    mock_cnt_destroy.assert_called_once_with(self.context)
    mock_remove_usage.assert_called_once_with(self.context, container,
                                              True)
@mock.patch.object(FakeResourceTracker,
                   'remove_usage_from_container')
@mock.patch.object(Container, 'destroy')
@mock.patch.object(manager.Manager, '_fail_container')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'delete')
def test_container_delete_failed(self, mock_delete, mock_save,
                                 mock_fail, mock_destroy,
                                 mock_remove_usage):
    """Non-forced delete failure: the DockerError is re-raised, the
    container is marked failed, and the DB record and resource usage
    are left untouched.
    """
    container = Container(self.context, **utils.get_test_container())
    mock_delete.side_effect = exception.DockerError(
        message="Docker Error occurred")
    self.assertRaises(exception.DockerError,
                      self.compute_manager._do_container_delete,
                      self.context, container, False)
    mock_save.assert_called_with(self.context)
    mock_fail.assert_called_with(self.context,
                                 container, 'Docker Error occurred')
    # Without force, a failed driver delete must not destroy the record.
    mock_destroy.assert_not_called()
    mock_remove_usage.assert_not_called()
@mock.patch.object(FakeResourceTracker,
                   'remove_usage_from_container')
@mock.patch.object(Container, 'destroy')
@mock.patch.object(manager.Manager, '_fail_container')
@mock.patch.object(Container, 'save')
@mock.patch.object(VolumeMapping, 'list_by_container')
@mock.patch.object(fake_driver, 'delete')
def test_container_delete_failed_force(self, mock_delete,
                                       mock_list_by_container,
                                       mock_save,
                                       mock_fail, mock_destroy,
                                       mock_remove_usage):
    """Forced delete swallows the driver error: the container is marked
    failed but the DB record is still destroyed and usage removed.
    """
    mock_list_by_container.return_value = []
    container = Container(self.context, **utils.get_test_container())
    mock_delete.side_effect = exception.DockerError(
        message="Docker Error occurred")
    self.compute_manager._do_container_delete(self.context, container,
                                              True)
    mock_save.assert_called_with(self.context)
    mock_fail.assert_called_with(self.context,
                                 container, 'Docker Error occurred')
    mock_destroy.assert_called_once_with(self.context)
    mock_remove_usage.assert_called_once_with(self.context, container,
                                              True)
@mock.patch.object(fake_driver, 'show')
def test_container_show(self, mock_show):
    """container_show delegates straight to the driver's show()."""
    attrs = utils.get_test_container()
    test_cont = Container(self.context, **attrs)
    self.compute_manager.container_show(self.context, test_cont)
    mock_show.assert_called_once_with(self.context, test_cont)
@mock.patch.object(fake_driver, 'show')
def test_container_show_failed(self, mock_show):
    """A DockerError raised by the driver's show() propagates out of
    container_show unchanged."""
    attrs = utils.get_test_container()
    test_cont = Container(self.context, **attrs)
    mock_show.side_effect = exception.DockerError
    with self.assertRaises(exception.DockerError):
        self.compute_manager.container_show(self.context, test_cont)
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch('zun.compute.manager.Manager._get_vol_info')
@mock.patch('zun.compute.manager.Manager._get_network_info')
@mock.patch.object(fake_driver, 'pull_image')
@mock.patch.object(fake_driver, 'check_container_exist')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'create')
@mock.patch.object(fake_driver, 'delete')
def test_container_rebuild(self, mock_delete, mock_create,
                           mock_save, mock_check, mock_pull,
                           mock_get_network_info, mock_get_vol_info,
                           mock_action_finish,
                           mock_event_finish, mock_event_start):
    """Successful rebuild: the existing container is deleted (forced)
    and re-created, and the action event finishes without exception.
    """
    container = Container(self.context, **utils.get_test_container())
    image = {'image': 'repo', 'path': 'out_path', 'driver': 'glance'}
    mock_pull.return_value = image, False
    container.status = 'Created'
    mock_get_network_info.return_value = []
    mock_get_vol_info.return_value = []
    # The old container exists, so rebuild must delete it first.
    mock_check.return_value = True
    self.compute_manager._do_container_rebuild(
        self.context, container, False)
    mock_save.assert_called_with(self.context)
    self.assertTrue(mock_create.called)
    mock_delete.assert_called_once_with(self.context, container, True)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    mock_action_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_rebuild'),
        mock_event_finish.call_args[0])
    # No exception info on the success path.
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch.object(Container, 'save')
@mock.patch('zun.compute.manager.Manager._get_vol_info')
@mock.patch('zun.compute.manager.Manager._get_network_info')
@mock.patch.object(manager.Manager, '_fail_container')
def test_container_rebuild_failed(
        self, mock_fail, mock_get_network_info, mock_get_vol_info,
        mock_save, mock_action_finish, mock_event_finish,
        mock_event_start):
    """Rebuild aborts when network-info lookup raises PortNotFound: the
    container is marked failed and the event records the exception.
    """
    mock_get_vol_info.return_value = []
    fake_exc = exception.PortNotFound(port='fake-port')
    mock_get_network_info.side_effect = fake_exc
    container = Container(self.context, **utils.get_test_container())
    self.assertRaises(exception.PortNotFound,
                      self.compute_manager._do_container_rebuild,
                      self.context, container, True)
    mock_fail.assert_called_with(self.context,
                                 container, str(fake_exc))
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    mock_action_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_rebuild'),
        mock_event_finish.call_args[0])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'reboot')
def test_container_reboot(self, mock_reboot, mock_save, mock_event_finish,
                          mock_event_start):
    """Successful reboot: driver reboot is called with the timeout and
    the action event finishes without exception info.
    """
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_container_reboot(self.context, container, 10)
    mock_save.assert_called_with(self.context)
    mock_reboot.assert_called_once_with(self.context, container, 10)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_reboot'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'reboot')
def test_container_reboot_failed(self, mock_reboot, mock_save,
                                 mock_event_finish,
                                 mock_event_start):
    """Reboot failure is absorbed (no re-raise) but the action event must
    record the exception info.
    """
    container = Container(self.context, **utils.get_test_container())
    mock_reboot.side_effect = exception.DockerError(
        message="Docker Error occurred")
    self.compute_manager._do_container_reboot(self.context, container, 10)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_reboot'),
        mock_event_finish.call_args[0])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'stop')
def test_container_stop(self, mock_stop, mock_save, mock_event_finish,
                        mock_event_start):
    """Successful stop: driver stop is called with the timeout and the
    action event finishes clean.
    """
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_container_stop(self.context, container, 10)
    mock_save.assert_called_with(self.context)
    mock_stop.assert_called_once_with(self.context, container, 10)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_stop'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'stop')
def test_container_stop_failed(self, mock_stop, mock_save,
                               mock_event_finish, mock_event_start):
    """Stop failure is absorbed (no re-raise) but the action event must
    record the exception info.
    """
    container = Container(self.context, **utils.get_test_container())
    mock_stop.side_effect = exception.DockerError(
        message="Docker Error occurred")
    self.compute_manager._do_container_stop(self.context, container, 10)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_stop'),
        mock_event_finish.call_args[0])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'start')
def test_container_start(self, mock_start, mock_save, mock_event_finish,
                         mock_event_start):
    """Successful start: driver start is invoked once and the action
    event finishes clean.
    """
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_container_start(self.context, container)
    mock_save.assert_called_with(self.context)
    mock_start.assert_called_once_with(self.context, container)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_start'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(manager.Manager, '_fail_container')
@mock.patch.object(fake_driver, 'start')
def test_container_start_failed(self, mock_start,
                                mock_fail, mock_save, mock_event_finish,
                                mock_event_start):
    """Start failure: unlike stop/reboot, the DockerError is re-raised,
    the container is marked failed, and the event records the exception.
    """
    container = Container(self.context, **utils.get_test_container())
    mock_start.side_effect = exception.DockerError(
        message="Docker Error occurred")
    self.assertRaises(exception.DockerError,
                      self.compute_manager._do_container_start,
                      self.context, container)
    mock_save.assert_called_with(self.context)
    mock_fail.assert_called_with(self.context,
                                 container, 'Docker Error occurred')
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_start'),
        mock_event_finish.call_args[0])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'pause')
def test_container_pause(self, mock_pause, mock_save,
                         mock_event_finish, mock_event_start):
    """Successful pause: driver pause is invoked once and the action
    event finishes clean.
    """
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_container_pause(self.context, container)
    mock_pause.assert_called_once_with(self.context, container)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_pause'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'pause')
def test_container_pause_failed(self, mock_pause, mock_save,
                                mock_event_finish, mock_event_start):
    """Pause failure is absorbed (no re-raise) but the action event must
    record the exception info.
    """
    container = Container(self.context, **utils.get_test_container())
    mock_pause.side_effect = exception.DockerError(
        message="Docker Error occurred")
    self.compute_manager._do_container_pause(self.context, container)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_pause'),
        mock_event_finish.call_args[0])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'unpause')
def test_container_unpause(self, mock_unpause, mock_save,
                           mock_event_finish, mock_event_start):
    """Successful unpause: driver unpause is invoked once and the action
    event finishes clean.
    """
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_container_unpause(self.context, container)
    mock_unpause.assert_called_once_with(self.context, container)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_unpause'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'unpause')
def test_container_unpause_failed(self, mock_unpause, mock_save,
                                  mock_event_finish,
                                  mock_event_start):
    """Unpause failure is absorbed (no re-raise) but the action event
    must record the exception info.
    """
    container = Container(self.context, **utils.get_test_container())
    mock_unpause.side_effect = exception.DockerError(
        message="Docker Error occurred")
    self.compute_manager._do_container_unpause(self.context, container)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_unpause'),
        mock_event_finish.call_args[0])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(fake_driver, 'show_logs')
def test_container_logs(self, mock_logs):
    """container_logs forwards its positional options to the driver's
    show_logs() as keyword arguments.
    """
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager.container_logs(self.context,
                                        container, True, True,
                                        False, 'all', None)
    mock_logs.assert_called_once_with(
        self.context, container, stderr=True, stdout=True,
        timestamps=False, tail='all', since=None)
@mock.patch.object(fake_driver, 'show_logs')
def test_container_logs_failed(self, mock_logs):
    """A DockerError from the driver's show_logs() propagates out of
    container_logs unchanged."""
    attrs = utils.get_test_container()
    test_cont = Container(self.context, **attrs)
    mock_logs.side_effect = exception.DockerError
    with self.assertRaises(exception.DockerError):
        self.compute_manager.container_logs(self.context, test_cont,
                                            True, True, False,
                                            'all', None)
@mock.patch.object(fake_driver, 'execute_run')
@mock.patch.object(fake_driver, 'execute_create')
def test_container_execute(self, mock_execute_create, mock_execute_run):
    """Non-interactive exec: the command runs immediately and the result
    carries output/exit_code but no exec_id/token.
    """
    mock_execute_create.return_value = 'fake_exec_id'
    mock_execute_run.return_value = 'fake_output', 'fake_exit_code'
    container = Container(self.context, **utils.get_test_container())
    result = self.compute_manager.container_exec(
        self.context, container, 'fake_cmd', True, False)
    self.assertEqual('fake_output', result.get('output'))
    self.assertEqual('fake_exit_code', result.get('exit_code'))
    # Interactive-only fields stay unset in run mode.
    self.assertIsNone(result.get('exec_id'))
    self.assertIsNone(result.get('token'))
    mock_execute_create.assert_called_once_with(
        self.context, container, 'fake_cmd', False)
    mock_execute_run.assert_called_once_with('fake_exec_id', 'fake_cmd')
@mock.patch.object(ExecInstance, 'create')
@mock.patch.object(fake_driver, 'execute_run')
@mock.patch.object(fake_driver, 'execute_create')
def test_container_execute_interactive(
        self, mock_execute_create, mock_execute_run, mock_create):
    """Interactive exec: no command is run up-front; the result carries
    exec_id and a token instead of output/exit_code.
    """
    mock_execute_create.return_value = 'fake_exec_id'
    container = Container(self.context, **utils.get_test_container())
    result = self.compute_manager.container_exec(
        self.context, container, 'fake_cmd', False, True)
    self.assertIsNone(result.get('output'))
    self.assertIsNone(result.get('exit_code'))
    self.assertEqual('fake_exec_id', result.get('exec_id'))
    self.assertIsNotNone(result.get('token'))
    mock_execute_create.assert_called_once_with(
        self.context, container, 'fake_cmd', True)
    # Interactive mode defers execution to the attached client.
    mock_execute_run.assert_not_called()
@mock.patch.object(fake_driver, 'execute_create')
def test_container_execute_failed(self, mock_execute_create):
    """A DockerError from execute_create propagates out of container_exec."""
    container = Container(self.context, **utils.get_test_container())
    mock_execute_create.side_effect = exception.DockerError
    with self.assertRaises(exception.DockerError):
        self.compute_manager.container_exec(
            self.context, container, 'fake_cmd', True, False)
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'kill')
def test_container_kill(self, mock_kill, mock_save,
                        mock_event_finish, mock_event_start):
    """Successful kill calls the driver and records a clean action event."""
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_container_kill(self.context, container, None)
    mock_kill.assert_called_once_with(self.context, container, None)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_kill'),
        mock_event_finish.call_args[0])
    # A clean run records no exception info on the finish event.
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'kill')
def test_container_kill_failed(self, mock_kill, mock_save,
                               mock_event_finish,
                               mock_event_start):
    """A driver DockerError is swallowed by _do_container_kill; the failure
    is recorded on the finish event instead of being raised."""
    container = Container(self.context, **utils.get_test_container())
    mock_kill.side_effect = exception.DockerError(
        message="Docker Error occurred")
    # No assertRaises: the manager handles the error internally.
    self.compute_manager._do_container_kill(self.context, container, None)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_kill'),
        mock_event_finish.call_args[0])
    # Failure path: exception info is attached to the finish event.
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'update')
def test_container_update(self, mock_update, mock_save):
    """container_update delegates to the driver and persists the container."""
    container = Container(self.context, **utils.get_test_container())
    patch = {'memory': 512}
    self.compute_manager.container_update(self.context, container, patch)
    mock_update.assert_called_once_with(self.context, container)
    mock_save.assert_called_with(self.context)
@mock.patch.object(fake_driver, 'update')
def test_container_update_failed(self, mock_update):
    """A DockerError from the driver propagates out of container_update."""
    container = Container(self.context, **utils.get_test_container())
    mock_update.side_effect = exception.DockerError
    with self.assertRaises(exception.DockerError):
        self.compute_manager.container_update(self.context, container, {})
@mock.patch.object(fake_driver, 'update')
def test_container_update_failed_resources(self, mock_update):
    """ResourcesUnavailable from the driver propagates out of container_update."""
    container = Container(self.context, **utils.get_test_container())
    mock_update.side_effect = exception.ResourcesUnavailable
    with self.assertRaises(exception.ResourcesUnavailable):
        self.compute_manager.container_update(self.context, container, {})
@mock.patch.object(fake_driver, 'get_websocket_url')
@mock.patch.object(Container, 'save')
def test_container_attach_successful(self, mock_save,
                                     mock_get_websocket_url):
    """Attach fetches the websocket URL from the driver and saves the container."""
    mock_get_websocket_url.return_value = "ws://test"
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager.container_attach(self.context, container)
    mock_save.assert_called_once_with(self.context)
    mock_get_websocket_url.assert_called_once_with(self.context, container)
@mock.patch.object(fake_driver, 'get_websocket_url')
def test_container_attach_failed(self, mock_get_websocket_url):
    """Any driver failure during attach surfaces as a ZunException."""
    container = Container(self.context, **utils.get_test_container())
    mock_get_websocket_url.side_effect = Exception
    with self.assertRaises(exception.ZunException):
        self.compute_manager.container_attach(self.context, container)
@mock.patch.object(fake_driver, 'resize')
def test_container_resize(self, mock_resize):
    """container_resize forwards the terminal geometry untouched to the driver."""
    container = Container(self.context, **utils.get_test_container())
    size = "100"
    self.compute_manager.container_resize(self.context, container, size, size)
    mock_resize.assert_called_once_with(self.context, container, size, size)
@mock.patch.object(fake_driver, 'resize')
def test_container_resize_failed(self, mock_resize):
    """A DockerError from the driver propagates out of container_resize."""
    container = Container(self.context, **utils.get_test_container())
    mock_resize.side_effect = exception.DockerError
    with self.assertRaises(exception.DockerError):
        self.compute_manager.container_resize(
            self.context, container, "100", "100")
@mock.patch.object(fake_driver, 'inspect_image')
@mock.patch.object(Image, 'save')
@mock.patch.object(fake_driver, 'pull_image')
def test_image_pull(self, mock_pull, mock_save, mock_inspect):
    """Pulling an already-loaded image inspects repo:tag and saves the record."""
    image = Image(self.context, **utils.get_test_image())
    ret = {'image': 'repo', 'path': 'out_path', 'driver': 'docker'}
    # Second tuple element True presumably means "image already loaded" —
    # the not-loaded variant below additionally calls load_image. Confirm.
    mock_pull.return_value = ret, True
    mock_inspect.return_value = {'Id': 'fake-id', 'Size': 512,
                                 'RepoTags': ['image1:latest']}
    self.compute_manager._do_image_pull(self.context, image)
    mock_pull.assert_any_call(self.context, image.repo, image.tag,
                              driver_name='docker')
    mock_save.assert_called_once()
    mock_inspect.assert_called_once_with(image.repo + ":" + image.tag)
@mock.patch.object(fake_driver, 'load_image')
@mock.patch.object(fake_driver, 'inspect_image')
@mock.patch.object(Image, 'save')
@mock.patch.object(fake_driver, 'pull_image')
def test_image_pull_not_loaded(self, mock_pull, mock_save,
                               mock_inspect, mock_load):
    """When the pulled image is not yet loaded, it is loaded from its path."""
    image = Image(self.context, **utils.get_test_image())
    repo_tag = image.repo + ":" + image.tag
    ret = {'image': 'repo', 'path': 'out_path', 'driver': 'docker',
           'tags': ['latest']}
    # False → not loaded, so _do_image_pull must call load_image.
    mock_pull.return_value = ret, False
    mock_inspect.return_value = {'Id': 'fake-id', 'Size': 512,
                                 'RepoTags': ['image1:latest']}
    self.compute_manager._do_image_pull(self.context, image)
    mock_pull.assert_any_call(self.context, image.repo, image.tag,
                              driver_name='docker')
    mock_save.assert_called_once()
    mock_inspect.assert_called_once_with(repo_tag)
    mock_load.assert_called_once_with(ret['path'])
@mock.patch.object(fake_driver, 'inspect_image')
@mock.patch.object(Image, 'save')
@mock.patch.object(fake_driver, 'pull_image')
def test_image_pull_tag_is_none(self, mock_pull, mock_save, mock_inspect):
    """With no tag, pull passes None and inspect uses the bare repo name."""
    image = Image(self.context, **utils.get_test_image(tag=None))
    ret = {'image': 'repo', 'path': 'out_path', 'driver': 'docker'}
    mock_pull.return_value = ret, True
    mock_inspect.return_value = {'Id': 'fake-id', 'Size': 512,
                                 'RepoTags': ['image1:latest']}
    self.compute_manager._do_image_pull(self.context, image)
    mock_pull.assert_any_call(self.context, image.repo, None,
                              driver_name='docker')
    mock_save.assert_called_once()
    # No ":tag" suffix when tag is None.
    mock_inspect.assert_called_once_with(image.repo)
@mock.patch.object(fake_driver, 'execute_resize')
def test_container_exec_resize(self, mock_resize):
    """Exec resize is forwarded to the driver keyed by the exec id."""
    exec_id = 'fake_exec_id'
    self.compute_manager.container_exec_resize(
        self.context, exec_id, "100", "100")
    mock_resize.assert_called_once_with(exec_id, "100", "100")
@mock.patch.object(fake_driver, 'execute_resize')
def test_container_exec_resize_failed(self, mock_resize):
    """A DockerError from execute_resize propagates to the caller."""
    mock_resize.side_effect = exception.DockerError
    with self.assertRaises(exception.DockerError):
        self.compute_manager.container_exec_resize(
            self.context, 'fake_exec_id', "100", "100")
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch.object(fake_driver, 'upload_image_data')
@mock.patch.object(fake_driver, 'get_image')
@mock.patch.object(fake_driver, 'commit')
@mock.patch.object(fake_driver, 'pause')
@mock.patch.object(fake_driver, 'unpause')
@mock.patch.object(Container, 'save')
def test_container_commit(
        self, mock_save, mock_unpause, mock_pause, mock_commit,
        mock_get_image, mock_upload_image_data, mock_action_finish,
        mock_event_finish, mock_event_start):
    """Committing a container that is already PAUSED skips pause/unpause."""
    container = Container(self.context, **utils.get_test_container(
        status=consts.PAUSED))
    mock_get_image_response = mock.MagicMock()
    mock_get_image_response.data = StringIO().read()
    mock_get_image.return_value = mock_get_image_response
    mock_upload_image_data.return_value = mock.MagicMock()
    self.compute_manager._do_container_commit(self.context,
                                              mock_get_image_response,
                                              container, 'repo', 'tag')
    mock_commit.assert_called_once_with(
        self.context, container, 'repo', 'tag')
    # Already paused: the manager must not pause/unpause around the commit.
    mock_pause.assert_not_called()
    mock_unpause.assert_not_called()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_commit'),
        mock_event_finish.call_args[0])
    # A clean run records no exception info on the finish event.
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch.object(fake_driver, 'upload_image_data')
@mock.patch.object(fake_driver, 'get_image')
@mock.patch.object(fake_driver, 'commit')
@mock.patch.object(fake_driver, 'pause')
@mock.patch.object(fake_driver, 'unpause')
@mock.patch.object(Container, 'save')
def test_container_commit_with_pause(
        self, mock_save, mock_unpause, mock_pause, mock_commit,
        mock_get_image, mock_upload_image_data, mock_action_finish,
        mock_event_finish, mock_event_start):
    """A running container is paused for the commit and unpaused after."""
    container = Container(self.context, **utils.get_test_container())
    mock_get_image_response = mock.MagicMock()
    mock_get_image_response.data = StringIO().read()
    mock_get_image.return_value = mock_get_image_response
    mock_upload_image_data.return_value = mock.MagicMock()
    mock_unpause.return_value = container
    mock_pause.return_value = container
    self.compute_manager._do_container_commit(self.context,
                                              mock_get_image_response,
                                              container, 'repo', 'tag')
    mock_commit.assert_called_once_with(
        self.context, container, 'repo', 'tag')
    # Default (non-paused) status: pause before commit, unpause after.
    mock_pause.assert_called_once_with(self.context, container)
    mock_unpause.assert_called_once_with(self.context, container)
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_commit'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(ContainerAction, 'action_finish')
@mock.patch.object(fake_driver, 'delete_committed_image')
@mock.patch.object(fake_driver, 'commit')
@mock.patch.object(fake_driver, 'pause')
@mock.patch.object(fake_driver, 'unpause')
@mock.patch.object(Container, 'save')
def test_container_commit_failed(self, mock_save, mock_unpause, mock_pause,
                                 mock_commit, mock_delete,
                                 mock_action_finish,
                                 mock_event_finish, mock_event_start):
    """A failing commit raises DockerError, deletes the committed image,
    still unpauses the container, and records the failure on the event."""
    container = Container(self.context, **utils.get_test_container())
    mock_get_image_response = mock.MagicMock()
    mock_get_image_response.data = StringIO().read()
    mock_commit.side_effect = exception.DockerError
    mock_unpause.return_value = container
    mock_pause.return_value = container
    self.assertRaises(exception.DockerError,
                      self.compute_manager._do_container_commit,
                      self.context, mock_get_image_response, container,
                      'repo', 'tag')
    # Cleanup path: the partially committed image is removed.
    self.assertTrue(mock_delete.called)
    mock_commit.assert_called_once_with(
        self.context, container, 'repo', 'tag')
    mock_pause.assert_called_once_with(self.context, container)
    mock_unpause.assert_called_once_with(self.context, container)
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_container_commit'),
        mock_event_finish.call_args[0])
    # Failure path: exception info is attached to the finish event.
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNotNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'network_detach')
def test_container_network_detach(self, mock_detach, mock_save,
                                  mock_event_finish, mock_event_start):
    """Network detach calls the driver and records a clean action event."""
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_network_detach(self.context, container,
                                            'network')
    # mock.ANY: the 'network' argument is apparently transformed before
    # reaching the driver, so the exact object is not asserted.
    mock_detach.assert_called_once_with(self.context, container, mock.ANY)
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_network_detach'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(ContainerActionEvent, 'event_start')
@mock.patch.object(ContainerActionEvent, 'event_finish')
@mock.patch.object(Container, 'save')
@mock.patch.object(fake_driver, 'network_attach')
def test_container_network_attach(self, mock_attach, mock_save,
                                  mock_event_finish, mock_event_start):
    """Network attach saves the container and records a clean action event."""
    container = Container(self.context, **utils.get_test_container())
    self.compute_manager._do_network_attach(self.context, container,
                                            'network')
    # NOTE(review): mock_attach is patched only to neutralize the driver;
    # its call arguments are not asserted here.
    mock_save.assert_called_with(self.context)
    mock_event_start.assert_called_once()
    mock_event_finish.assert_called_once()
    self.assertEqual(
        (self.context, container.uuid, 'compute__do_network_attach'),
        mock_event_finish.call_args[0])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_val'])
    self.assertIsNone(mock_event_finish.call_args[1]['exc_tb'])
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(manager.Manager, '_fail_container')
def test_wait_for_volumes_available(self, mock_fail,
                                    mock_is_volume_available):
    """An immediately-available volume passes without failing the container."""
    # Tuple is presumably (is_available, is_error) — confirm against the
    # driver's is_volume_available signature.
    mock_is_volume_available.return_value = True, False
    container = Container(self.context, **utils.get_test_container())
    FakeVolumeMapping.container_uuid = container.uuid
    volumes = {container.uuid: [FakeVolumeMapping()]}
    self.compute_manager._wait_for_volumes_available(self.context,
                                                     volumes,
                                                     container)
    mock_is_volume_available.assert_called_once()
    mock_fail.assert_not_called()
@mock.patch.object(fake_driver, 'delete_volume')
@mock.patch.object(fake_driver, 'is_volume_available')
@mock.patch.object(manager.Manager, '_fail_container')
def test_wait_for_volumes_available_failed(self, mock_fail,
                                           mock_is_volume_available,
                                           mock_delete_volume):
    """An errored volume raises Conflict, fails the container, and deletes
    the volume because auto_remove is set."""
    mock_is_volume_available.return_value = False, True
    container = Container(self.context, **utils.get_test_container())
    FakeVolumeMapping.container_uuid = container.uuid
    volume = FakeVolumeMapping()
    volume.auto_remove = True
    volumes = {container.uuid: [volume]}
    # Short timeout keeps the wait loop from slowing the test suite.
    self.assertRaises(exception.Conflict,
                      self.compute_manager._wait_for_volumes_available,
                      self.context, volumes, container, timeout=2)
    self.assertTrue(mock_is_volume_available.called)
    self.assertTrue(mock_fail.called)
    self.assertTrue(mock_delete_volume.called)
@mock.patch.object(fake_driver, 'create_network')
def test_network_create(self, mock_create):
    """network_create hands the network object to the driver."""
    net = Network(self.context, **utils.get_test_network())
    mock_create.return_value = {'Id': '0eeftestnetwork'}
    self.compute_manager.network_create(self.context, net)
    mock_create.assert_any_call(self.context, net)
@mock.patch.object(fake_driver, 'delete_network')
def test_network_delete(self, mock_delete):
    """network_delete hands the network object to the driver."""
    net = Network(self.context, **utils.get_test_network())
    self.compute_manager.network_delete(self.context, net)
    mock_delete.assert_any_call(self.context, net)
| 51.492414
| 79
| 0.666801
| 8,405
| 74,664
| 5.57561
| 0.037597
| 0.055118
| 0.088343
| 0.043787
| 0.887395
| 0.873354
| 0.8575
| 0.845742
| 0.819559
| 0.796535
| 0
| 0.003071
| 0.232334
| 74,664
| 1,449
| 80
| 51.52795
| 0.81454
| 0.007688
| 0
| 0.730655
| 0
| 0
| 0.081204
| 0.017226
| 0
| 0
| 0
| 0
| 0.22247
| 1
| 0.059524
| false
| 0.000744
| 0.014137
| 0.004464
| 0.080357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37dcf9b3150f595d3f69cb9a95588f87b8d3fb7d
| 390
|
py
|
Python
|
smoothcrawler/persistence/file.py
|
Chisanan232/pytsunami
|
099f16e6351b8cfcce4528d53273c381c1dc3bf9
|
[
"Apache-2.0"
] | 1
|
2022-03-18T15:20:56.000Z
|
2022-03-18T15:20:56.000Z
|
smoothcrawler/persistence/file.py
|
Chisanan232/smoothcrawler
|
324c52b941d7e73c3d087408f5ba9bd02e49d66c
|
[
"Apache-2.0"
] | null | null | null |
smoothcrawler/persistence/file.py
|
Chisanan232/smoothcrawler
|
324c52b941d7e73c3d087408f5ba9bd02e49d66c
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC
from multirunnable.persistence.file import SavingStrategy, SavingMediator
from multirunnable.persistence.file.files import File
from multirunnable.persistence.file.archivers import Archiver
from multirunnable.persistence.file.saver import FileSaver, ArchiverSaver
from multirunnable.persistence.file.layer import BaseFao
class BaseCrawlerFao(BaseFao, ABC):
    """Abstract file-access-object base for crawler persistence.

    Thin marker class: all behavior is inherited from multirunnable's
    BaseFao; subclasses implement crawler-specific persistence.
    """
    pass
| 30
| 73
| 0.851282
| 45
| 390
| 7.377778
| 0.422222
| 0.256024
| 0.421687
| 0.481928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097436
| 390
| 12
| 74
| 32.5
| 0.943182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.125
| 0.75
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
37fb86c7d9d7b78aba0f5c8a43a6e91a3f28a7f0
| 112
|
py
|
Python
|
exercises/vector/test_vector_neg.py
|
fluentpython/pythonic-api
|
764cb9dba9418c591d6d0cef20401b58d8ce0b1b
|
[
"MIT"
] | 58
|
2016-05-10T22:41:14.000Z
|
2020-09-25T23:47:18.000Z
|
exercises/vector/test_vector_neg.py
|
afcarl/pythonic-api
|
764cb9dba9418c591d6d0cef20401b58d8ce0b1b
|
[
"MIT"
] | 1
|
2017-01-31T04:20:31.000Z
|
2017-01-31T04:20:31.000Z
|
exercises/vector/test_vector_neg.py
|
afcarl/pythonic-api
|
764cb9dba9418c591d6d0cef20401b58d8ce0b1b
|
[
"MIT"
] | 16
|
2016-05-17T17:01:25.000Z
|
2018-11-09T10:10:07.000Z
|
from vector import Vector
def test_vector_unary_minus():
    """Unary minus on a Vector negates every component."""
    vec = Vector([1, 2, 3])
    assert -vec == Vector([-1, -2, -3])
| 22.4
| 53
| 0.642857
| 18
| 112
| 3.833333
| 0.611111
| 0.202899
| 0.231884
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 0.169643
| 112
| 4
| 54
| 28
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
531dc81c8e359018cecbb4b03d31aae7a81695f0
| 42,833
|
py
|
Python
|
vinfo/testing.py
|
john-hewitt/conditional-probing
|
bebc90aa0c910395e2370910409076a945279fe0
|
[
"Apache-2.0"
] | 13
|
2021-09-21T11:07:33.000Z
|
2022-03-25T08:46:46.000Z
|
vinfo/testing.py
|
john-hewitt/conditional-probing
|
bebc90aa0c910395e2370910409076a945279fe0
|
[
"Apache-2.0"
] | 2
|
2021-09-25T15:45:19.000Z
|
2021-12-10T15:57:35.000Z
|
vinfo/testing.py
|
john-hewitt/conditional-probing
|
bebc90aa0c910395e2370910409076a945279fe0
|
[
"Apache-2.0"
] | 2
|
2021-09-27T01:21:49.000Z
|
2021-09-28T06:08:19.000Z
|
import unittest
import tempfile
import glob
import os
import torch
import torch.nn as nn
import numpy as np
import model
import dataset
import task
import cache
import probe
import trainer
import reporter
from utils import TRAIN_STR, DEV_STR, TEST_STR
"""
Test suites for the codebase
"""
class DataTest(unittest.TestCase):
    """Shared element-wise equality helpers for nested test structures.

    Sentences are nested sequences of strings; annotations are flat
    sequences of values. Each helper walks the *gold* structure and
    compares against the other structure at the same indices (trailing
    extras in the other structure are deliberately not checked).
    """

    def assert_2darray_equal(self, gold_sentence, other_sentence):
        """Element-wise equality of two 2D (sentence-like) structures."""
        for i, row in enumerate(gold_sentence):
            for j, cell in enumerate(row):
                self.assertEqual(cell, other_sentence[i][j])

    def assert_3darray_equal(self, gold_sentence, other_sentence):
        """Element-wise equality of two 3D structures."""
        for i, plane in enumerate(gold_sentence):
            for j, row in enumerate(plane):
                for k, cell in enumerate(row):
                    self.assertEqual(cell, other_sentence[i][j][k])

    def assert_annotations_equal(self, gold_annotation, other_annotation):
        """Element-wise equality of two flat annotation sequences."""
        for i, value in enumerate(gold_annotation):
            self.assertEqual(value, other_annotation[i])
class OntonotesReaderTest(DataTest):
    """Tests for dataset.OntonotesReader's per-split sentence generator."""

    def setUp(self):
        """
        Writes temporary files with simple Ontonotes-formatted data
        """
        # One sentence, no trailing blank line.
        self.tmpfile1 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile1.write("""I\tNoun\nenjoy\tVerb\npizza\tNoun""")
        self.tmpfile1.flush()
        # Two sentences separated by a blank line, plus a trailing blank line.
        self.tmpfile2 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile2.write("""I\tNoun\nenjoy\tVerb\npizza\tNoun\n\nIt\tDet\nis\tVerb\ntasty\tAdj\n\n""")
        self.tmpfile2.flush()

    def tearDown(self):
        # Closing a NamedTemporaryFile also deletes it.
        self.tmpfile1.close()
        self.tmpfile2.close()

    def test_one_sentence(self):
        """
        Tests that a one-sentence dataset can be read directly;
        the dataset does not have a blank line at the end.
        """
        gold_sentence = (('I', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
        # Reader positional args appear to be (args, train, dev, test, ?);
        # each split is exercised by placing the file in the matching slot.
        generator = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None).yield_dataset(TRAIN_STR)
        self.assert_2darray_equal(gold_sentence, next(generator))
        with self.assertRaises(StopIteration):
            next(generator)
        generator = dataset.OntonotesReader(None, None, self.tmpfile1.name, None, None).yield_dataset(DEV_STR)
        self.assert_2darray_equal(gold_sentence, next(generator))
        with self.assertRaises(StopIteration):
            next(generator)
        generator = dataset.OntonotesReader(None, None, None, self.tmpfile1.name, None).yield_dataset(TEST_STR)
        self.assert_2darray_equal(gold_sentence, next(generator))
        with self.assertRaises(StopIteration):
            next(generator)

    def test_two_sentences_with_gap(self):
        """
        Tests that a two-sentence dataset can be read directly;
        the dataset has an extra blank line at the end that must
        be discarded.
        """
        gold_sentence1 = (('I', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
        gold_sentence2 = (('It', 'Det'), ('is', 'Verb'), ('tasty', 'Adj'))
        generator = dataset.OntonotesReader(None, self.tmpfile2.name, None, None, None).yield_dataset(TRAIN_STR)
        self.assert_2darray_equal(gold_sentence1, next(generator))
        self.assert_2darray_equal(gold_sentence2, next(generator))
        with self.assertRaises(StopIteration):
            next(generator)
        generator = dataset.OntonotesReader(None, None, self.tmpfile2.name, None, None).yield_dataset(DEV_STR)
        self.assert_2darray_equal(gold_sentence1, next(generator))
        self.assert_2darray_equal(gold_sentence2, next(generator))
        with self.assertRaises(StopIteration):
            next(generator)
        generator = dataset.OntonotesReader(None, None, None, self.tmpfile2.name, None).yield_dataset(TEST_STR)
        self.assert_2darray_equal(gold_sentence1, next(generator))
        self.assert_2darray_equal(gold_sentence2, next(generator))
        with self.assertRaises(StopIteration):
            next(generator)
class TokenClassificationTaskTest(DataTest):
    """Tests for task.TokenClassificationTask labeling, with and without
    a WholeDatasetCache backing store."""

    def setUp(self):
        """
        Writes temporary files with simple Ontonotes-formatted data
        """
        self.tmpfile1 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile1.write("""I\tNoun\nenjoy\tVerb\npizza\tNoun""")
        self.tmpfile1.flush()
        self.tmpfile2 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile2.write("""I\tNoun\nenjoy\tVerb\npizza\tNoun\n\nIt\tDet\nis\tVerb\ntasty\tAdj\n\n""")
        self.tmpfile2.flush()
        self.tmpfile3 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile3.write("""Good\tAdj\n""")
        self.tmpfile3.flush()

    def tearDown(self):
        # Remove label caches written next to each temp file, then close
        # (and thereby delete) the temp files themselves.
        for cache_filename in glob.glob(self.tmpfile1.name + '.cache.*'):
            os.remove(cache_filename)
        self.tmpfile1.close()
        for cache_filename in glob.glob(self.tmpfile2.name + '.cache.*'):
            os.remove(cache_filename)
        self.tmpfile2.close()
        for cache_filename in glob.glob(self.tmpfile3.name + '.cache.*'):
            os.remove(cache_filename)
        self.tmpfile3.close()

    # NOTE(review): this duplicates DataTest.assert_annotations_equal;
    # the inherited helper would suffice.
    def assert_annotations_equal(self, gold_annotation, other_annotation):
        """
        Annotations are effectively arrays;
        this checks whether they contain the same values
        """
        for index, elt in enumerate(gold_annotation):
            self.assertEqual(elt, other_annotation[index])

    def test_no_cache_labels_of_sentence(self):
        """
        Tests whether attributes can be accessed by name by the
        TokenClassificationTask annotator
        """
        # Expected integer labels presumably reflect first-seen vocabulary
        # order per field — confirm against TokenClassificationTask.
        sentence1 = (('I', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
        token_annotation_sentence_1 = [1, 2, 3]
        pos_annotation_sentence_1 = [1, 2, 1]
        sentence2 = (('It', 'Det'), ('is', 'Verb'), ('tasty', 'Adj'))
        token_annotation_sentence_2 = [4, 5, 6]
        pos_annotation_sentence_2 = [3, 2, 4]
        args = {'device':'cpu'}
        input_fields = ['Token', 'PoS']
        token_task = task.TokenClassificationTask(args, 'Token', input_fields)
        self.assert_annotations_equal(token_annotation_sentence_1, token_task.labels_of_sentence(sentence1, TRAIN_STR))
        self.assert_annotations_equal(token_annotation_sentence_2, token_task.labels_of_sentence(sentence2, TRAIN_STR))
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields)
        self.assert_annotations_equal(pos_annotation_sentence_1, pos_task.labels_of_sentence(sentence1, TRAIN_STR))
        self.assert_annotations_equal(pos_annotation_sentence_2, pos_task.labels_of_sentence(sentence2, TRAIN_STR))

    def test_make_cache_labels_of_sentence(self):
        """
        Tests whether, when the cache for an annotation is missing,
        sentences are labeled correctly.
        """
        sentence1 = (('I', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
        token_annotation_sentence_1 = [1, 2, 3]
        pos_annotation_sentence_1 = [1, 2, 1]
        sentence2 = (('It', 'Det'), ('is', 'Verb'), ('tasty', 'Adj'))
        token_annotation_sentence_2 = [4, 5, 6]
        pos_annotation_sentence_2 = [3, 2, 4]
        args = {'device':'cpu'}
        input_fields = ['Token', 'PoS']
        train_path = self.tmpfile1.name
        dev_path = self.tmpfile2.name
        test_path = self.tmpfile3.name
        cache_model = cache.WholeDatasetCache(train_path, dev_path, test_path)
        token_task = task.TokenClassificationTask(args, 'Token', input_fields, cache_model)
        # No cache exists yet for fresh temp files.
        self.assertIsNone(token_task.train_cache)
        self.assert_annotations_equal(token_annotation_sentence_1, token_task.labels_of_sentence(sentence1, TRAIN_STR))
        self.assert_annotations_equal(token_annotation_sentence_2, token_task.labels_of_sentence(sentence2, TRAIN_STR))
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields, cache_model)
        self.assertIsNone(pos_task.train_cache)
        self.assert_annotations_equal(pos_annotation_sentence_1, pos_task.labels_of_sentence(sentence1, DEV_STR))
        self.assert_annotations_equal(pos_annotation_sentence_2, pos_task.labels_of_sentence(sentence2, DEV_STR))

    def test_use_cache_labels_of_sentence(self):
        """
        Tests that after a first pass writes the label caches, a fresh
        cache model finds and reuses them (docstring corrected from a
        copy of the cache-missing test above).
        """
        sentence1 = (('I', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
        token_annotation_sentence_1 = [1, 2, 3]
        pos_annotation_sentence_1 = [1, 2, 1]
        sentence2 = (('It', 'Det'), ('is', 'Verb'), ('tasty', 'Adj'))
        token_annotation_sentence_2 = [4, 5, 6]
        pos_annotation_sentence_2 = [3, 2, 4]
        args = {'device':'cpu'}
        input_fields = ['Token', 'PoS']
        train_path = self.tmpfile1.name
        dev_path = self.tmpfile2.name
        test_path = self.tmpfile3.name
        # Write the cache
        cache_model = cache.WholeDatasetCache(train_path, dev_path, test_path)
        token_task = task.TokenClassificationTask(args, 'Token', input_fields, cache_model)
        self.assert_annotations_equal(token_annotation_sentence_1, token_task.labels_of_sentence(sentence1, TRAIN_STR))
        self.assert_annotations_equal(token_annotation_sentence_2, token_task.labels_of_sentence(sentence2, TRAIN_STR))
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields, cache_model)
        self.assert_annotations_equal(pos_annotation_sentence_1, pos_task.labels_of_sentence(sentence1, DEV_STR))
        self.assert_annotations_equal(pos_annotation_sentence_2, pos_task.labels_of_sentence(sentence2, DEV_STR))
        # Use the cache
        cache_model = cache.WholeDatasetCache(train_path, dev_path, test_path)
        # Assert the caches still exist
        # NOTE(review): the next two pairs of assertions are duplicated
        # verbatim; each pair was likely meant to check distinct paths.
        self.assertTrue(os.path.exists(cache_model.get_cache_path(TRAIN_STR, 'Token')))
        self.assertTrue(os.path.exists(cache_model.get_cache_path(TRAIN_STR, 'Token')))
        self.assertTrue(os.path.exists(cache_model.get_cache_path(DEV_STR, 'PoS')))
        self.assertTrue(os.path.exists(cache_model.get_cache_path(DEV_STR, 'PoS')))
        token_task = task.TokenClassificationTask(args, 'Token', input_fields, cache_model)
        self.assert_annotations_equal(token_annotation_sentence_1, token_task.labels_of_sentence(sentence1, TRAIN_STR))
        self.assert_annotations_equal(token_annotation_sentence_2, token_task.labels_of_sentence(sentence2, TRAIN_STR))
        # This time the cache was found and loaded.
        self.assertIsNotNone(token_task.train_cache)
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields, cache_model)
        self.assert_annotations_equal(pos_annotation_sentence_1, pos_task.labels_of_sentence(sentence1, DEV_STR))
        self.assert_annotations_equal(pos_annotation_sentence_2, pos_task.labels_of_sentence(sentence2, DEV_STR))
        self.assertIsNotNone(pos_task.dev_cache)
class TestListDataset(DataTest):
def setUp(self):
"""
Writes temporary files with simple Ontonotes-formatted data
"""
self.tmpfile1 = tempfile.NamedTemporaryFile(mode='w')
self.tmpfile1.write("""I\tNoun\nenjoy\tVerb\npizza\tNoun""")
self.tmpfile1.flush()
self.tmpfile2 = tempfile.NamedTemporaryFile(mode='w')
self.tmpfile2.write("""I\tNoun\nenjoy\tVerb\npizza\tNoun\n\nIt\tDet\nis\tVerb\ntasty\tAdj\n\n""")
self.tmpfile2.flush()
self.tmpfile3 = tempfile.NamedTemporaryFile(mode='w')
self.tmpfile3.write("""Good\tAdj\n\nThis\tDet\nis\tVerb\na\tDet\nsentence\tNoun""")
self.tmpfile3.flush()
def tearDown(self):
for cache_filename in glob.glob(self.tmpfile1.name + '.cache.*'):
os.remove(cache_filename)
self.tmpfile1.close()
for cache_filename in glob.glob(self.tmpfile2.name + '.cache.*'):
os.remove(cache_filename)
self.tmpfile2.close()
for cache_filename in glob.glob(self.tmpfile3.name + '.cache.*'):
os.remove(cache_filename)
self.tmpfile3.close()
def test_single_annotation_input(self):
"""
Tests spec of a list dataset with a single annotation
input dataset.
"""
args = {'device':'cpu'}
input_fields = ['Token', 'PoS']
train_path = self.tmpfile1.name
dev_path = self.tmpfile2.name
test_path = self.tmpfile3.name
cache_model = cache.WholeDatasetCache(train_path, dev_path, test_path)
data_reader = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None)
sentence1 = (('I', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
token_annotation_sentence_1 = [1, 2, 3]
pos_annotation_sentence_1 = [1, 2, 1]
token_task = task.TokenClassificationTask(args, 'Token', input_fields, cache_model)
pos_task = task.TokenClassificationTask(args, 'PoS', input_fields, cache_model)
input_dataset = dataset.AnnotationData(args, token_task)
output_dataset = dataset.AnnotationData(args, pos_task)
list_dataset = dataset.ListDataset(args, data_reader, output_dataset, [input_dataset])
data = next(list_dataset.load_data(TRAIN_STR))
input_1 = data[0][0][0]
self.assert_annotations_equal(token_annotation_sentence_1, input_1)
output = data[1][0]
self.assert_annotations_equal(pos_annotation_sentence_1, output)
def test_two_annotation_input(self):
"""
Tests spec of a list dataset with two annotation
input dataset.
"""
args = {'device':'cpu'}
input_fields = ['Token', 'PoS']
train_path = self.tmpfile1.name
dev_path = self.tmpfile2.name
test_path = self.tmpfile3.name
cache_model = cache.WholeDatasetCache(train_path, dev_path, test_path)
data_reader = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None)
sentence1 = (('GI', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
token_annotation_sentence_1 = [1, 2, 3]
pos_annotation_sentence_1 = [1, 2, 1]
token_task = task.TokenClassificationTask(args, 'Token', input_fields, cache_model)
pos_task = task.TokenClassificationTask(args, 'PoS', input_fields, cache_model)
pos_task2 = task.TokenClassificationTask(args, 'PoS', input_fields)
input_dataset1 = dataset.AnnotationData(args, token_task)
input_dataset2 = dataset.AnnotationData(args, pos_task)
output_dataset = dataset.AnnotationData(args, pos_task2)
list_dataset = dataset.ListDataset(args, data_reader, output_dataset, [input_dataset1, input_dataset2])
data = next(list_dataset.load_data(TRAIN_STR))
input_1 = data[0][0][0]
self.assert_annotations_equal(token_annotation_sentence_1, input_1)
input_2 = data[0][1][0]
self.assert_annotations_equal(pos_annotation_sentence_1, input_2)
output = data[1][0]
self.assert_annotations_equal(pos_annotation_sentence_1, output)
input_dataset1 = dataset.AnnotationData(args, token_task)
def test_dataloader_and_collate_fn(self):
    """
    Tests that the DataLoader built by ListDataset batches and
    zero-pads annotation tensors via collate_fn, and passes the raw
    sentences through unchanged.
    """
    args = {'device': 'cpu'}
    input_fields = ['Token', 'PoS']
    data_reader = dataset.OntonotesReader(None, self.tmpfile1.name, self.tmpfile2.name, self.tmpfile3.name, None)
    token_task = task.TokenClassificationTask(args, 'Token', input_fields)
    pos_task = task.TokenClassificationTask(args, 'PoS', input_fields)
    pos_task2 = task.TokenClassificationTask(args, 'PoS', input_fields)
    input_dataset1 = dataset.AnnotationData(args, token_task)
    input_dataset2 = dataset.AnnotationData(args, pos_task)
    output_dataset = dataset.AnnotationData(args, pos_task2)
    list_dataset = dataset.ListDataset(args, data_reader, output_dataset, [input_dataset1, input_dataset2])
    train_dataloader = list_dataset.get_test_dataloader(shuffle=False)
    batch = next(iter(train_dataloader))
    # The alignment half of the batch is not checked here.
    ((inp_an, _inp_al), out_an, sent) = batch
    # Padded (batch, seq) tensors: the first sentence has one token,
    # the second four, so the first row is zero-padded on the right.
    self.assert_2darray_equal(torch.tensor([[1., 0., 0., 0.], [2., 3., 4., 5.]]), inp_an[0])
    self.assert_2darray_equal(torch.tensor([[1., 0., 0., 0.], [2., 3., 2., 4.]]), inp_an[1])
    self.assert_2darray_equal(torch.tensor([[1., 0., 0., 0.], [2., 3., 2., 4.]]), out_an)
    self.assertEqual([[['Good', 'Adj']], [['This', 'Det'], ['is', 'Verb'], ['a', 'Det'], ['sentence', 'Noun']]], sent)
class TestAnnotationDataset(DataTest):
    def test_annotation_alignment(self):
        """
        Tests whether the alignment of annotation tensors to corpus-given
        tokens is the identity alignment (and checks the annotation).
        """
        args = {'device': 'cpu'}
        input_fields = ['Token', 'PoS']
        sentence1 = (('GI', 'Noun'), ('enjoy', 'Verb'), ('pizza', 'Noun'))
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields)
        input_dataset = dataset.AnnotationData(args, pos_task)
        annotation, alignment = input_dataset.tensor_of_sentence(sentence1, TRAIN_STR)
        # PoS tags (Noun, Verb, Noun) map to the integer labels 1, 2, 1.
        self.assert_annotations_equal([1, 2, 1], annotation)
        # Identity alignment: annotation i aligns only to corpus token i.
        self.assert_2darray_equal([[1, 0, 0], [0, 1, 0], [0, 0, 1]], alignment)
class TestHuggingfaceDataset(DataTest):
    def test_one_sentence_input(self):
        """
        Tests tokenization of a single sentence,
        and alignment between the tokenization and the corpus-given tokenization
        """
        model_string = "google/bert_uncased_L-2_H-128_A-2"
        args = {'device': 'cpu'}
        dataset_model = dataset.HuggingfaceData(args, model_string)
        sentence1 = (('0', 'Platypusbears', 'Noun'), ('1', 'eat', 'Verb'), ('2', 'pizza', 'Noun'))
        wordpiece_indices, wordpiece_alignment_vecs = dataset_model.tensor_of_sentence(sentence1, TRAIN_STR)
        # 'Platypusbears' is split into six wordpieces; [CLS] (101) and
        # [SEP] (102) are added at the boundaries.
        self.assert_annotations_equal([101, 20228, 4017, 22571, 2271, 4783, 11650, 4521, 10733, 102],
                                      wordpiece_indices)
        self.assertEqual("[CLS] platypusbears eat pizza [SEP]", dataset_model.tokenizer.decode(wordpiece_indices))
        # Rows are wordpieces, columns are corpus tokens: each of the six
        # pieces of 'Platypusbears' gets weight 1/6 on token 0, and the
        # [CLS]/[SEP] rows align to no corpus token.
        np.testing.assert_allclose(wordpiece_alignment_vecs,
                                   [[0.0000, 0.0000, 0.0000],
                                    [0.166666667, 0.0000, 0.0000],
                                    [0.166666667, 0.0000, 0.0000],
                                    [0.166666667, 0.0000, 0.0000],
                                    [0.166666667, 0.0000, 0.0000],
                                    [0.166666667, 0.0000, 0.0000],
                                    [0.166666667, 0.0000, 0.0000],
                                    [0.0000, 1.0000, 0.0000],
                                    [0.0000, 0.0000, 1.0000],
                                    [0.0000, 0.0000, 0.0000]])
class AnnotationModelTest(DataTest):
    def test_forward_wellformedness(self):
        """
        Tests whether the output of the forward function
        of AnnotationModel is the one-hot representation
        of the indices passed to it
        """
        max_cardinality = 4
        trainable = False
        args = {'device':'cpu'}
        annotation_model = model.AnnotationModel(args, max_cardinality, trainable)
        annotation_tensor = torch.tensor(
            [[1,2,1,3,0],[3,3,3,3,3], [2,1,0,0,0]]
        )
        alignment_tensor = None
        output = annotation_model((annotation_tensor, alignment_tensor))
        batch_size = 3
        seq_len = 5
        self.assertEqual(output.shape, (batch_size, seq_len, max_cardinality))
        # Index 1 -> one-hot [0,1,0,0]; index 3 -> one-hot [0,0,0,1].
        self.assert_annotations_equal([0,1,0,0], output[0,0])
        self.assert_annotations_equal([0,0,0,1], output[1,4])

    def test_pad_token_zeroed(self):
        """
        Tests whether the pad token (0) is given the zero vector,
        unlike all other token indices
        """
        max_cardinality = 4
        trainable = False
        args = {'device':'cpu'}
        annotation_model = model.AnnotationModel(args, max_cardinality, trainable)
        annotation_tensor = torch.tensor(
            [[1,2,1,3,0],[3,3,3,3,3], [2,1,0,0,0]]
        )
        alignment_tensor = None
        output = annotation_model((annotation_tensor, alignment_tensor))
        # Positions holding index 0 (padding) must map to the zero vector.
        self.assert_annotations_equal([0,0,0,0], output[0,4])
        self.assert_annotations_equal([0,0,0,0], output[2,2])

    def test_trainable_is_respected(self):
        """
        Tests whether the trainable flag, which is True if
        a model should be fine-tuned during training, and False
        otherwise, is respected.
        That is, the embedding weights are trained (or not trained)
        depending on the boolean value of trainable
        """
        max_cardinality = 4
        args = {'device':'cpu'}
        for trainable in (False, True):
            annotation_model = model.AnnotationModel(args, max_cardinality, trainable)
            # Only the embedding parameters are optimized; `weights` below is a
            # fixed linear read-out used to produce a scalar loss.
            optimizer = torch.optim.SGD(annotation_model.embeddings.parameters(), lr=0.01)
            weights = torch.nn.Parameter(torch.zeros(max_cardinality))
            annotation_tensor = torch.tensor(
                [[1,2,1,3,0],[3,3,3,3,3], [2,1,0,0,0]]
            )
            alignment_tensor = None
            output = annotation_model((annotation_tensor, alignment_tensor))
            nn.init.uniform_(weights)
            # First pass
            prediction = torch.dot(weights, output[0,0])
            loss = (prediction - torch.tensor(1))**2
            loss.backward()
            optimizer.step()
            # Second pass
            new_output = annotation_model((annotation_tensor, alignment_tensor))
            new_prediction = torch.dot(weights, new_output[0,0])
            new_loss = (new_prediction - torch.tensor(1))**2
            if not trainable:
                # Frozen embeddings keep their one-hot rows, so the loss is unchanged.
                self.assert_annotations_equal([0,1,0,0], annotation_model.embeddings.weight.data[1,:])
                self.assertAlmostEqual(new_loss, loss, places=7)
            else:
                self.assertLess(new_loss, loss)
class ListDatasetTest(DataTest):
    def test_collate_fn(self):
        """
        Tests that ListDataset.collate_fn pads and stacks per-sentence
        wordpiece index tensors and alignment matrices into batch
        tensors, and passes the raw sentences through.
        """
        model_string = "google/bert_uncased_L-2_H-128_A-2"
        args = {'device':'cpu'}
        dataset_model = dataset.HuggingfaceData(args, model_string)
        sentence1 = (('0', 'Platypusbears', 'Noun'), ('1', 'eat', 'Verb'), ('2', 'pizza', 'Noun'))
        sentence2 = (('0', 'They', 'Noun'), ('1', 'defenestrate', 'Verb'), ('2', 'ideologies', 'Noun'), ('3', 'precisely', 'Adv'))
        wordpiece_indices1, wordpiece_alignment_vecs1 = dataset_model.tensor_of_sentence(sentence1, TRAIN_STR)
        wordpiece_indices2, wordpiece_alignment_vecs2 = dataset_model.tensor_of_sentence(sentence2, TRAIN_STR)
        # Each observation is (inputs, output, sentence); here the same
        # wordpiece tensors serve as both input and output.
        observation1 = ([(wordpiece_indices1, wordpiece_alignment_vecs1)],
                        (wordpiece_indices1, wordpiece_alignment_vecs1), sentence1)
        observation2 = ([(wordpiece_indices2, wordpiece_alignment_vecs2)],
                        (wordpiece_indices2, wordpiece_alignment_vecs2), sentence2)
        batch = dataset.ListDataset({'device':'cpu'}, None, None, None).collate_fn((observation1, observation2))
        input_batch, output_batch, sentences_batch = batch
        first_dataset_annotation_batch = input_batch[0][0]
        first_dataset_alignment_batch = input_batch[1][0]
        # The shorter wordpiece sequence is zero-padded to the longer length.
        self.assert_2darray_equal(torch.tensor([[ 101., 20228., 4017., 22571., 2271., 4783., 11650., 4521., 10733.,
                                                  102., 0.],
                                                [ 101., 2027., 13366., 28553., 6494., 2618., 8909., 8780., 21615.,
                                                  10785., 102.]]), output_batch)
        self.assert_2darray_equal(torch.tensor([[ 101., 20228., 4017., 22571., 2271., 4783., 11650., 4521., 10733.,
                                                  102., 0.],
                                                [ 101., 2027., 13366., 28553., 6494., 2618., 8909., 8780., 21615.,
                                                  10785., 102.]]), first_dataset_annotation_batch)
        # Alignment matrices are padded to (max wordpieces, max tokens);
        # weights are 1/k for a token split into k wordpieces.
        np.testing.assert_allclose(torch.tensor([[[0.0000, 0.0000, 0.0000, 0.0000],
                                                  [0.1666666667, 0.0000, 0.0000, 0.0000],
                                                  [0.1666666667, 0.0000, 0.0000, 0.0000],
                                                  [0.1666666667, 0.0000, 0.0000, 0.0000],
                                                  [0.1666666667, 0.0000, 0.0000, 0.0000],
                                                  [0.1666666667, 0.0000, 0.0000, 0.0000],
                                                  [0.1666666667, 0.0000, 0.0000, 0.0000],
                                                  [0.0000, 1.0000, 0.0000, 0.0000],
                                                  [0.0000, 0.0000, 1.0000, 0.0000],
                                                  [0.0000, 0.0000, 0.0000, 0.0000],
                                                  [0.0000, 0.0000, 0.0000, 0.0000]],
                                                 [[0.0000, 0.0000, 0.0000, 0.0000],
                                                  [1.0000, 0.0000, 0.0000, 0.0000],
                                                  [0.0000, 0.2500, 0.0000, 0.0000],
                                                  [0.0000, 0.2500, 0.0000, 0.0000],
                                                  [0.0000, 0.2500, 0.0000, 0.0000],
                                                  [0.0000, 0.2500, 0.0000, 0.0000],
                                                  [0.0000, 0.0000, 0.3333333333, 0.0000],
                                                  [0.0000, 0.0000, 0.3333333333, 0.0000],
                                                  [0.0000, 0.0000, 0.3333333333, 0.0000],
                                                  [0.0000, 0.0000, 0.0000, 1.0000],
                                                  [0.0000, 0.0000, 0.0000, 0.0000]]])
                                   , first_dataset_alignment_batch, atol=1e-6)
class HuggingfaceModelTest(DataTest):
    def test_forward_wellformedness(self):
        """
        Tests whether the output of the forward function
        of HuggingfaceModel is computed er... at least
        in terms of the dimensions, and
        the alignment to token indices properly applied
        """
        max_cardinality = 4  # NOTE(review): unused in this test
        trainable = False
        layer_index = 2
        model_string = "google/bert_uncased_L-2_H-128_A-2"
        args = {'device':'cpu'}
        huggingface_model = model.HuggingfaceModel(args, model_string, trainable, layer_index)
        dataset_model = dataset.HuggingfaceData(args, model_string)
        sentence1 = (('0', 'Platypusbears', 'Noun'), ('1', 'eat', 'Verb'), ('2', 'pizza', 'Noun'))
        sentence2 = (('0', 'They', 'Noun'), ('1', 'defenestrate', 'Verb'), ('2', 'ideologies', 'Noun'), ('3', 'precisely', 'Adv'))
        wordpiece_indices1, wordpiece_alignment_vecs1 = dataset_model.tensor_of_sentence(sentence1, TRAIN_STR)
        wordpiece_indices2, wordpiece_alignment_vecs2 = dataset_model.tensor_of_sentence(sentence2, TRAIN_STR)
        observation1 = ([(wordpiece_indices1, wordpiece_alignment_vecs1)],
                        (wordpiece_indices1, wordpiece_alignment_vecs1), sentence1)
        observation2 = ([(wordpiece_indices2, wordpiece_alignment_vecs2)],
                        (wordpiece_indices2, wordpiece_alignment_vecs2), sentence2)
        batch = dataset.ListDataset({'device':'cpu'}, None, None, None).collate_fn((observation1, observation2))
        input_batch, output_batch, sentences_batch = batch
        first_dataset_annotation_batch = input_batch[0][0]
        first_dataset_alignment_batch = input_batch[1][0]
        huggingface_model_output = huggingface_model((first_dataset_annotation_batch, first_dataset_alignment_batch))
        # (batch=2 sentences, 4 corpus tokens after alignment, hidden size 128).
        self.assertEqual((2,4,128), huggingface_model_output.shape)

    def test_trainable_is_respected(self):
        """
        Tests whether the trainable flag, which is True if
        a model should be fine-tuned during training, and False
        otherwise, is respected.
        That is, the huggingface model is trained (or not trained)
        depending on the boolean value of trainable
        """
        for trainable in (False, True):
            layer_index = 2
            model_string = "google/bert_uncased_L-2_H-128_A-2"
            args = {'device':'cpu'}
            huggingface_model = model.HuggingfaceModel(args, model_string, trainable, layer_index)
            dataset_model = dataset.HuggingfaceData(args, model_string)
            sentence1 = (('0', 'Platypusbears', 'Noun'), ('1', 'eat', 'Verb'), ('2', 'pizza', 'Noun'))
            sentence2 = (('0', 'They', 'Noun'), ('1', 'defenestrate', 'Verb'), ('2', 'ideologies', 'Noun'), ('3', 'precisely', 'Adv'))
            wordpiece_indices1, wordpiece_alignment_vecs1 = dataset_model.tensor_of_sentence(sentence1, TRAIN_STR)
            wordpiece_indices2, wordpiece_alignment_vecs2 = dataset_model.tensor_of_sentence(sentence2, TRAIN_STR)
            observation1 = ([(wordpiece_indices1, wordpiece_alignment_vecs1)],
                            (wordpiece_indices1, wordpiece_alignment_vecs1), sentence1)
            observation2 = ([(wordpiece_indices2, wordpiece_alignment_vecs2)],
                            (wordpiece_indices2, wordpiece_alignment_vecs2), sentence2)
            batch = dataset.ListDataset(args, None, None, None).collate_fn((observation1, observation2))
            input_batch, output_batch, sentences_batch = batch
            first_dataset_annotation_batch = input_batch[0][0]
            first_dataset_alignment_batch = input_batch[1][0]
            huggingface_model_output = huggingface_model((first_dataset_annotation_batch, first_dataset_alignment_batch))
            # Optimizer and weights
            # Tiny lr so a single fine-tuning step nudges, not destroys, the model.
            optimizer = torch.optim.SGD(huggingface_model.parameters(), lr=0.000000001)
            weights = torch.nn.Parameter(torch.zeros(huggingface_model_output.shape[2]))
            nn.init.uniform_(weights)
            # First pass
            prediction = torch.dot(weights, huggingface_model_output[0,0])
            loss = (prediction - torch.tensor(1))**2
            loss.backward()
            optimizer.step()
            # Second pass
            new_output = huggingface_model((first_dataset_annotation_batch, first_dataset_alignment_batch))
            new_prediction = torch.dot(weights, new_output[0,0])
            new_loss = (new_prediction - torch.tensor(1))**2
            if not trainable:
                # Frozen model: identical output, identical loss.
                self.assertAlmostEqual(new_loss, loss, places=7)
            else:
                self.assertLess(new_loss, loss)
class ListModelTest(DataTest):
    def setUp(self):
        """
        Writes temporary files with simple Ontonotes-formatted data
        """
        self.tmpfile1 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile1.write('\n'.join(
            ( '\t'.join(x) for x in
              (('0', 'Platypusbears', 'Noun'), ('1', 'eat', 'Verb'), ('2', 'pizza', 'Noun')))))
        self.tmpfile1.write('\n\n')
        self.tmpfile1.write('\n'.join(
            ( '\t'.join(x) for x in
              (('0', 'They', 'Noun'), ('1', 'defenestrate', 'Verb'), ('2', 'ideologies', 'Noun'), ('3', 'precisely', 'Adv')))))
        self.tmpfile1.flush()

    def tearDown(self):
        self.tmpfile1.close()

    def test_annotation_and_huggingface_list(self):
        """
        Tests whether the output of the forward function
        of ListModel computes each representation and
        concatenates their results.
        """
        args = {'device':'cpu'}
        input_fields = ['Token', 'PoS']
        max_cardinality = 4  # NOTE(review): unused; the model below is built with max_cardinality=10
        trainable = False
        layer_index = 2
        model_string = "google/bert_uncased_L-2_H-128_A-2"
        huggingface_dataset = dataset.HuggingfaceData(args, model_string)
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields)
        pos_dataset = dataset.AnnotationData(args, pos_task)
        token_task = task.TokenClassificationTask(args, 'Token', input_fields)
        token_dataset = dataset.AnnotationData(args, token_task)
        generator = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None)
        list_dataset = dataset.ListDataset(args, generator, pos_dataset,
                                           [token_dataset, huggingface_dataset])
        annotation_model = model.AnnotationModel(args, max_cardinality=10, trainable=False)
        huggingface_model = model.HuggingfaceModel(args, model_string, trainable, layer_index)
        list_model = model.ListModel(args, [annotation_model, huggingface_model])
        train_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        batch = next(iter(train_dataloader))
        inputs, outputs, sentences = batch
        output = list_model(inputs)
        # Concatenated feature dim: 128 (BERT hidden) + 10 (one-hot annotation).
        self.assertEqual((2,4,128+10), output.shape)
        # The annotation half of token 0 is the one-hot vector for index 1.
        self.assert_annotations_equal([0.0000, 1.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000,
                                       0.0000, 0.0000, 0.0000], output[0,0,:10])

    def test_annotation_and_two_huggingface_model_list(self):
        """
        Tests whether the output of the forward function
        of ListModel computes each representation and
        concatenates their results, with two huggingface models.
        """
        args = {'device':'cpu'}
        input_fields = ['Token', 'PoS']
        max_cardinality = 4  # NOTE(review): unused; the model below is built with max_cardinality=10
        trainable = False
        layer_index = 2
        model_string1 = "google/bert_uncased_L-2_H-128_A-2"
        model_string2 = "google/bert_uncased_L-4_H-128_A-2"
        huggingface_dataset1 = dataset.HuggingfaceData(args, model_string1)
        huggingface_dataset2 = dataset.HuggingfaceData(args, model_string2)
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields)
        pos_dataset = dataset.AnnotationData(args, pos_task)
        token_task = task.TokenClassificationTask(args, 'Token', input_fields)
        token_dataset = dataset.AnnotationData(args, token_task)
        generator = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None)
        list_dataset = dataset.ListDataset(args, generator, pos_dataset,
                                           [token_dataset, huggingface_dataset1, huggingface_dataset2])
        annotation_model = model.AnnotationModel(args, max_cardinality=10, trainable=False)
        huggingface_model1 = model.HuggingfaceModel(args, model_string1, trainable, layer_index)
        huggingface_model2 = model.HuggingfaceModel(args, model_string2, trainable, layer_index)
        list_model = model.ListModel(args, [annotation_model, huggingface_model1, huggingface_model2])
        train_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        batch = next(iter(train_dataloader))
        inputs, outputs, sentences = batch
        output = list_model(inputs)
        # Two 128-dim BERT representations plus the 10-dim annotation one-hot.
        self.assertEqual((2,4,128+128+10), output.shape)
        self.assert_annotations_equal([0.0000, 1.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000,
                                       0.0000, 0.0000, 0.0000], output[0,0,:10])
class LinearTokenLabelProbeTest(DataTest):
    def test_wellformedness(self):
        """
        Checks that the probe's forward maps a tensor of shape
        (batch_size, seq_len, feature_count) to a tensor of shape
        (batch_size, seq_len, output_size).
        """
        args = {'device':'cpu'}
        model_dim = 10
        batch_size, seq_len, feature_count = 4, 50, 128
        linear_label_probe = probe.OneWordLinearLabelProbe(
            args, feature_count, model_dim)
        inputs = torch.ones((batch_size, seq_len, feature_count))
        result = linear_label_probe(inputs)
        self.assertEqual((batch_size, seq_len, model_dim), result.shape)
class TrainerTest(DataTest):
    def setUp(self):
        """
        Writes temporary files with simple Ontonotes-formatted data
        """
        self.tmpfile1 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile1.write('\n'.join(
            ('\t'.join(x) for x in
             (('0', 'Platypusbears', 'Noun'), ('1', 'eat', 'Verb'), ('2', 'pizza', 'Noun')))))
        self.tmpfile1.write('\n\n')
        self.tmpfile1.write('\n'.join(
            ('\t'.join(x) for x in
             (('0', 'They', 'Noun'), ('1', 'defenestrate', 'Verb'), ('2', 'ideologies', 'Noun'), ('3', 'precisely', 'Adv')))))
        self.tmpfile1.flush()

    def tearDown(self):
        self.tmpfile1.close()
        # Training writes probe parameters to this path; clean up after the test.
        os.remove('.params')

    def test_wellformedness(self):
        """
        Tests that two epochs of probe training on the toy data
        reduce the dev loss between evaluations.
        """
        args = {'device':'cpu'}
        input_fields = ['Token', 'PoS']
        reporting_root = '.'
        # Data loaders and feeders
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields)
        pos_dataset = dataset.AnnotationData(args, pos_task)
        token_task = task.TokenClassificationTask(args, 'Token', input_fields)
        token_dataset = dataset.AnnotationData(args, token_task)
        generator = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None)
        list_dataset = dataset.ListDataset(args, generator, pos_dataset,
                                           [token_dataset])
        train_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        # The same (train) data doubles as the dev set for this smoke test.
        dev_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        # Models for generating representations
        token_model = model.AnnotationModel(args, max_cardinality=10, trainable=True)
        list_model = model.ListModel(args, [token_model])
        # The probe model
        linear_label_probe = probe.OneWordLinearLabelProbe(
            args, 10, 10)
        # Training procedure
        regimen = trainer.ProbeRegimen(args, max_epochs=2, params_path='.params', reporting_root=reporting_root)
        dev_losses = regimen.train_until_convergence(linear_label_probe, list_model, None, train_dataloader, dev_dataloader,
                                                     gradient_steps_between_eval=1)
        self.assertLess(dev_losses[1], dev_losses[0])
class LabelReporterTest(DataTest):
    """
    Tests reporting of metrics for single-token tasks
    """
    def setUp(self):
        """
        Writes temporary files with simple Ontonotes-formatted data
        """
        self.tmpfile1 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile1.write('\n'.join(
            ('\t'.join(x) for x in
             (('0', 'Platypusbears', 'Noun'), ('1', 'eat', 'Verb'), ('2', 'pizza', 'Noun')))))
        self.tmpfile1.write('\n\n')
        self.tmpfile1.write('\n'.join(
            ('\t'.join(x) for x in
             (('0', 'They', 'Noun'), ('1', 'defenestrate', 'Verb'), ('2', 'ideologies', 'Noun'), ('3', 'precisely', 'Adv')))))
        self.tmpfile1.flush()

    def tearDown(self):
        self.tmpfile1.close()
        # The reporter writes both metric files to the reporting root;
        # remove both so repeated runs start clean.
        os.remove('train.label_acc')
        os.remove('train.v_entropy')

    def test_label_accuracy_v_entropy(self):
        """
        Runs the label-accuracy and v-entropy reporters on fixed
        predictions and checks the metric files they write.
        """
        args = {'device':'cpu'}
        reporting_root = '.'
        input_fields = ['Token', 'PoS']
        batch_size, seq_len, label_count = 2, 4, 8
        # One-hot predictions for each token position; padding stays zero.
        predictions = torch.zeros(batch_size, seq_len, label_count)
        predictions[0,0,1], predictions[0,1,2], predictions[0,2,3] = 1,1,1
        predictions[1,0,1], predictions[1,1,2], predictions[1,2,3], predictions[1,3,0] = 1,1,1,1
        # Data loaders and feeders
        pos_task = task.TokenClassificationTask(args, 'PoS', input_fields)
        pos_dataset = dataset.AnnotationData(args, pos_task)
        token_task = task.TokenClassificationTask(args, 'Token', input_fields)
        token_dataset = dataset.AnnotationData(args, token_task)
        generator = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None)
        list_dataset = dataset.ListDataset(args, generator, pos_dataset,
                                           [token_dataset])
        train_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        dev_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        # The reporter
        reporting_methods = ['label_accuracy', 'v_entropy']
        reporter_class = reporter.IndependentLabelReporter(args, reporting_root, reporting_methods)
        reporter_class([predictions], train_dataloader, TRAIN_STR)
        with open('train.label_acc') as fin:
            acc = float(fin.read().strip())
        self.assertAlmostEqual(acc, 0.42857142857142855)
        with open('train.v_entropy') as fin:
            acc = float(fin.read().strip())
        self.assertAlmostEqual(acc, 1.8454373223440987)
# Simulated Ontonotes-formatted rows (tab-separated columns) used by NERTest
# below; column 20 holds the raw NER bracket labels, e.g. (WORK_OF_ART* ... *).
ner_data = \
"""1\tparts\tpart\tNOUN\tNNS\t_\t0\troot\t_\t_\twb/eng/00/eng_0017\t5\t0\tparts\tNNS\t(TOP(NP(NP*)\tpart\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*\t*\t-
3\tThe\tthe\tDET\tDT\t_\t5\tdet\t_\t_\twb/eng/00/eng_0017\t5\t2\tThe\tDT\t(NP(NP(NP*\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t(WORK_OF_ART*\t*\t-
4\tBurning\tBurning\tPROPN\tNNP\t_\t5\tcompound\t_\t_\twb/eng/00/eng_0017\t5\t3\tBurning\tNNP\t*\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*\t*\t-
5\tRoadblocks\tRoadblocks\tPROPN\tNNPS\t_\t1\tnmod\t_\t_\twb/eng/00/eng_0017\t5\t4\tRoadblocks\tNNPS\t*)\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*)\t*\t-
1\tparts\tpart\tNOUN\tNNS\t_\t0\troot\t_\t_\twb/eng/00/eng_0017\t5\t0\tparts\tNNS\t(TOP(NP(NP*)\tpart\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*\t*\t-
24\tRyszard\tRyszard\tPROPN\tNNP\t_\t25\tcompound\t_\t_\twb/eng/00/eng_0017\t5\t23\tRyszard\tNNP\t(NP*\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t(PERSON*\t*\t-
25\tski\tski\tPROPN\tNNP\t_\t21\tnmod\t_\t_\twb/eng/00/eng_0017\t5\t24\tski\tNNP\t*))\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*)\t*\t-
31\t1991\t1991\tNUM\tCD\t_\t29\tamod\t_\t_\twb/eng/00/eng_0017\t5\t30\t1991\tCD\t(NP*))))))))\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t(DATE)\t*\t-
8\tfor\tfor\tADP\tIN\t_\t10\tcase\t_\t_\twb/eng/00/eng_0017\t5\t7\tfor\tIN\t(PP*\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*\t*\t-
31\t1991\t1991\tNUM\tCD\t_\t29\tamod\t_\t_\twb/eng/00/eng_0017\t5\t30\t1991\tCD\t(NP*))))))))\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t(DATE)\t*\t-
27\tAlfred\tAlfred\tPROPN\tNNP\t_\t29\tcompound\t_\t_\twb/eng/00/eng_0017\t5\t26\tAlfred\tNNP\t(NP(NP*\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t(ORG*\t*\t-
28\tA.\tA.\tPROPN\tNNP\t_\t29\tcompound\t_\t_\twb/eng/00/eng_0017\t5\t27\tA.\tNNP\t*\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*\t*\t-
29\tKnopf\tKnopf\tPROPN\tNNP\t_\t21\tappos\t_\t_\twb/eng/00/eng_0017\t5\t28\tKnopf\tNNP\t*)\t-\t-\t-\t_lekker_<lek...@intergate.bc.ca>_\t*)\t*\t-"""
class NERTest(DataTest):
    """
    Tests loading and scoring of Named Entity Recognition tags
    """
    def setUp(self):
        """
        Writes temporary files with simple Ontonotes-formatted data
        """
        # NOTE(review): no tearDown; the tmpfile and the train.* metric
        # files written by test_ner_f1_evaluation are left behind — confirm
        # whether cleanup is handled elsewhere.
        self.tmpfile1 = tempfile.NamedTemporaryFile(mode='w')
        self.tmpfile1.write(ner_data)
        self.tmpfile1.flush()

    def test_ner_bioes_tags(self):
        """
        Tests whether NER annotations are as expected, on simulated NER data
        The raw NER labels in the ontonotes look like this :
        (WORK_OF_ART
        )
        (PERSON
        )
        (DATE)
        (DATE)
        (ORG
        )
        """
        args = {}
        reporting_root = '.'
        # Column layout of the simulated Ontonotes rows; 'named_entities' is column 20.
        input_fields = ['index', 'token', 'lemma', 'upos', 'xpos', '-', 'head_index', 'dep_rel', '-', '-', 'section', 'index2', 'xpos2', 'parse_bit', 'lemma','-','-','speaker','-', '-', 'named_entities','-']
        ner_task = task.NERClassificationTask(args, 'named_entities', input_fields)
        ner_task._manual_setup()
        sentence = [x.split('\t') for x in ner_data.split('\n')]
        # Bracketed raw labels are converted to BIOES tags (Begin/Inside/
        # Outside/End/Single).
        string_labels = ner_task._string_labels_of_sentence(sentence)
        self.assert_annotations_equal(string_labels, ['O', 'B-WORK_OF_ART', 'I-WORK_OF_ART', 'E-WORK_OF_ART', 'O', 'B-PERSON', 'E-PERSON', 'S-DATE', 'O', 'S-DATE', 'B-ORG', 'I-ORG', 'E-ORG'])
        # Each distinct string label gets its own integer id, assigned in
        # order of first appearance (starting at 1).
        integer_labels = ner_task.labels_of_sentence(sentence, TRAIN_STR)
        np.testing.assert_allclose(integer_labels, [ 1., 2., 3., 4., 1., 5., 6., 7., 1., 7., 8., 9., 10.])

    def test_ner_f1_evaluation(self):
        """
        Tests span-level precision/recall/F1 written by NERReporter for a
        fixed set of (partly wrong) one-hot predictions.
        """
        args = {'device':'cpu'}
        reporting_root = '.'
        input_fields = ['index', 'token', 'lemma', 'upos', 'xpos', '-', 'head_index', 'dep_rel', '-', '-', 'section', 'index2', 'xpos2', 'parse_bit', 'lemma','-','-','speaker','-', '-', 'named_entities','-']
        ner_task = task.NERClassificationTask(args, 'named_entities', input_fields)
        ner_task._manual_setup()
        sentence = [x.split('\t') for x in ner_data.split('\n')]
        batch_size, seq_len, label_count = 1, 13, 11
        predictions = torch.zeros(batch_size, seq_len, label_count)
        # One-hot predicted label ids per position; deliberately scrambled
        # relative to the gold ids so precision/recall differ.
        predictions[0,0,1], predictions[0,1,3], predictions[0,2,4], predictions[0,3,2] = 1,1,1,1
        predictions[0,4,1], predictions[0,5,5], predictions[0,6,5], predictions[0,7,7] = 1,1,1,1
        predictions[0,8,10], predictions[0,9,7], predictions[0,10,8], predictions[0,11,9] = 1,1,1,1
        predictions[0,12,10] = 1
        ner_dataset = dataset.AnnotationData(args, ner_task)
        token_task = task.TokenClassificationTask(args, 'token', input_fields)
        token_dataset = dataset.AnnotationData(args, token_task)
        generator = dataset.OntonotesReader(None, self.tmpfile1.name, None, None, None)
        list_dataset = dataset.ListDataset(args, generator, ner_dataset,
                                           [token_dataset])
        train_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        dev_dataloader = list_dataset.get_train_dataloader(shuffle=False)
        # The reporter
        reporting_methods = ['ner_f1', 'v_entropy']
        reporter_class = reporter.NERReporter(args, reporting_root, reporting_methods, ner_task)
        reporter_class([predictions], train_dataloader, TRAIN_STR)
        with open('train.precision') as fin:
            acc = float(fin.read().strip())
        self.assertAlmostEqual(acc, .375)
        with open('train.recall') as fin:
            acc = float(fin.read().strip())
        self.assertAlmostEqual(acc, .6)
        with open('train.f1') as fin:
            acc = float(fin.read().strip())
        self.assertAlmostEqual(acc, 0.4615384615384615)
# Script entry point: run the whole test suite.
if __name__ == '__main__':
    unittest.main()
| 42.283317
| 203
| 0.69311
| 5,715
| 42,833
| 4.984077
| 0.093263
| 0.019485
| 0.020854
| 0.028086
| 0.825411
| 0.809332
| 0.790514
| 0.770222
| 0.748455
| 0.735571
| 0
| 0.0563
| 0.163169
| 42,833
| 1,012
| 204
| 42.325099
| 0.738366
| 0.090421
| 0
| 0.7017
| 0
| 0.006182
| 0.068612
| 0.016469
| 0
| 0
| 0
| 0
| 0.136012
| 1
| 0.061824
| false
| 0
| 0.023184
| 0
| 0.106646
| 0.003091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53387ad4b88589690c998823048ebb084d46e063
| 1,981
|
py
|
Python
|
ChainJobs/RasterProducerJob.py
|
ptsagkis/UGLab
|
2ef1cdd0927152c763f6b36154b0a7b6c4ca2d54
|
[
"MIT"
] | 1
|
2021-11-03T20:31:57.000Z
|
2021-11-03T20:31:57.000Z
|
ChainJobs/RasterProducerJob.py
|
ptsagkis/UGLab
|
2ef1cdd0927152c763f6b36154b0a7b6c4ca2d54
|
[
"MIT"
] | null | null | null |
ChainJobs/RasterProducerJob.py
|
ptsagkis/UGLab
|
2ef1cdd0927152c763f6b36154b0a7b6c4ca2d54
|
[
"MIT"
] | null | null | null |
from DataHandler.Raster.RasterUtils import RasterUtils
from Services.SpinnerThread import SpinnerThread
from Config.Constants import Constants
class RasterProducerJob:
    """
    From the generated tabular data create respective raster files.

    Reads the prediction CSVs produced by earlier chain jobs and writes
    one GeoTIFF per spec row, showing a console spinner while working.
    """
    def __init__(self, project_path):
        # Root folder of the current project; all inputs/outputs live below it.
        self.PROJECT_PATH = project_path
        self.spinner_thread = SpinnerThread()

    def execute(self):
        """Create every output raster from its CSV source."""
        self.spinner_thread.start()
        try:
            # (csv constant, value column index, reclass-raster index,
            #  output path relative to the project root)
            specs = (
                (Constants.OUTPUT_PREDICTION_CORINE_CSV1, 5, 3, 'ml_data\\corine_2030_PREDICTED_SEQ.tif'),
                (Constants.OUTPUT_PREDICTION_CORINE_CSV1, 4, 1, 'ml_data\\corine_2018_PREDICTED_SEQ.tif'),
                (Constants.OUTPUT_PREDICTION_CORINE_CSV2, 5, 3, 'ml_data\\corine_2030_PREDICTED_RF.tif'),
                (Constants.OUTPUT_PREDICTION_CORINE_CSV2, 4, 1, 'ml_data\\corine_2018_PREDICTED_RF.tif'),
                (Constants.OUTPUT_PREDICTION_CORINE_CSV1, 3, 1, 'ml_data\\corine_2018_REAL.tif'),
            )
            for csv_suffix, column, raster_index, out_suffix in specs:
                RasterUtils.create_raster_from_csv(
                    self.PROJECT_PATH + csv_suffix,
                    column,
                    self.PROJECT_PATH + Constants.OUTPUT_RECLASS_RASTER_CORINE_STEPS_CODE_2[raster_index],
                    self.PROJECT_PATH + out_suffix
                )
        finally:
            # Stop the spinner even if raster creation raises.
            self.spinner_thread.stop()
| 38.843137
| 87
| 0.690056
| 231
| 1,981
| 5.467532
| 0.21645
| 0.15677
| 0.2019
| 0.190024
| 0.726049
| 0.726049
| 0.726049
| 0.726049
| 0.726049
| 0.726049
| 0
| 0.026596
| 0.240787
| 1,981
| 51
| 88
| 38.843137
| 0.813165
| 0.031297
| 0
| 0.475
| 1
| 0
| 0.094013
| 0.094013
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.075
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
726466aba708a8794589e6757ca7fbc457ed02fc
| 29
|
py
|
Python
|
src/hogelib/v1/version.py
|
nabetani/pythonver
|
d0be886f66d51105130bf9188db376e101fc0c64
|
[
"MIT"
] | null | null | null |
src/hogelib/v1/version.py
|
nabetani/pythonver
|
d0be886f66d51105130bf9188db376e101fc0c64
|
[
"MIT"
] | null | null | null |
src/hogelib/v1/version.py
|
nabetani/pythonver
|
d0be886f66d51105130bf9188db376e101fc0c64
|
[
"MIT"
] | null | null | null |
def version():
    """Return the library's version string."""
    major, minor = "1", "0"
    return ".".join((major, minor))
| 14.5
| 14
| 0.62069
| 5
| 29
| 3.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.172414
| 29
| 2
| 15
| 14.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
f415cf4c1e57ad2f372f6252be37c1815a32013d
| 126,851
|
py
|
Python
|
win/devkit/other/pymel/extras/completion/py/pymel/core/runtime.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | 10
|
2018-03-30T16:09:02.000Z
|
2021-12-07T07:29:19.000Z
|
win/devkit/other/pymel/extras/completion/py/pymel/core/runtime.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | null | null | null |
win/devkit/other/pymel/extras/completion/py/pymel/core/runtime.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | 9
|
2018-06-02T09:18:49.000Z
|
2021-12-20T09:24:35.000Z
|
import pymel.internal.factories as _factories
def RenderTextureRangeOptions(*args, **kwargs):
pass
def HideUnselectedObjects(*args, **kwargs):
pass
def CreateSubCharacterOptions(*args, **kwargs):
pass
def GetToonExample(*args, **kwargs):
pass
def RenderLayerEditorWindow(*args, **kwargs):
pass
def ToggleModelingToolkit(*args, **kwargs):
pass
def ToggleSurfaceOrigin(*args, **kwargs):
pass
def FluidGradients(*args, **kwargs):
pass
def InsertKnotOptions(*args, **kwargs):
pass
def RenameCurrentUVSet(*args, **kwargs):
pass
def SubdividePolygon(*args, **kwargs):
pass
def DetachComponent(*args, **kwargs):
pass
def HypershadeDisplayAsList(*args, **kwargs):
pass
def AddShrinkWrapSurfaces(*args, **kwargs):
pass
def NodeEditorToggleZoomIn(*args, **kwargs):
pass
def RemoveBlendShape(*args, **kwargs):
pass
def HypershadeCollapseAsset(*args, **kwargs):
pass
def NEmitFromObject(*args, **kwargs):
pass
def BrushPresetBlendShapeOff(*args, **kwargs):
pass
def AddPondDynamicBuoy(*args, **kwargs):
pass
def TogglePolygonFaceCenters(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def fluidReplaceFramesOpt(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def CreatePolygonCylinderOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def AddOceanSurfaceLocator(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PolyExtrudeVerticesOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def ToggleMeshEdges(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PickWalkRight(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def SelectAllInput(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def attachGeometryCache(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def AddMissingFBIKEffectors(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def ConnectionEditor(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PolyExtrudeEdgesOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def ProjectWindow(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def SoloMaterial(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PaintSetMembershipToolOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def DeleteAllPoses(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PolyDisplayReset(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def UnmirrorSmoothProxy(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PublishAttributesOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def GroupOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def AffectSelectedObject(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def CreateOceanWake(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PreInfinityCycle(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def GoToMaxFrame(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def ToggleFrameRate(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def CreateActiveRigidBodyOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def AddDynamicBuoyOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PolyAssignSubdivHole(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def HypershadeShowDirectoriesAndFiles(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PostInfinityOscillate(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def SetProject(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def Snap2PointsTo2PointsOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def PointOnPolyConstraintOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def DeleteAllContainers(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def SetMeshPinchTool(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def CreateDirectionalLight(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def SineOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def UnlockNormals(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def AddInfluenceOptions(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def SelectFacetMask(*args, **kwargs):
    """Completion stub: accepts any arguments, does nothing, returns None."""
    return None
def HideAll(*args, **kwargs):
pass
def MergeToCenter(*args, **kwargs):
pass
def SetMeshFreezeTool(*args, **kwargs):
pass
def HideFur(*args, **kwargs):
pass
def mayaPreviewRenderIntoNewWindow(*args, **kwargs):
pass
def ToggleDisplayGradient(*args, **kwargs):
pass
def PostInfinityCycleOffset(*args, **kwargs):
pass
def AddDynamicBuoy(*args, **kwargs):
pass
def EnableNCloths(*args, **kwargs):
pass
def SetMeshAmplifyTool(*args, **kwargs):
pass
def TimeDraggerToolActivate(*args, **kwargs):
pass
def SelectContiguousEdgesOptions(*args, **kwargs):
pass
def DuplicateSpecial(*args, **kwargs):
pass
def BrushAnimationMarkingMenuPopDown(*args, **kwargs):
pass
def ResetWeightsToDefault(*args, **kwargs):
pass
def geometryDeleteCacheFrames(*args, **kwargs):
pass
def Bevel(*args, **kwargs):
pass
def TranslateToolWithSnapMarkingMenuPopDown(*args, **kwargs):
pass
def PolygonCopyOptions(*args, **kwargs):
pass
def LoftOptions(*args, **kwargs):
pass
def CreateCreaseSetOptions(*args, **kwargs):
pass
def SelectNone(*args, **kwargs):
pass
def SelectAllPolygonGeometry(*args, **kwargs):
pass
def BrushPresetBlend(*args, **kwargs):
pass
def CreateBindingSet(*args, **kwargs):
pass
def DisplayWireframe(*args, **kwargs):
pass
def HypershadeMoveTabUp(*args, **kwargs):
pass
def MergeVerticesOptions(*args, **kwargs):
pass
def GoToFBIKStancePose(*args, **kwargs):
pass
def ChamferVertex(*args, **kwargs):
pass
def HypershadeEditPSDFile(*args, **kwargs):
pass
def BakeNonDefHistory(*args, **kwargs):
pass
def HideSelectedObjects(*args, **kwargs):
pass
def ToggleViewCube(*args, **kwargs):
pass
def CreateShrinkWrap(*args, **kwargs):
pass
def LassoTool(*args, **kwargs):
pass
def ToggleSelectDetails(*args, **kwargs):
pass
def CreateFBIKOptions(*args, **kwargs):
pass
def ShowIKHandles(*args, **kwargs):
pass
def InsertEdgeLoopToolOptions(*args, **kwargs):
pass
def Birail1(*args, **kwargs):
pass
def OffsetEdgeLoopToolOptions(*args, **kwargs):
pass
def ToggleEditPoints(*args, **kwargs):
pass
def DeleteStaticChannelsOptions(*args, **kwargs):
pass
def HypershadeDeleteAllUtilities(*args, **kwargs):
pass
def BevelOptions(*args, **kwargs):
pass
def NodeEditorToggleConsistentNodeNameSize(*args, **kwargs):
pass
def MakePaintable(*args, **kwargs):
pass
def ArtPaintBlendShapeWeightsToolOptions(*args, **kwargs):
pass
def CreatePolygonToolOptions(*args, **kwargs):
pass
def ShowHairSystems(*args, **kwargs):
pass
def DuplicateSpecialOptions(*args, **kwargs):
pass
def CutPolygon(*args, **kwargs):
pass
def PlayblastWindow(*args, **kwargs):
pass
def ShowGeometry(*args, **kwargs):
pass
def FBIKReachKeyingOptionSimple(*args, **kwargs):
pass
def PerspOutlinerLayout(*args, **kwargs):
pass
def OutlinerUnhide(*args, **kwargs):
pass
def SquashOptions(*args, **kwargs):
pass
def SetFocusToNumericInputLine(*args, **kwargs):
pass
def CompleteCurrentTool(*args, **kwargs):
pass
def DeleteEntireHairSystem(*args, **kwargs):
pass
def SnapToGrid(*args, **kwargs):
pass
def RadialOptions(*args, **kwargs):
pass
def ShowBaseWire(*args, **kwargs):
pass
def AnimationSweepOptions(*args, **kwargs):
pass
def ShowAllComponents(*args, **kwargs):
pass
def GraphSnap(*args, **kwargs):
pass
def ToggleMaterialLoadingDetailsVisibility(*args, **kwargs):
pass
def MirrorCutPolygonGeometryOptions(*args, **kwargs):
pass
def SinglePerspectiveViewLayout(*args, **kwargs):
pass
def nClothAppendOpt(*args, **kwargs):
pass
def AddToCharacterSet(*args, **kwargs):
pass
def CreateNURBSSphere(*args, **kwargs):
pass
def ExtendFluidOptions(*args, **kwargs):
pass
def ToggleFaceIDs(*args, **kwargs):
pass
def Create2DContainer(*args, **kwargs):
pass
def SimplifyCurve(*args, **kwargs):
pass
def deleteGeometryCache(*args, **kwargs):
pass
def HypershadeOutlinerPerspLayout(*args, **kwargs):
pass
def HypershadeSetSmallNodeSwatchSize(*args, **kwargs):
pass
def ShadingGroupAttributeEditor(*args, **kwargs):
pass
def mrMapVisualizer(*args, **kwargs):
pass
def SmoothProxyOptions(*args, **kwargs):
pass
def UntrimSurfaces(*args, **kwargs):
pass
def DecrementFluidCenter(*args, **kwargs):
pass
def HideIntermediateObjects(*args, **kwargs):
pass
def SetToFaceNormalsOptions(*args, **kwargs):
pass
def NextManipulatorHandle(*args, **kwargs):
pass
def HypershadeOpenConnectWindow(*args, **kwargs):
pass
def HIKPinTranslate(*args, **kwargs):
pass
def MarkingMenuPopDown(*args, **kwargs):
pass
def ToggleCommandLine(*args, **kwargs):
pass
def ConvertSelectionToVertexPerimeter(*args, **kwargs):
pass
def EmptyAnimLayer(*args, **kwargs):
pass
def HypershadeRenderTextureRangeOptions(*args, **kwargs):
pass
def PaintEffectsMeshQuality(*args, **kwargs):
pass
def RigidBindSkin(*args, **kwargs):
pass
def NodeEditorSetLargeNodeSwatchSize(*args, **kwargs):
pass
def HypershadeOpenPropertyEditorWindow(*args, **kwargs):
pass
def BoundaryOptions(*args, **kwargs):
pass
def CreateCharacterOptions(*args, **kwargs):
pass
def Uniform(*args, **kwargs):
pass
def U3DBrushPressureOff(*args, **kwargs):
pass
def NodeEditorRenderSwatches(*args, **kwargs):
pass
def CreateWakeOptions(*args, **kwargs):
pass
def MakeHoleTool(*args, **kwargs):
pass
def CreateCameraAimUpOptions(*args, **kwargs):
pass
def TransferAttributeValuesOptions(*args, **kwargs):
pass
def PolygonBooleanIntersectionOptions(*args, **kwargs):
pass
def SelectTextureReferenceObject(*args, **kwargs):
pass
def LevelOfDetailUngroup(*args, **kwargs):
pass
def CreateConstraint(*args, **kwargs):
pass
def CreateBlendShape(*args, **kwargs):
pass
def SelectIsolate(*args, **kwargs):
pass
def ClearCurrentContainer(*args, **kwargs):
pass
def DisplacementToPolygon(*args, **kwargs):
pass
def HypershadeGraphUpstream(*args, **kwargs):
pass
def BrushPresetReplaceShadingOff(*args, **kwargs):
pass
def DeleteEdge(*args, **kwargs):
pass
def ReversePolygonNormalsOptions(*args, **kwargs):
pass
def AutobindContainer(*args, **kwargs):
pass
def CreateSubdivSurface(*args, **kwargs):
pass
def HideNParticles(*args, **kwargs):
pass
def ToggleUIElements(*args, **kwargs):
pass
def CreateLattice(*args, **kwargs):
pass
def ToggleCameraNames(*args, **kwargs):
pass
def CreateEmitter(*args, **kwargs):
pass
def FrameSelectedInAllViews(*args, **kwargs):
pass
def BakeNonDefHistoryOptions(*args, **kwargs):
pass
def CreateDiskCacheOptions(*args, **kwargs):
pass
def SeparatePolygon(*args, **kwargs):
pass
def SurfaceBooleanSubtractToolOptions(*args, **kwargs):
pass
def DetachSkinOptions(*args, **kwargs):
pass
def DistanceTool(*args, **kwargs):
pass
def BakeAllNonDefHistory(*args, **kwargs):
pass
def TimeDraggerToolDeactivate(*args, **kwargs):
pass
def StraightenUVBorderOptions(*args, **kwargs):
pass
def NormalConstraint(*args, **kwargs):
pass
def HypershadeCreateContainerOptions(*args, **kwargs):
pass
def AssumePreferredAngle(*args, **kwargs):
pass
def RebuildCurve(*args, **kwargs):
pass
def ToggleSceneTimecode(*args, **kwargs):
pass
def ApplySettingsToSelectedStroke(*args, **kwargs):
pass
def CreatePolygonPrism(*args, **kwargs):
pass
def FlipUVs(*args, **kwargs):
pass
def ToggleViewportRenderer(*args, **kwargs):
pass
def ToggleMultiColorFeedback(*args, **kwargs):
pass
def Planar(*args, **kwargs):
pass
def PolygonHardenEdge(*args, **kwargs):
pass
def ExtrudeEdge(*args, **kwargs):
pass
def ConvertSelectionToEdges(*args, **kwargs):
pass
def SculptReferenceVectorMarkingMenuRelease(*args, **kwargs):
pass
def SplitMeshWithProjectedCurveOptions(*args, **kwargs):
pass
def SelectVertexMask(*args, **kwargs):
pass
def CoarsenSelectedComponents(*args, **kwargs):
pass
def PublishParentAnchorOptions(*args, **kwargs):
pass
def PencilCurveTool(*args, **kwargs):
pass
def GoToPreviousDrivenKey(*args, **kwargs):
pass
def AlignCurveOptions(*args, **kwargs):
pass
def CreatePassiveRigidBodyOptions(*args, **kwargs):
pass
def GraphCopy(*args, **kwargs):
pass
def WireDropoffLocatorOptions(*args, **kwargs):
pass
def CreateNURBSCircleOptions(*args, **kwargs):
pass
def ExportSelectionOptions(*args, **kwargs):
pass
def HypershadeTestTextureOptions(*args, **kwargs):
pass
def ParticleFillOptions(*args, **kwargs):
pass
def PolyRemoveCrease(*args, **kwargs):
pass
def CopyKeysOptions(*args, **kwargs):
pass
def nClothReplaceFrames(*args, **kwargs):
pass
def WhatsNewHighlightingOff(*args, **kwargs):
pass
def HypershadeSelectDownStream(*args, **kwargs):
pass
def DeleteAllImagePlanes(*args, **kwargs):
pass
def ParentConstraintOptions(*args, **kwargs):
pass
def HypergraphDGWindow(*args, **kwargs):
pass
def nClothMergeCache(*args, **kwargs):
pass
def SmoothBindSkin(*args, **kwargs):
pass
def NodeEditorGraphAddSelected(*args, **kwargs):
pass
def SelectToolMarkingMenuPopDown(*args, **kwargs):
pass
def Wave(*args, **kwargs):
pass
def SelectToolOptionsMarkingMenuPopDown(*args, **kwargs):
pass
def CylindricalProjection(*args, **kwargs):
pass
def AddTimeWarp(*args, **kwargs):
pass
def ParameterTool(*args, **kwargs):
pass
def NURBSToPolygonsOptions(*args, **kwargs):
pass
def Vortex(*args, **kwargs):
pass
def AddHolder(*args, **kwargs):
pass
def ViewSequence(*args, **kwargs):
pass
def ToggleBackfaceCulling(*args, **kwargs):
pass
def SelectSharedUVInstances(*args, **kwargs):
pass
def PaintRandomOptions(*args, **kwargs):
pass
def EditFluidResolution(*args, **kwargs):
pass
def HypershadeRefreshFileListing(*args, **kwargs):
pass
def UpdateEraseSurface(*args, **kwargs):
pass
def OutlinerToggleShapes(*args, **kwargs):
pass
def geometryReplaceCache(*args, **kwargs):
pass
def CVHardnessOptions(*args, **kwargs):
pass
def Birail1Options(*args, **kwargs):
pass
def CustomNURBSComponentsOptions(*args, **kwargs):
pass
def ScriptPaintToolOptions(*args, **kwargs):
pass
def FreeformFillet(*args, **kwargs):
pass
def TumbleTool(*args, **kwargs):
pass
def PolygonSoftenHarden(*args, **kwargs):
pass
def ScaleToolOptions(*args, **kwargs):
pass
def MakeBrushSpring(*args, **kwargs):
pass
def TransplantHair(*args, **kwargs):
pass
def FreezeTransformationsOptions(*args, **kwargs):
pass
def PolySelectTool(*args, **kwargs):
pass
def SelectPreviousObjects3dsMax(*args, **kwargs):
pass
def AddCurvesToHairSystem(*args, **kwargs):
pass
def CreateCameraFromView(*args, **kwargs):
pass
def ScaleToolWithSnapMarkingMenu(*args, **kwargs):
pass
def ScaleKeys(*args, **kwargs):
pass
def SwapBlendShapeOptions(*args, **kwargs):
pass
def ChannelControlEditor(*args, **kwargs):
pass
def ConnectJointOptions(*args, **kwargs):
pass
def HypershadeGraphClearGraph(*args, **kwargs):
pass
def BaseLevelComponentDisplay(*args, **kwargs):
pass
def TexSculptUnpinAll(*args, **kwargs):
pass
def HideUIElements(*args, **kwargs):
pass
def TransferAttributeValues(*args, **kwargs):
pass
def SaveFluidStateAs(*args, **kwargs):
pass
def AttachSurfaces(*args, **kwargs):
pass
def CreateSpringOptions(*args, **kwargs):
pass
def GetOceanPondExample(*args, **kwargs):
pass
def TexSculptActivateBrushSize(*args, **kwargs):
pass
def UpdateCurrentScene3dsMax(*args, **kwargs):
pass
def NParticleToolOptions(*args, **kwargs):
pass
def ToggleSubdDetails(*args, **kwargs):
pass
def CreateHairCache(*args, **kwargs):
pass
def STRSTweakModeOff(*args, **kwargs):
pass
def geometryMergeCacheOpt(*args, **kwargs):
pass
def FluidEmitter(*args, **kwargs):
pass
def InsertKeysToolOptions(*args, **kwargs):
pass
def TemplateObject(*args, **kwargs):
pass
def ProfilerToolToggleRecording(*args, **kwargs):
pass
def SubdivToNURBS(*args, **kwargs):
pass
def ConnectComponents(*args, **kwargs):
pass
def ConvertToKey(*args, **kwargs):
pass
def HypershadeDisplayAsIcons(*args, **kwargs):
pass
def TangentsStepped(*args, **kwargs):
pass
def IntersectCurveOptions(*args, **kwargs):
pass
def NodeEditorToggleSyncedSelection(*args, **kwargs):
pass
def RelaxUVShell(*args, **kwargs):
pass
def AssignBrushToHairSystem(*args, **kwargs):
pass
def HypershadeCloseActiveTab(*args, **kwargs):
pass
def NCreateEmitter(*args, **kwargs):
pass
def CreateQuickSelectSet(*args, **kwargs):
pass
def ProfilerTool(*args, **kwargs):
pass
def TogglePolyDisplaySoftEdges(*args, **kwargs):
pass
def ArtPaintSelectTool(*args, **kwargs):
pass
def CreatePolygonCubeOptions(*args, **kwargs):
pass
def FireOptions(*args, **kwargs):
pass
def IncreaseExposureCoarse(*args, **kwargs):
pass
def PickWalkLeft(*args, **kwargs):
pass
def SelectAllIKHandles(*args, **kwargs):
pass
def PrelightPolygonOptions(*args, **kwargs):
pass
def HypershadeToggleZoomOut(*args, **kwargs):
pass
def OutTangentSpline(*args, **kwargs):
pass
def ShowMeshGrabToolOptions(*args, **kwargs):
pass
def SoftModToolOptions(*args, **kwargs):
pass
def PreInfinityLinear(*args, **kwargs):
pass
def DeleteAllNonLinearDeformers(*args, **kwargs):
pass
def ApplySettingsToLastStroke(*args, **kwargs):
pass
def ShowMeshBulgeToolOptions(*args, **kwargs):
pass
def PruneWire(*args, **kwargs):
pass
def GridUVOptions(*args, **kwargs):
pass
def PostInfinityLinear(*args, **kwargs):
pass
def MirrorSkinWeights(*args, **kwargs):
pass
def CreateOcean(*args, **kwargs):
pass
def ShowLights(*args, **kwargs):
pass
def HypershadeDisplayAsExtraLargeSwatches(*args, **kwargs):
pass
def GoToDefaultView(*args, **kwargs):
pass
def Create3DContainerOptions(*args, **kwargs):
pass
def SetIKFKKeyframe(*args, **kwargs):
pass
def ExportDeformerWeightsOptions(*args, **kwargs):
pass
def HypershadeShowConnectedAttrs(*args, **kwargs):
pass
def CreateEmptyUVSetOptions(*args, **kwargs):
pass
def ConvertSelectionToUVs(*args, **kwargs):
pass
def SmoothingLevelIncrease(*args, **kwargs):
pass
def SetFullBodyIKKeysAll(*args, **kwargs):
pass
def ShowFur(*args, **kwargs):
pass
def DeleteAllClusters(*args, **kwargs):
pass
def HypershadeDeleteAllCamerasAndImagePlanes(*args, **kwargs):
pass
def SimplifyStrokePathCurves(*args, **kwargs):
pass
def NodeEditorCloseActiveTab(*args, **kwargs):
pass
def SetFluidAttrFromCurve(*args, **kwargs):
pass
def ShowDynamicConstraints(*args, **kwargs):
pass
def MergeMultipleEdges(*args, **kwargs):
pass
def EnterEditMode(*args, **kwargs):
pass
def SetCurrentColorSet(*args, **kwargs):
pass
def ToggleCurrentFrame(*args, **kwargs):
pass
def TrackTool(*args, **kwargs):
pass
def clearDynStartState(*args, **kwargs):
pass
def EnableIKSolvers(*args, **kwargs):
pass
def ThreeRightSplitViewArrangement(*args, **kwargs):
pass
def PaintEffectsToPolyOptions(*args, **kwargs):
pass
def ConvertSelectionToUVEdgeLoop(*args, **kwargs):
pass
def HypershadePickWalkDown(*args, **kwargs):
pass
def BridgeEdgeOptions(*args, **kwargs):
pass
def FloodSurfaces(*args, **kwargs):
pass
def ResetTransformations(*args, **kwargs):
pass
def UVSetEditor(*args, **kwargs):
pass
def BendOptions(*args, **kwargs):
pass
def CurveFilletOptions(*args, **kwargs):
pass
def SelectUVBorder(*args, **kwargs):
pass
def NURBSTexturePlacementTool(*args, **kwargs):
pass
def TranslateToolMarkingMenuPopDown(*args, **kwargs):
pass
def PolygonCollapseFaces(*args, **kwargs):
pass
def PasteKeysOptions(*args, **kwargs):
pass
def NURBSSmoothnessHullOptions(*args, **kwargs):
pass
def ToggleUseDefaultMaterial(*args, **kwargs):
pass
def NextSkinPaintMode(*args, **kwargs):
pass
def SelectMultiComponentMask(*args, **kwargs):
pass
def ActivateViewport20(*args, **kwargs):
pass
def NParticleTool(*args, **kwargs):
pass
def CreateBezierCurveTool(*args, **kwargs):
pass
def HypershadeMoveTabLeft(*args, **kwargs):
pass
def ComponentEditor(*args, **kwargs):
pass
def CancelBatchRender(*args, **kwargs):
pass
def ToggleStatusLine(*args, **kwargs):
pass
def MoveUp(*args, **kwargs):
pass
def BakeChannelOptions(*args, **kwargs):
pass
def CreateTextureReferenceObject(*args, **kwargs):
pass
def RemoveFromContainerOptions(*args, **kwargs):
pass
def HidePolygonSurfaces(*args, **kwargs):
pass
def MoveSewUVs(*args, **kwargs):
pass
def PreInfinityConstant(*args, **kwargs):
pass
def FullHotboxDisplay(*args, **kwargs):
pass
def KeyframeTangentMarkingMenu(*args, **kwargs):
pass
def MovePolygonComponentOptions(*args, **kwargs):
pass
def MoveNormalTool(*args, **kwargs):
pass
def InitialFluidStatesOptions(*args, **kwargs):
pass
def DisableExpressions(*args, **kwargs):
pass
def fluidDeleteCacheOpt(*args, **kwargs):
pass
def MoveNearestPickedKeyToolDeactivate(*args, **kwargs):
pass
def MoveNearestPickedKeyToolActivate(*args, **kwargs):
pass
def OffsetCurveOptions(*args, **kwargs):
pass
def AnimationTurntable(*args, **kwargs):
pass
def DeleteSelectedContainers(*args, **kwargs):
pass
def NURBSToPolygons(*args, **kwargs):
pass
def FlipTriangleEdge(*args, **kwargs):
pass
def NodeEditorShowCustomAttrs(*args, **kwargs):
pass
def RedoPreviousRender(*args, **kwargs):
pass
def Redo(*args, **kwargs):
pass
def MoveRotateScaleTool(*args, **kwargs):
pass
def CreatePolygonSphereOptions(*args, **kwargs):
pass
def CreateExpressionClipOptions(*args, **kwargs):
pass
def ShowStrokes(*args, **kwargs):
pass
def PlaybackToggle(*args, **kwargs):
pass
def SubdivSmoothnessFineOptions(*args, **kwargs):
pass
def ImportDeformerWeights(*args, **kwargs):
pass
def FBIKReachKeyingOptionFK(*args, **kwargs):
pass
def ImportOptions(*args, **kwargs):
pass
def fluidDeleteCacheFrames(*args, **kwargs):
pass
def AimConstraintOptions(*args, **kwargs):
pass
def ShowSelectedObjects(*args, **kwargs):
pass
def PerspGraphLayout(*args, **kwargs):
pass
def SquareSurfaceOptions(*args, **kwargs):
pass
def IKSplineHandleToolOptions(*args, **kwargs):
pass
def CommandShell(*args, **kwargs):
pass
def DeleteCurrentUVSet(*args, **kwargs):
pass
def ShowPolygonSurfaces(*args, **kwargs):
pass
def AddWire(*args, **kwargs):
pass
def SnapPointToPointOptions(*args, **kwargs):
pass
def Quit(*args, **kwargs):
pass
def fluidAppendOpt(*args, **kwargs):
pass
def AnimationSnapshotOptions(*args, **kwargs):
pass
def ShowNURBSCurves(*args, **kwargs):
pass
def AddToContainer(*args, **kwargs):
pass
def ProjectTangent(*args, **kwargs):
pass
def SetMeshImprintTool(*args, **kwargs):
pass
def ToggleLayerBar(*args, **kwargs):
pass
def ShowMeshWaxToolOptions(*args, **kwargs):
pass
def CreateNURBSPlane(*args, **kwargs):
pass
def ExtendCurveOptions(*args, **kwargs):
pass
def ShowWrapInfluences(*args, **kwargs):
pass
def nucleusDisplayTextureNodes(*args, **kwargs):
pass
def ToggleFBIKEffectorsRotatePinState(*args, **kwargs):
pass
def CreaseProxyEdgeTool(*args, **kwargs):
pass
def PixelMoveDown(*args, **kwargs):
pass
def HypershadeSelectUtilities(*args, **kwargs):
pass
def SmoothPolygonOptions(*args, **kwargs):
pass
def NodeEditorGraphRemoveDownstream(*args, **kwargs):
pass
def OutlinerToggleConnected(*args, **kwargs):
pass
def DecreaseGammaFine(*args, **kwargs):
pass
def HideHotbox(*args, **kwargs):
pass
def CopyMeshAttributes(*args, **kwargs):
pass
def NextGreasePencilFrame(*args, **kwargs):
pass
def OutlinerExpandAllItems(*args, **kwargs):
pass
def HIKLiveConnectionTool(*args, **kwargs):
pass
def AddKeysTool(*args, **kwargs):
pass
def ToggleChannelsLayers(*args, **kwargs):
pass
def SubdivSmoothnessHullOptions(*args, **kwargs):
pass
def EmitFromObject(*args, **kwargs):
pass
def HypershadeRenderPerspLayout(*args, **kwargs):
pass
def PaintEffectPanelDeactivate(*args, **kwargs):
pass
def Revolve(*args, **kwargs):
pass
def HypershadeOpenModelEditorWindow(*args, **kwargs):
pass
def NodeEditorReduceTraversalDepth(*args, **kwargs):
pass
def CutKeys(*args, **kwargs):
pass
def OptimzeUVsOptions(*args, **kwargs):
pass
def TwoSideBySideViewArrangement(*args, **kwargs):
pass
def SelectAllHairSystem(*args, **kwargs):
pass
def CreateVolumeSphere(*args, **kwargs):
pass
def MakeFluidCollide(*args, **kwargs):
pass
def OpenSceneOptions(*args, **kwargs):
pass
def PolygonBooleanDifferenceOptions(*args, **kwargs):
pass
def SelectSimilarOptions(*args, **kwargs):
pass
def MoveInfluence(*args, **kwargs):
pass
def ExtendSurfaces(*args, **kwargs):
pass
def LayoutUVRectangle(*args, **kwargs):
pass
def CreateCluster(*args, **kwargs):
pass
def DeleteAllStrokes(*args, **kwargs):
pass
def ReplaceObjects(*args, **kwargs):
pass
def CleanupPolygonOptions(*args, **kwargs):
pass
def HypershadeGraphRemoveUpstream(*args, **kwargs):
pass
def BendCurvesOptions(*args, **kwargs):
pass
def ImportAnimOptions(*args, **kwargs):
pass
def RenderViewNextImage(*args, **kwargs):
pass
def NodeEditorAddOnNodeCreate(*args, **kwargs):
pass
def ModifyPaintValuePress(*args, **kwargs):
pass
def DisplayShadedAndTextured(*args, **kwargs):
pass
def AutoProjection(*args, **kwargs):
pass
def CreateSubdivRegion(*args, **kwargs):
pass
def RenderLayerRelationshipEditor(*args, **kwargs):
pass
def ModifyDisplacementRelease(*args, **kwargs):
pass
def HideMarkers(*args, **kwargs):
pass
def CreateJiggleDeformer(*args, **kwargs):
pass
def RaiseApplicationWindows(*args, **kwargs):
pass
def FrameAllInAllViews(*args, **kwargs):
pass
def InteractiveSplitToolOptions(*args, **kwargs):
pass
def ToggleMeshMaps(*args, **kwargs):
pass
def CreateDirectionalLightOptions(*args, **kwargs):
pass
def SurfaceBooleanIntersectToolOptions(*args, **kwargs):
pass
def DetachSkeletonJoints(*args, **kwargs):
pass
def RemoveWireOptions(*args, **kwargs):
pass
def MirrorPolygonGeometryOptions(*args, **kwargs):
pass
def StraightenCurvesOptions(*args, **kwargs):
pass
def NonSacredTool(*args, **kwargs):
pass
def RemoveFromCharacterSet(*args, **kwargs):
pass
def RemoveShrinkWrapSurfaces(*args, **kwargs):
pass
def HypershadeConvertToFileTextureOptionBox(*args, **kwargs):
pass
def ReattachSkeleton(*args, **kwargs):
pass
def ToggleRotationPivots(*args, **kwargs):
pass
def RemoveInfluence(*args, **kwargs):
pass
def CreatePolygonPlane(*args, **kwargs):
pass
def AddPfxToHairSystem(*args, **kwargs):
pass
def PolyMerge(*args, **kwargs):
pass
def StraightenCurves(*args, **kwargs):
pass
def PruneSmallWeightsOptions(*args, **kwargs):
pass
def PixelMoveUp(*args, **kwargs):
pass
def attachNclothCache(*args, **kwargs):
pass
def IKHandleToolOptions(*args, **kwargs):
pass
def OutlinerRevealSelected(*args, **kwargs):
pass
def SoftModTool(*args, **kwargs):
pass
def PolyExtrudeFaces(*args, **kwargs):
pass
def SculptPolygonsToolOptions(*args, **kwargs):
pass
def NodeEditorAdditiveGraphingMode(*args, **kwargs):
pass
def attachCache(*args, **kwargs):
pass
def OpenCloseSurfacesOptions(*args, **kwargs):
pass
def DeleteAllSounds(*args, **kwargs):
pass
def PolyEditEdgeFlow(*args, **kwargs):
pass
def HypershadeSetTraversalDepthUnlim(*args, **kwargs):
pass
def ProjectCurveOnSurface(*args, **kwargs):
pass
def PublishNode(*args, **kwargs):
pass
def ModifyConstraintAxisOptions(*args, **kwargs):
pass
def PolyConvertToRingAndCollapse(*args, **kwargs):
pass
def Goal(*args, **kwargs):
pass
def ToggleHulls(*args, **kwargs):
pass
def SetPassiveKey(*args, **kwargs):
pass
def AddEdgeDivisions(*args, **kwargs):
pass
def CreateNSoftBodyOptions(*args, **kwargs):
pass
def HypershadeSortReverseOrder(*args, **kwargs):
pass
def CopyFlexor(*args, **kwargs):
pass
def PokePolygon(*args, **kwargs):
pass
def HypershadeSelectCamerasAndImagePlanes(*args, **kwargs):
pass
def DeleteAllHistory(*args, **kwargs):
pass
def SetMeshRelaxTool(*args, **kwargs):
pass
def NodeEditorExportCompound(*args, **kwargs):
pass
def CycleIKHandleStickyState(*args, **kwargs):
pass
def HideDeformers(*args, **kwargs):
pass
def MergeVertices(*args, **kwargs):
pass
def SetMeshGrabTool(*args, **kwargs):
pass
def SelectEdgeMask(*args, **kwargs):
pass
def AddFaceDivisionsOptions(*args, **kwargs):
pass
def EnableRigidBodies(*args, **kwargs):
pass
def SetMeshBulgeTool(*args, **kwargs):
pass
def HideDynamicConstraints(*args, **kwargs):
pass
def ToggleAutoFrame(*args, **kwargs):
pass
def PaintGeomCacheToolOptions(*args, **kwargs):
pass
def PaintEffectsToNurbs(*args, **kwargs):
pass
def SelectCurveCVsAll(*args, **kwargs):
pass
def EPCurveToolOptions(*args, **kwargs):
pass
def BrushPresetBlendShape(*args, **kwargs):
pass
def HideCameraManipulators(*args, **kwargs):
pass
def OutlinerToggleReferenceNodes(*args, **kwargs):
pass
def RestoreUIElements(*args, **kwargs):
pass
def geometryMergeCache(*args, **kwargs):
pass
def SelectComponentToolMarkingMenu(*args, **kwargs):
pass
def SetKey(*args, **kwargs):
pass
def NodeEditorHideAttributes(*args, **kwargs):
pass
def CurveUtilitiesMarkingMenu(*args, **kwargs):
pass
def fluidReplaceFrames(*args, **kwargs):
pass
def PolygonSelectionConstraints(*args, **kwargs):
pass
def SelectAllTransforms(*args, **kwargs):
pass
def HIKPinRotate(*args, **kwargs):
pass
def nucleusDisplayTransformNodes(*args, **kwargs):
pass
def MakeBoats(*args, **kwargs):
pass
def HIKSelectedMode(*args, **kwargs):
pass
def SelectAllRigidBodies(*args, **kwargs):
pass
def PolyRemoveAllCrease(*args, **kwargs):
pass
def AddBoatLocator(*args, **kwargs):
pass
def TranslateToolMarkingMenu(*args, **kwargs):
pass
def ToggleIKHandleSnap(*args, **kwargs):
pass
def PanelPreferencesWindow(*args, **kwargs):
pass
def DopeSheetEditor(*args, **kwargs):
pass
def ShowMeshSprayToolOptions(*args, **kwargs):
pass
def MirrorCutPolygonGeometry(*args, **kwargs):
pass
def SurfaceFlowOptions(*args, **kwargs):
pass
def ChangeUVSize(*args, **kwargs):
pass
def SelectAllNURBSCurves(*args, **kwargs):
pass
def BridgeEdge(*args, **kwargs):
pass
def BakeSpringAnimationOptions(*args, **kwargs):
pass
def ToggleHelpLine(*args, **kwargs):
pass
def MergeUVOptions(*args, **kwargs):
pass
def HideSubdivSurfaces(*args, **kwargs):
pass
def ToolSettingsWindow(*args, **kwargs):
pass
def BreakLightLinks(*args, **kwargs):
pass
def CreateSpotLightOptions(*args, **kwargs):
pass
def GetFluidExample(*args, **kwargs):
pass
def ToggleSoftEdges(*args, **kwargs):
pass
def CreateGhostOptions(*args, **kwargs):
pass
def nucleusDisplayMaterialNodes(*args, **kwargs):
pass
def HideStrokes(*args, **kwargs):
pass
def InsertKeyToolDeactivate(*args, **kwargs):
pass
def MarkingMenuPreferencesWindow(*args, **kwargs):
pass
def EmitFluidFromObject(*args, **kwargs):
pass
def SubdivSurfaceMatchTopology(*args, **kwargs):
pass
def OpenCloseSurfaces(*args, **kwargs):
pass
def ShowMeshScrapeToolOptions(*args, **kwargs):
pass
def MakePressureCurveOptions(*args, **kwargs):
pass
def DeleteUVs(*args, **kwargs):
pass
def HypershadeDisplayAllShapes(*args, **kwargs):
pass
def TogglePolyCount(*args, **kwargs):
pass
def BevelPlus(*args, **kwargs):
pass
def NodeEditorToggleNodeTitleMode(*args, **kwargs):
pass
def RelaxInitialState(*args, **kwargs):
pass
def MakePondBoats(*args, **kwargs):
pass
def HypershadeAdditiveGraphingMode(*args, **kwargs):
pass
def MoveToolOptions(*args, **kwargs):
pass
def TogglePolyDisplayHardEdgesColor(*args, **kwargs):
pass
def FineLevelComponentDisplay(*args, **kwargs):
pass
def InTangentPlateau(*args, **kwargs):
pass
def PickWalkDown(*args, **kwargs):
pass
def SelectAllGeometry(*args, **kwargs):
pass
def HypershadeToggleUseAssetsAndPublishedAttributes(*args, **kwargs):
pass
def OutTangentLinear(*args, **kwargs):
pass
def RepeatLastActionAtMousePosition(*args, **kwargs):
pass
def SoftModDeformerOptions(*args, **kwargs):
pass
def SelectAllParticles(*args, **kwargs):
pass
def Create3DContainerEmitter(*args, **kwargs):
pass
def RemoveShrinkWrapInnerObject(*args, **kwargs):
pass
def MoveIKtoFK(*args, **kwargs):
pass
def PruneSmallWeights(*args, **kwargs):
pass
def SetMeshSmearTool(*args, **kwargs):
pass
def nClothCache(*args, **kwargs):
pass
def GraphPaste(*args, **kwargs):
pass
def MirrorPolygonGeometry(*args, **kwargs):
pass
def CreateNURBSTorusOptions(*args, **kwargs):
pass
def ShelfPreferencesWindow(*args, **kwargs):
pass
def ToggleFbikDetails(*args, **kwargs):
pass
def SimplifyCurveOptions(*args, **kwargs):
pass
def deleteHistoryAheadOfGeomCache(*args, **kwargs):
pass
def ExportAnimOptions(*args, **kwargs):
pass
def OutlinerExpandAllSelectedItems(*args, **kwargs):
pass
def ShareColorInstances(*args, **kwargs):
pass
def ConvertSelectionToUVPerimeter(*args, **kwargs):
pass
def SmoothingDisplayToggle(*args, **kwargs):
pass
def HypershadeOpenRenderViewWindow(*args, **kwargs):
pass
def UntrimSurfacesOptions(*args, **kwargs):
pass
def SetVertexNormal(*args, **kwargs):
pass
def CreateCurveFromPolyOptions(*args, **kwargs):
pass
def UniversalManip(*args, **kwargs):
pass
def UnpublishNode(*args, **kwargs):
pass
def HIKToggleReleasePinning(*args, **kwargs):
pass
def MergeEdgeTool(*args, **kwargs):
pass
def SetShrinkWrapTarget(*args, **kwargs):
pass
def ToggleCullingVertices(*args, **kwargs):
pass
def UnmirrorSmoothProxyOptions(*args, **kwargs):
pass
def ShowJoints(*args, **kwargs):
pass
def NodeEditorShapeMenuStateNoShapes(*args, **kwargs):
pass
def EnableFluids(*args, **kwargs):
pass
def UniversalManipOptions(*args, **kwargs):
pass
def ThreePointArcTool(*args, **kwargs):
pass
def PaintEffectsToNurbsOptions(*args, **kwargs):
pass
def RotateToolMarkingMenuPopDown(*args, **kwargs):
pass
def BreakTangents(*args, **kwargs):
pass
def CreateClip(*args, **kwargs):
pass
def ResetSoftSelectOptions(*args, **kwargs):
pass
def geometryAppendCacheOpt(*args, **kwargs):
pass
def UIModeMarkingMenuPopDown(*args, **kwargs):
pass
def NodeEditorCreateCompound(*args, **kwargs):
pass
def CurveEditTool(*args, **kwargs):
pass
def UnfoldUVOptions(*args, **kwargs):
pass
def TransformPolygonComponentOptions(*args, **kwargs):
pass
def PolygonCollapse(*args, **kwargs):
pass
def NodeEditorPinSelected(*args, **kwargs):
pass
def CreateConstructionPlaneOptions(*args, **kwargs):
pass
def CreateBlendShapeOptions(*args, **kwargs):
pass
def UncreaseSubdivSurface(*args, **kwargs):
pass
def SelectMaskToolMarkingMenuPopDown(*args, **kwargs):
pass
def ClosestPointOnOptions(*args, **kwargs):
pass
def CreateAreaLight(*args, **kwargs):
pass
def SaveSceneOptions(*args, **kwargs):
pass
def HypershadeIncreaseTraversalDepth(*args, **kwargs):
pass
def ToggleCompIDs(*args, **kwargs):
pass
def HypershadeDisplayNoShapes(*args, **kwargs):
pass
def CreateTextureDeformer(*args, **kwargs):
pass
def HideObjectGeometry(*args, **kwargs):
pass
def ToggleVertMetadata(*args, **kwargs):
pass
def RetimeKeysToolOptions(*args, **kwargs):
pass
def CreateSet(*args, **kwargs):
pass
def JointToolOptions(*args, **kwargs):
pass
def IncrementFluidCenter(*args, **kwargs):
pass
def DisableAll(*args, **kwargs):
pass
def OffsetCurveOnSurface(*args, **kwargs):
pass
def DeletePolyElements(*args, **kwargs):
pass
def ActivateGlobalScreenSlider(*args, **kwargs):
pass
def AttachSelectedAsSourceField(*args, **kwargs):
pass
def DisplayShadingMarkingMenu(*args, **kwargs):
pass
def Art3dPaintToolOptions(*args, **kwargs):
pass
def CreatePolygonSoccerBallOptions(*args, **kwargs):
pass
def ZoomTool(*args, **kwargs):
pass
def TogglePaintAtDepth(*args, **kwargs):
pass
def PlaybackForward(*args, **kwargs):
pass
def PolygonNormalEditTool(*args, **kwargs):
pass
def ExtrudeVertex(*args, **kwargs):
pass
def OutlinerToggleReferenceMembers(*args, **kwargs):
pass
def PerformanceSettingsWindow(*args, **kwargs):
pass
def SelectAll(*args, **kwargs):
pass
def SpreadSheetEditor(*args, **kwargs):
pass
def CombinePolygons(*args, **kwargs):
pass
def DeleteConstraints(*args, **kwargs):
pass
def PencilCurveToolOptions(*args, **kwargs):
pass
def QualityDisplayMarkingMenu(*args, **kwargs):
pass
def CreateSubdivSurfaceOptions(*args, **kwargs):
pass
def AnimLayerRelationshipEditor(*args, **kwargs):
pass
def GraphDeleteOptions(*args, **kwargs):
pass
def ToggleLatticePoints(*args, **kwargs):
pass
def WireTool(*args, **kwargs):
pass
def CreateNURBSCylinder(*args, **kwargs):
pass
def ExtendCurveOnSurface(*args, **kwargs):
pass
def ParticleInstancer(*args, **kwargs):
pass
def ToggleEvaluationManagerVisibility(*args, **kwargs):
pass
def CopyUVsToUVSet(*args, **kwargs):
pass
def WhatsNewHighlightingOn(*args, **kwargs):
pass
def HypershadeSelectTextures(*args, **kwargs):
pass
def PaintEffectsToolOptions(*args, **kwargs):
pass
def SmoothHairCurvesOptions(*args, **kwargs):
pass
def NodeEditorGraphNoShapes(*args, **kwargs):
pass
def WaveOptions(*args, **kwargs):
pass
def SelectToolOptionsMarkingMenu(*args, **kwargs):
pass
def DecreaseExposureFine(*args, **kwargs):
pass
def HideGeometry(*args, **kwargs):
pass
def Parent(*args, **kwargs):
pass
def nClothDeleteHistory(*args, **kwargs):
pass
def NewtonOptions(*args, **kwargs):
pass
def SelectTimeWarp(*args, **kwargs):
pass
def VortexOptions(*args, **kwargs):
pass
def AddKeyToolActivate(*args, **kwargs):
pass
def PaintVertexColorToolOptions(*args, **kwargs):
pass
def nClothCreate(*args, **kwargs):
pass
def HypershadeDuplicateShadingNetwork(*args, **kwargs):
pass
def ToggleCapsLockDisplay(*args, **kwargs):
pass
def VisorWindow(*args, **kwargs):
pass
def PaintReduceWeightsTool(*args, **kwargs):
pass
def HypershadeRenameActiveTab(*args, **kwargs):
pass
def UpdateReferenceSurface(*args, **kwargs):
pass
def PaintCacheToolOptions(*args, **kwargs):
pass
def ReverseSurfaceDirection(*args, **kwargs):
pass
def HypershadeOpenCreateWindow(*args, **kwargs):
pass
def BlendShapeEditor(*args, **kwargs):
pass
def SculptGeometryTool(*args, **kwargs):
pass
def TwoPointArcTool(*args, **kwargs):
pass
def MakeCurvesDynamic(*args, **kwargs):
pass
def PolygonApplyColorOptions(*args, **kwargs):
pass
def SelectShortestEdgePathTool(*args, **kwargs):
pass
def ScaleKeysOptions(*args, **kwargs):
pass
def LayerRelationshipEditor(*args, **kwargs):
pass
def ParentOptions(*args, **kwargs):
pass
def DuplicateEdgesOptions(*args, **kwargs):
pass
def Create2DContainerEmitter(*args, **kwargs):
pass
def CircularFilletOptions(*args, **kwargs):
pass
def DisableRigidBodies(*args, **kwargs):
pass
def HypershadeGraphRemoveSelected(*args, **kwargs):
pass
def Bend(*args, **kwargs):
pass
def TexSewActivateBrushSize(*args, **kwargs):
pass
def HoldCurrentKeys(*args, **kwargs):
pass
def SavePreferences(*args, **kwargs):
pass
def AutoPaintMarkingMenu(*args, **kwargs):
pass
def GlobalStitch(*args, **kwargs):
pass
def HypershadeDuplicateWithConnections(*args, **kwargs):
pass
def TexSculptActivateBrushStrength(*args, **kwargs):
pass
def ToggleTimeSlider(*args, **kwargs):
pass
def CreateImagePlane(*args, **kwargs):
pass
def STRSTweakModeOn(*args, **kwargs):
pass
def FourViewLayout(*args, **kwargs):
pass
def TesselateSubdivSurface(*args, **kwargs):
pass
def CreateDagContainerOptions(*args, **kwargs):
pass
def geometryDeleteCacheOpt(*args, **kwargs):
pass
def SubstituteGeometryOptions(*args, **kwargs):
pass
def TangetConstraint(*args, **kwargs):
pass
def DetachEdgeComponent(*args, **kwargs):
pass
def ProfilerToolShowAll(*args, **kwargs):
pass
def StitchSurfacePointsOptions(*args, **kwargs):
pass
def NodeEditorUnpinSelected(*args, **kwargs):
pass
def geometryCache(*args, **kwargs):
pass
def AssignHairConstraint(*args, **kwargs):
pass
def ProfilerToolCategoryView(*args, **kwargs):
pass
def HypershadeConvertPSDToLayeredTexture(*args, **kwargs):
pass
def AssignOfflineFileFromRefEdOptions(*args, **kwargs):
pass
def TangentsFlat(*args, **kwargs):
pass
def InteractivePlayback(*args, **kwargs):
pass
def DeltaMush(*args, **kwargs):
pass
def RandomizeFolliclesOptions(*args, **kwargs):
pass
def ToggleRangeSlider(*args, **kwargs):
pass
def ArtPaintSelectToolOptions(*args, **kwargs):
pass
def CreatePolygonPipe(*args, **kwargs):
pass
def Flare(*args, **kwargs):
pass
def CreateHairOptions(*args, **kwargs):
pass
def ToggleMeshPoints(*args, **kwargs):
pass
def PixelMoveLeft(*args, **kwargs):
pass
def SelectAllLights(*args, **kwargs):
pass
def ArtPaintAttrTool(*args, **kwargs):
pass
def SurfaceEditingTool(*args, **kwargs):
pass
def PreloadReferenceEditor(*args, **kwargs):
pass
def ConvertInstanceToObject(*args, **kwargs):
pass
def ShowMeshGrabUVToolOptions(*args, **kwargs):
pass
def SculptMeshUnfreezeAll(*args, **kwargs):
pass
def SplitEdge(*args, **kwargs):
pass
def PreInfinityOscillate(*args, **kwargs):
pass
def ToggleFocalLength(*args, **kwargs):
pass
def DeleteAllSculptObjects(*args, **kwargs):
pass
def ShowMeshFillToolOptions(*args, **kwargs):
pass
def HIKCharacterControlsTool(*args, **kwargs):
pass
def Air(*args, **kwargs):
pass
def CreateParticleDiskCacheOptions(*args, **kwargs):
pass
def SetKeyTranslate(*args, **kwargs):
pass
def CreateAnnotateNode(*args, **kwargs):
pass
def SetInitialState(*args, **kwargs):
pass
def CreateMotionTrailOptions(*args, **kwargs):
pass
def GoToBindPose(*args, **kwargs):
pass
def HypershadeSortByTime(*args, **kwargs):
pass
def ShowKinematics(*args, **kwargs):
pass
def HypershadeDeleteNodes(*args, **kwargs):
pass
def ConvertToFrozen(*args, **kwargs):
pass
def SetFullBodyIKKeysBodyPart(*args, **kwargs):
pass
def HypershadeSaveSwatchesToDisk(*args, **kwargs):
pass
def DeleteAllFluids(*args, **kwargs):
pass
def CreateContainer(*args, **kwargs):
pass
def SlideEdgeTool(*args, **kwargs):
pass
def Unparent(*args, **kwargs):
pass
def SetFluidAttrFromCurveOptions(*args, **kwargs):
pass
def ShowDynamicsUI(*args, **kwargs):
pass
def CycleBackgroundColor(*args, **kwargs):
pass
def AddPondDynamicLocator(*args, **kwargs):
pass
def AddAnimationOffsetOptions(*args, **kwargs):
pass
def SetCurrentUVSet(*args, **kwargs):
pass
def AddEdgeDivisionsOptions(*args, **kwargs):
pass
def EnableNucleuses(*args, **kwargs):
pass
def SequenceEditor(*args, **kwargs):
pass
def PaintFluidsToolOptions(*args, **kwargs):
pass
def ToggleVisibilityAndKeepSelection(*args, **kwargs):
pass
def DynamicRelationshipEditor(*args, **kwargs):
pass
def HypershadePinSelected(*args, **kwargs):
pass
def BrushPresetBlendShading(*args, **kwargs):
pass
def ToggleMeshUVBorders(*args, **kwargs):
pass
def SelectedAnimLayer(*args, **kwargs):
pass
def ResolveInterpenetration(*args, **kwargs):
pass
def geometryExportCache(*args, **kwargs):
pass
def NodeEditorGridToggleSnap(*args, **kwargs):
pass
def NURBSTexturePlacementToolOptions(*args, **kwargs):
pass
def FloatSelectedObjects(*args, **kwargs):
pass
def Triangulate(*args, **kwargs):
pass
def PolygonPaste(*args, **kwargs):
pass
def RenderDiagnostics(*args, **kwargs):
pass
def LoopBrushAnimationOptions(*args, **kwargs):
pass
def CreateDagContainer(*args, **kwargs):
pass
def NURBSSmoothnessMedium(*args, **kwargs):
pass
def DuplicateCurveOptions(*args, **kwargs):
pass
def PolyMergeVertices(*args, **kwargs):
pass
def SelectPointsMask(*args, **kwargs):
pass
def AddBlendShape(*args, **kwargs):
pass
def ConnectNodeToIKFK(*args, **kwargs):
pass
def CreateCameraAim(*args, **kwargs):
pass
def DoUnghostOptions(*args, **kwargs):
pass
def ConformPolygonNormals(*args, **kwargs):
pass
def SurfaceEditingToolOptions(*args, **kwargs):
pass
def ChangeFullBodyPivotPlacement(*args, **kwargs):
pass
def BakeSimulationOptions(*args, **kwargs):
pass
def CombinePolygonsOptions(*args, **kwargs):
pass
def StitchEdgesTool(*args, **kwargs):
pass
def GridOptions(*args, **kwargs):
pass
def ToggleShelf(*args, **kwargs):
pass
def RenameAttribute(*args, **kwargs):
pass
def PreferencesWindow(*args, **kwargs):
pass
def GeometryToBoundingBoxOptions(*args, **kwargs):
pass
def RemoveConstraintTarget(*args, **kwargs):
pass
def ToggleShowBufferCurves(*args, **kwargs):
pass
def CreateFluidCacheOptions(*args, **kwargs):
pass
def MoveRight(*args, **kwargs):
pass
def ExtrudeEdgeOptions(*args, **kwargs):
pass
def DeleteHairCache(*args, **kwargs):
pass
def SubdivSurfaceCleanTopology(*args, **kwargs):
pass
def OpenCloseCurve(*args, **kwargs):
pass
def DeleteTextureReferenceObject(*args, **kwargs):
pass
def HypershadeDeleteSelected(*args, **kwargs):
pass
def NodeEditorToggleNodeSelectedPins(*args, **kwargs):
pass
def RefineSelectedComponents(*args, **kwargs):
pass
def AnimationSnapshot(*args, **kwargs):
pass
def RemoveWire(*args, **kwargs):
pass
def MoveSkinJointsToolOptions(*args, **kwargs):
pass
def CreatePond(*args, **kwargs):
pass
def ShowSubdivSurfaces(*args, **kwargs):
pass
def TogglePolyDisplayEdges(*args, **kwargs):
pass
def PointConstraintOptions(*args, **kwargs):
pass
def RebuildCurveOptions(*args, **kwargs):
pass
def CreatePolyFromPreview(*args, **kwargs):
pass
def FilletBlendTool(*args, **kwargs):
pass
def InTangentFlat(*args, **kwargs):
pass
def ShowShadingGroupAttributeEditor(*args, **kwargs):
pass
def PickColorActivate(*args, **kwargs):
pass
def SelectAllFollicles(*args, **kwargs):
pass
def SubdCutUVs(*args, **kwargs):
pass
def HypershadeToggleShowNamespace(*args, **kwargs):
pass
def OutTangentFixed(*args, **kwargs):
pass
def AddWireOptions(*args, **kwargs):
pass
def ShowRenderingUI(*args, **kwargs):
pass
def SnapToPoint(*args, **kwargs):
pass
def HotkeyPreferencesWindow(*args, **kwargs):
pass
def RemoveShrinkWrapTarget(*args, **kwargs):
pass
def HypershadeWindow(*args, **kwargs):
pass
def DeleteAllMotionPaths(*args, **kwargs):
pass
def ModifyDisplacementPress(*args, **kwargs):
pass
def ShowNURBSSurfaces(*args, **kwargs):
pass
def PruneLattice(*args, **kwargs):
pass
def GravityOptions(*args, **kwargs):
pass
def MirrorJoint(*args, **kwargs):
pass
def CreateNURBSSquareOptions(*args, **kwargs):
pass
def ExtractFace(*args, **kwargs):
pass
def PlanarOptions(*args, **kwargs):
pass
def ToggleFastInteraction(*args, **kwargs):
pass
def HypershadeToggleNodeTitleMode(*args, **kwargs):
pass
def Create2DContainerOptions(*args, **kwargs):
pass
def ShowMeshSculptToolOptions(*args, **kwargs):
pass
def ExpandSelectedComponents(*args, **kwargs):
pass
def HypershadeShapeMenuStateAll(*args, **kwargs):
pass
def ToggleModelEditorBars(*args, **kwargs):
pass
def SmoothTangent(*args, **kwargs):
pass
def OutlinerToggleDAGOnly(*args, **kwargs):
pass
def Delete(*args, **kwargs):
pass
def RemoveLatticeTweaks(*args, **kwargs):
pass
def NodeEditorAddIterationStatePorts(*args, **kwargs):
pass
def CreateNURBSSquare(*args, **kwargs):
pass
def AddOceanDynamicLocatorOptions(*args, **kwargs):
pass
def AddPointsTool(*args, **kwargs):
pass
def ToggleCreaseEdges(*args, **kwargs):
pass
def EnableDynamicConstraints(*args, **kwargs):
pass
def CreateNURBSCylinderOptions(*args, **kwargs):
pass
def ThreeBottomSplitViewArrangement(*args, **kwargs):
pass
def PaintEffectsToCurveOptions(*args, **kwargs):
pass
def OffsetEdgeLoopTool(*args, **kwargs):
pass
def HypershadeOpenSpreadSheetWindow(*args, **kwargs):
pass
def NodeEditorSelectDownStream(*args, **kwargs):
pass
def ResetLattice(*args, **kwargs):
pass
def U3DBrushSizeOn(*args, **kwargs):
pass
def CurlCurves(*args, **kwargs):
pass
def OpenVisorForMeshes(*args, **kwargs):
pass
def TransformNoSelectOnTool(*args, **kwargs):
pass
def PolygonClearClipboard(*args, **kwargs):
pass
def SelectTool(*args, **kwargs):
pass
def SelectAllFluids(*args, **kwargs):
pass
def SphericalProjection(*args, **kwargs):
pass
def MoveLeft(*args, **kwargs):
pass
def LightningOptions(*args, **kwargs):
pass
def ReplaceObjectsOptions(*args, **kwargs):
pass
def SelectLinesMask(*args, **kwargs):
pass
def AbortCurrentTool(*args, **kwargs):
pass
def CloseFrontWindow(*args, **kwargs):
pass
def DisplayLight(*args, **kwargs):
pass
def HypershadeHideAttributes(*args, **kwargs):
pass
def RenderViewPrevImage(*args, **kwargs):
pass
def CVCurveToolOptions(*args, **kwargs):
pass
def HypergraphIncreaseDepth(*args, **kwargs):
pass
def RenderFlagsWindow(*args, **kwargs):
pass
def ExportSkinWeightMaps(*args, **kwargs):
pass
def AveragePolygonNormalsOptions(*args, **kwargs):
pass
def CreateText(*args, **kwargs):
pass
def RenderOptions(*args, **kwargs):
pass
def ModifyLowerRadiusPress(*args, **kwargs):
pass
def CreateMotionTrail(*args, **kwargs):
pass
def CreateNURBSConeOptions(*args, **kwargs):
pass
def ExportOptions(*args, **kwargs):
pass
def CreateSculptDeformer(*args, **kwargs):
pass
def FreeformFilletOptions(*args, **kwargs):
pass
def InvertSelection(*args, **kwargs):
pass
def ModelingPanelUndoViewChange(*args, **kwargs):
pass
def CreateEmptyGroup(*args, **kwargs):
pass
def StitchEdgesToolOptions(*args, **kwargs):
pass
def PaintGridOptions(*args, **kwargs):
pass
def RemoveWrapInfluence(*args, **kwargs):
pass
def IncreaseManipulatorSize(*args, **kwargs):
pass
def DetachVertexComponent(*args, **kwargs):
pass
def SquareSurface(*args, **kwargs):
pass
def SubdivProxyOptions(*args, **kwargs):
pass
def ObjectCentricLightLinkingEditor(*args, **kwargs):
pass
def HypershadeCreateTab(*args, **kwargs):
pass
def NURBSSmoothnessRough(*args, **kwargs):
pass
def MirrorDeformerWeightsOptions(*args, **kwargs):
pass
def RebuildSurfacesOptions(*args, **kwargs):
pass
def PublishConnections(*args, **kwargs):
pass
def DeleteAllNParticles(*args, **kwargs):
pass
def ArchiveSceneOptions(*args, **kwargs):
pass
def CreatePolygonPyramidOptions(*args, **kwargs):
pass
def FloatSelectedPondObjects(*args, **kwargs):
pass
def PolyMergeEdges(*args, **kwargs):
pass
def PlanarProjectionOptions(*args, **kwargs):
pass
def ExtrudeFaceOptions(*args, **kwargs):
pass
def Import(*args, **kwargs):
pass
def ConvertSelectionToShellBorder(*args, **kwargs):
pass
def AddOceanDynamicLocator(*args, **kwargs):
pass
def PolyExtrudeFacesOptions(*args, **kwargs):
pass
def SculptSurfacesTool(*args, **kwargs):
pass
def SplitUV(*args, **kwargs):
pass
def CollapseSubdivSurfaceHierarchyOptions(*args, **kwargs):
pass
def RenderIntoNewWindow(*args, **kwargs):
pass
def PolyEditEdgeFlowOptions(*args, **kwargs):
pass
def Quadrangulate(*args, **kwargs):
pass
def PolyConvertToRingAndSplit(*args, **kwargs):
pass
def AlignUV(*args, **kwargs):
pass
def CreatePointLightOptions(*args, **kwargs):
pass
def ProjectCurveOnMesh(*args, **kwargs):
pass
def GraphCutOptions(*args, **kwargs):
pass
def ToggleKeepHardEdgeCulling(*args, **kwargs):
pass
def ClearInitialState(*args, **kwargs):
pass
def CreateNURBSCube(*args, **kwargs):
pass
def ExpressionEditor(*args, **kwargs):
pass
def PolyBrushMarkingMenu(*args, **kwargs):
pass
def ToggleEdgeMetadata(*args, **kwargs):
pass
def CopySkinWeights(*args, **kwargs):
pass
def SetMeshSprayTool(*args, **kwargs):
pass
def PolyCreaseTool(*args, **kwargs):
pass
def PokePolygonOptions(*args, **kwargs):
pass
def HypershadeSelectObjectsWithMaterials(*args, **kwargs):
pass
def SetMeshRepeatTool(*args, **kwargs):
pass
def SmoothCurveOptions(*args, **kwargs):
pass
def NodeEditorGraphClearGraph(*args, **kwargs):
pass
def PointOnCurve(*args, **kwargs):
pass
def DeactivateGlobalScreenSliderModeMarkingMenu(*args, **kwargs):
pass
def HideFollicles(*args, **kwargs):
pass
def SetMeshGrabUVTool(*args, **kwargs):
pass
def NewSceneOptions(*args, **kwargs):
pass
def SelectEdgeRing(*args, **kwargs):
pass
def MakePondMotorBoatsOptions(*args, **kwargs):
pass
def SetMeshFillTool(*args, **kwargs):
pass
def SetPreferredAngleOptions(*args, **kwargs):
pass
def PaintOperationMarkingMenuPress(*args, **kwargs):
pass
def SelectCurveCVsFirst(*args, **kwargs):
pass
def PolyConvertToLoopAndDuplicate(*args, **kwargs):
pass
def EditOversamplingForCacheSettings(*args, **kwargs):
pass
def HypershadeRemoveAsset(*args, **kwargs):
pass
def SetPreferredAngle(*args, **kwargs):
pass
def HideCameras(*args, **kwargs):
pass
def PFXUVSetLinkingEditor(*args, **kwargs):
pass
def PolyConvertToLoopAndDelete(*args, **kwargs):
pass
def GetSettingsFromSelectedStroke(*args, **kwargs):
pass
def SelectComponentToolMarkingMenuPopDown(*args, **kwargs):
pass
def Birail3(*args, **kwargs):
pass
def CustomPolygonDisplay(*args, **kwargs):
pass
def Twist(*args, **kwargs):
pass
def PostInfinityConstant(*args, **kwargs):
pass
def SelectAllWires(*args, **kwargs):
pass
def GrowPolygonSelectionRegion(*args, **kwargs):
pass
def ToggleLatticeShape(*args, **kwargs):
pass
def PolySpinEdgeForward(*args, **kwargs):
pass
def SelectSharedColorInstances(*args, **kwargs):
pass
def SelectAllRigidConstraints(*args, **kwargs):
pass
def LatticeDeformKeysTool(*args, **kwargs):
pass
def ParentConstraint(*args, **kwargs):
pass
def SelectAllNURBSSurfaces(*args, **kwargs):
pass
def TangentsClamped(*args, **kwargs):
pass
def CharacterSetEditor(*args, **kwargs):
pass
def PolyAssignSubdivHoleOptions(*args, **kwargs):
pass
def ToggleHikDetails(*args, **kwargs):
pass
def DisableMemoryCaching(*args, **kwargs):
pass
def BatchRender(*args, **kwargs):
pass
def MergeVertexTool(*args, **kwargs):
pass
def HideWrapInfluences(*args, **kwargs):
pass
def AttachToPathOptions(*args, **kwargs):
pass
def CreateSubdivCone(*args, **kwargs):
pass
def GhostObject(*args, **kwargs):
pass
def CreateIllustratorCurves(*args, **kwargs):
pass
def FluidGradientsOptions(*args, **kwargs):
pass
def InteractiveBindSkin(*args, **kwargs):
pass
def SetMeshSmoothTool(*args, **kwargs):
pass
def PoleVectorConstraint(*args, **kwargs):
pass
def SubdividePolygonOptions(*args, **kwargs):
pass
def DetachCurve(*args, **kwargs):
pass
def BevelPlusOptions(*args, **kwargs):
pass
def NodeEditorToggleZoomOut(*args, **kwargs):
pass
def RemoveBlendShapeOptions(*args, **kwargs):
pass
def MakePondBoatsOptions(*args, **kwargs):
pass
def HypershadeConnectSelected(*args, **kwargs):
pass
def CreateLatticeOptions(*args, **kwargs):
pass
def RaiseMainWindow(*args, **kwargs):
pass
def TogglePolygonFaceTriangles(*args, **kwargs):
pass
def MakeMotionField(*args, **kwargs):
pass
def CreatePolygonHelix(*args, **kwargs):
pass
def FitBSpline(*args, **kwargs):
pass
def HypershadeRevertSelectedSwatches(*args, **kwargs):
pass
def ToggleMeshFaces(*args, **kwargs):
pass
def PickWalkUp(*args, **kwargs):
pass
def HypershadeUnpinSelected(*args, **kwargs):
pass
def ContourProjection(*args, **kwargs):
pass
def HypershadeRenameTab(*args, **kwargs):
pass
def CustomNURBSSmoothness(*args, **kwargs):
pass
def SculptMeshFrame(*args, **kwargs):
pass
def ShowBoundingBox(*args, **kwargs):
pass
def DeleteAllRigidBodies(*args, **kwargs):
pass
def CurveSmoothnessMedium(*args, **kwargs):
pass
def PublishChildAnchor(*args, **kwargs):
pass
def SetNormalAngle(*args, **kwargs):
pass
def ShowAllUI(*args, **kwargs):
pass
def nClothCacheOpt(*args, **kwargs):
pass
def MirrorSubdivSurfaceOptions(*args, **kwargs):
pass
def ShortPolygonNormals(*args, **kwargs):
pass
def GoToMinFrame(*args, **kwargs):
pass
def ToggleGrid(*args, **kwargs):
pass
def CreateAmbientLight(*args, **kwargs):
pass
def deleteNclothCache(*args, **kwargs):
pass
def ExportOfflineFileFromRefEdOptions(*args, **kwargs):
pass
def HypershadeShowDirectoriesOnly(*args, **kwargs):
pass
def ShareOneBrush(*args, **kwargs):
pass
def ConvertSelectionToVertices(*args, **kwargs):
pass
def Snap3PointsTo3Points(*args, **kwargs):
pass
def UpdateBindingSet(*args, **kwargs):
pass
def AnimationTurntableOptions(*args, **kwargs):
pass
def SelectHierarchy(*args, **kwargs):
pass
def DeleteAllDynamicConstraints(*args, **kwargs):
pass
def AddDivisions(*args, **kwargs):
pass
def SetVertexNormalOptions(*args, **kwargs):
pass
def NodeEditorCreateDoWhileCompound(*args, **kwargs):
pass
def ConnectJoint(*args, **kwargs):
pass
def UnpublishParentAnchor(*args, **kwargs):
pass
def CutSelected(*args, **kwargs):
pass
def HideBoundingBox(*args, **kwargs):
pass
def MergeUV(*args, **kwargs):
pass
def SetStrokeControlCurves(*args, **kwargs):
pass
def CreateContainerOptions(*args, **kwargs):
pass
def MakeLightLinks(*args, **kwargs):
pass
def EnableNParticles(*args, **kwargs):
pass
def CreateConstraintClipOptions(*args, **kwargs):
pass
def PaintEffectsWindow(*args, **kwargs):
pass
def NodeEditorSetTraversalDepthUnlim(*args, **kwargs):
pass
def HypershadePickWalkUp(*args, **kwargs):
pass
def NodeEditorShapeMenuStateAllExceptShadingGroupMembers(*args, **kwargs):
pass
def UnifyTangents(*args, **kwargs):
pass
def ResetWire(*args, **kwargs):
pass
def geometryDeleteCacheFramesOpt(*args, **kwargs):
pass
def NodeEditorSelectConnected(*args, **kwargs):
pass
def NodeEditorGraphUpDownstream(*args, **kwargs):
pass
def CurveSmoothnessCoarse(*args, **kwargs):
pass
def CreateCameraOnly(*args, **kwargs):
pass
def UnghostObject(*args, **kwargs):
pass
def NodeEditorRedockTornOffTab(*args, **kwargs):
pass
def LongPolygonNormals(*args, **kwargs):
pass
def CreateCurveFromPoly(*args, **kwargs):
pass
def Undo(*args, **kwargs):
pass
def SelectObjectsIlluminatedByLight(*args, **kwargs):
pass
def AddAttribute(*args, **kwargs):
pass
def NodeEditorPickWalkLeft(*args, **kwargs):
pass
def PaintVertexColorTool(*args, **kwargs):
pass
def HypershadeOpenBinsWindow(*args, **kwargs):
pass
def SelectEdgeLoopSp(*args, **kwargs):
pass
def PaintRandom(*args, **kwargs):
pass
def ChamferVertexOptions(*args, **kwargs):
pass
def replaceCacheFramesOpt(*args, **kwargs):
pass
def HypershadeEditTexture(*args, **kwargs):
pass
def CreateUVsBasedOnCameraOptions(*args, **kwargs):
pass
def NodeEditorGraphRemoveSelected(*args, **kwargs):
pass
def replaceCacheFrames(*args, **kwargs):
pass
def HideSmoothSkinInfluences(*args, **kwargs):
pass
def ToggleChannelBox(*args, **kwargs):
pass
def CreateShrinkWrapOptions(*args, **kwargs):
pass
def GeometryConstraintOptions(*args, **kwargs):
pass
def ToggleSelectionHandles(*args, **kwargs):
pass
def CreateFlexorWindow(*args, **kwargs):
pass
def SubdivSmoothnessRough(*args, **kwargs):
pass
def OffsetSurfaces(*args, **kwargs):
pass
def DisplayShadingMarkingMenuPopDown(*args, **kwargs):
pass
def DeleteSurfaceFlow(*args, **kwargs):
pass
def HypershadeDeleteDuplicateShadingNetworks(*args, **kwargs):
pass
def SetKeyScale(*args, **kwargs):
pass
def NodeEditorToggleCreateNodePane(*args, **kwargs):
pass
def ReducePolygonOptions(*args, **kwargs):
pass
def LockTangentWeight(*args, **kwargs):
pass
def SetKeyRotate(*args, **kwargs):
pass
def CreatePolygonTorus(*args, **kwargs):
pass
def PolygonSoftenHardenOptions(*args, **kwargs):
pass
def ToggleParticleCount(*args, **kwargs):
pass
def PluginManager(*args, **kwargs):
pass
def FilePathEditor(*args, **kwargs):
pass
def InTangentClamped(*args, **kwargs):
pass
def PerspRelationshipEditorLayout(*args, **kwargs):
pass
def SelectAllDynamicConstraints(*args, **kwargs):
pass
def OutTangentAuto(*args, **kwargs):
pass
def DeleteExpressions(*args, **kwargs):
pass
def SnapToMeshCenter(*args, **kwargs):
pass
def PerPointEmissionRates(*args, **kwargs):
pass
def SetKeyAnimated(*args, **kwargs):
pass
def DeleteAllLattices(*args, **kwargs):
pass
def ModifyUVVectorRelease(*args, **kwargs):
pass
def AddToCurrentSceneMudbox(*args, **kwargs):
pass
def ProportionalModificationTool(*args, **kwargs):
pass
def GraphSnapOptions(*args, **kwargs):
pass
def HypershadeGridToggleVisibility(*args, **kwargs):
pass
def MirrorDeformerWeights(*args, **kwargs):
pass
def CreateNURBSSphereOptions(*args, **kwargs):
pass
def ToggleFaceMetadata(*args, **kwargs):
pass
def WhatsNewStartupDialogOff(*args, **kwargs):
pass
def EnterEditModePress(*args, **kwargs):
pass
def nClothRemove(*args, **kwargs):
pass
def SmoothSkinWeights(*args, **kwargs):
pass
def WedgePolygon(*args, **kwargs):
pass
def DefaultQualityDisplay(*args, **kwargs):
pass
def HideJoints(*args, **kwargs):
pass
def nClothDeleteHistoryOpt(*args, **kwargs):
pass
def UniformOptions(*args, **kwargs):
pass
def PanZoomTool(*args, **kwargs):
pass
def AddMissingFBIKEffectorsOptions(*args, **kwargs):
pass
def nClothCreateOptions(*args, **kwargs):
pass
def SelectSimilar(*args, **kwargs):
pass
def SelectAllSubdivGeometry(*args, **kwargs):
pass
def VisualizeMetadataOptions(*args, **kwargs):
pass
def HideStrokePathCurves(*args, **kwargs):
pass
def EnableAll(*args, **kwargs):
pass
def PaintReduceWeightsToolOptions(*args, **kwargs):
pass
def HypershadeAddOnNodeCreate(*args, **kwargs):
pass
def UseSelectedEmitter(*args, **kwargs):
pass
def PaintEffectsPanel(*args, **kwargs):
pass
def RigidBindSkinOptions(*args, **kwargs):
pass
def NodeEditorRestoreLastClosedTab(*args, **kwargs):
pass
def Turbulence(*args, **kwargs):
pass
def SculptGeometryToolOptions(*args, **kwargs):
pass
def ResampleCurve(*args, **kwargs):
pass
def U3DBrushPressureOn(*args, **kwargs):
pass
def SelectAllSculptObjects(*args, **kwargs):
pass
def ScaleToolWithSnapMarkingMenuPopDown(*args, **kwargs):
pass
def ToggleKeepWireCulling(*args, **kwargs):
pass
def TransferAttributes(*args, **kwargs):
pass
def PolygonBooleanUnion(*args, **kwargs):
pass
def LightCentricLightLinkingEditor(*args, **kwargs):
pass
def CreateConstraintClip(*args, **kwargs):
pass
def SelectLightsIlluminatingObject(*args, **kwargs):
pass
def ScaleConstraintOptions(*args, **kwargs):
pass
def TransformNoSelectOffTool(*args, **kwargs):
pass
def DisplayIntermediateObjects(*args, **kwargs):
pass
def HypershadeGridToggleSnap(*args, **kwargs):
pass
def TexSewDeactivateBrushSize(*args, **kwargs):
pass
def AttachCurve(*args, **kwargs):
pass
def BufferCurveSnapshot(*args, **kwargs):
pass
def HypergraphDecreaseDepth(*args, **kwargs):
pass
def SaveScene(*args, **kwargs):
pass
def AutobindContainerOptions(*args, **kwargs):
pass
def AttachSubdivSurfaceOptions(*args, **kwargs):
pass
def FluidEmitterOptions(*args, **kwargs):
pass
def TexSculptDeactivateBrushSize(*args, **kwargs):
pass
def HideNRigids(*args, **kwargs):
pass
def ToggleUVs(*args, **kwargs):
pass
def STRSTweakModeToggle(*args, **kwargs):
pass
def FreeTangentWeight(*args, **kwargs):
pass
def IntersectSurfaces(*args, **kwargs):
pass
def TesselateSubdivSurfaceOptions(*args, **kwargs):
pass
def CreatePolygonCone(*args, **kwargs):
pass
def SurfaceBooleanUnionTool(*args, **kwargs):
pass
def RotateUVs(*args, **kwargs):
pass
def DetachSurfaces(*args, **kwargs):
pass
def SplitPolygonToolOptions(*args, **kwargs):
pass
def ProfilerToolShowSelected(*args, **kwargs):
pass
def geometryCacheOpt(*args, **kwargs):
pass
def NormalConstraintOptions(*args, **kwargs):
pass
def RemoveInbetween(*args, **kwargs):
pass
def DeleteKeys(*args, **kwargs):
pass
def HypershadeCreateNewTab(*args, **kwargs):
pass
def AssumePreferredAngleOptions(*args, **kwargs):
pass
def DeltaMushOptions(*args, **kwargs):
pass
def ArtPaintSkinWeightsTool(*args, **kwargs):
pass
def PreviousManipulatorHandle(*args, **kwargs):
pass
def ArcLengthTool(*args, **kwargs):
pass
def CreatePolygonPrismOptions(*args, **kwargs):
pass
def SelectAllNRigids(*args, **kwargs):
pass
def ToggleNormals(*args, **kwargs):
pass
def ArtPaintAttrToolOptions(*args, **kwargs):
pass
def IPROptions(*args, **kwargs):
pass
def ConvertSelectionToFaces(*args, **kwargs):
pass
def ShowMeshImprintToolOptions(*args, **kwargs):
pass
def SculptSubdivsTool(*args, **kwargs):
pass
def SplitPolygonTool(*args, **kwargs):
pass
def ArchiveScene(*args, **kwargs):
pass
def CoarserSubdivLevel(*args, **kwargs):
pass
def DeleteAllWires(*args, **kwargs):
pass
def PublishRootTransform(*args, **kwargs):
pass
def AlignSurfaces(*args, **kwargs):
pass
def CreatePlatonicSolidOptions(*args, **kwargs):
pass
def ShowManipulators(*args, **kwargs):
pass
def CreateVolumeLight(*args, **kwargs):
pass
def GraphCopyOptions(*args, **kwargs):
pass
def ToggleIKSolvers(*args, **kwargs):
pass
def SetInitialStateOptions(*args, **kwargs):
pass
def ShowLastHidden(*args, **kwargs):
pass
def CreateNURBSCone(*args, **kwargs):
pass
def HypershadeToggleAttrFilter(*args, **kwargs):
pass
def SetFullBodyIKKeysKeyToPin(*args, **kwargs):
pass
def HypershadeSelectLights(*args, **kwargs):
pass
def DeleteAllJoints(*args, **kwargs):
pass
def HypershadeDeleteAllShadingGroupsAndMaterials(*args, **kwargs):
pass
def SmoothBindSkinOptions(*args, **kwargs):
pass
def NodeEditorGraphAllShapes(*args, **kwargs):
pass
def ToggleFkSkeletonVisibility(*args, **kwargs):
pass
def SetFocusToCommandLine(*args, **kwargs):
pass
def ShowFkSkeleton(*args, **kwargs):
pass
def CylindricalProjectionOptions(*args, **kwargs):
pass
def HideFkSkeleton(*args, **kwargs):
pass
def HypershadeCreatePSDFile(*args, **kwargs):
pass
def NamespaceEditor(*args, **kwargs):
pass
def SetDrivenKey(*args, **kwargs):
pass
def ShowClusters(*args, **kwargs):
pass
def AddHolderOptions(*args, **kwargs):
pass
def ToggleBackfaceGeometry(*args, **kwargs):
pass
def PaintOnPaintableObjects(*args, **kwargs):
pass
def ToggleVisibilityAndKeepSelectionOptions(*args, **kwargs):
pass
def EditFluidResolutionOptions(*args, **kwargs):
pass
def HypershadeRefreshSelectedSwatches(*args, **kwargs):
pass
def SendAsNewScene3dsMax(*args, **kwargs):
pass
def ReverseCurve(*args, **kwargs):
pass
def geometryReplaceCacheFrames(*args, **kwargs):
pass
def ToggleVertices(*args, **kwargs):
pass
def Birail2(*args, **kwargs):
pass
def CreateClipOptions(*args, **kwargs):
pass
def SelectUVMask(*args, **kwargs):
pass
def CreateVolumeLightOptions(*args, **kwargs):
pass
def ConvertSelectionToContainedEdges(*args, **kwargs):
pass
def ToggleUnsharedUVs(*args, **kwargs):
pass
def NURBSSmoothnessMediumOptions(*args, **kwargs):
pass
def MakeBrushSpringOptions(*args, **kwargs):
pass
def FlareOptions(*args, **kwargs):
pass
def PolySelectToolOptions(*args, **kwargs):
pass
def SelectPreviousObjectsMotionBuilder(*args, **kwargs):
pass
def ParentBaseWire(*args, **kwargs):
pass
def Duplicate(*args, **kwargs):
pass
def Birail2Options(*args, **kwargs):
pass
def CharacterAnimationEditor(*args, **kwargs):
pass
def NCreateEmitterOptions(*args, **kwargs):
pass
def DisableGlobalStitch(*args, **kwargs):
pass
def HypershadeGraphDownstream(*args, **kwargs):
pass
def BatchBake(*args, **kwargs):
pass
def HideUnselectedCVs(*args, **kwargs):
pass
def AttachSurfacesOptions(*args, **kwargs):
pass
def PresetBlendingWindow(*args, **kwargs):
pass
def CollapseSubdivSurfaceHierarchy(*args, **kwargs):
pass
def RemoveConstraintTargetOptions(*args, **kwargs):
pass
def ToggleSurfaceFaceCenters(*args, **kwargs):
pass
def CreateHairCacheOptions(*args, **kwargs):
pass
def InsertKnot(*args, **kwargs):
pass
def RemoveBindingSet(*args, **kwargs):
pass
def fluidMergeCache(*args, **kwargs):
pass
def SubdivToNURBSOptions(*args, **kwargs):
pass
def AppendToHairCache(*args, **kwargs):
pass
def IncreaseExposureFine(*args, **kwargs):
pass
def HypershadeDisplayAsLargeSwatches(*args, **kwargs):
pass
def DeleteFBIKAllKeys(*args, **kwargs):
pass
def fluidDeleteCache(*args, **kwargs):
pass
def NodeEditorToggleUseAssetsAndPublishedAttributes(*args, **kwargs):
pass
def CreateShot(*args, **kwargs):
pass
def RemoveJoint(*args, **kwargs):
pass
def HypershadeCloseAllTabs(*args, **kwargs):
pass
def AssignHairConstraintOptions(*args, **kwargs):
pass
def CreateReference(*args, **kwargs):
pass
def ShowSurfaceCVs(*args, **kwargs):
pass
def TogglePolyNonPlanarFaceDisplay(*args, **kwargs):
pass
def AlignSurfacesOptions(*args, **kwargs):
pass
def CreatePolygonCylinder(*args, **kwargs):
pass
def Fireworks(*args, **kwargs):
pass
def ShowSmoothSkinInfluences(*args, **kwargs):
pass
def AirOptions(*args, **kwargs):
pass
def PickWalkOut(*args, **kwargs):
pass
def SelectAllImagePlanes(*args, **kwargs):
pass
def ConnectToTime(*args, **kwargs):
pass
def DeleteHistory(*args, **kwargs):
pass
def ShowResultsOptions(*args, **kwargs):
pass
def SculptMeshDeactivateBrushSize(*args, **kwargs):
pass
def SoloLastOutput(*args, **kwargs):
pass
def DeleteAllParticles(*args, **kwargs):
pass
def AddToCurrentScene3dsMax(*args, **kwargs):
pass
def ShowNonlinears(*args, **kwargs):
pass
def Group(*args, **kwargs):
pass
def HypershadeTransferAttributeValues(*args, **kwargs):
pass
def AddWrapInfluence(*args, **kwargs):
pass
def CreateOceanOptions(*args, **kwargs):
pass
def nucleusGetEffectsAsset(*args, **kwargs):
pass
def SetKeyOptions(*args, **kwargs):
pass
def CreateActiveRigidBody(*args, **kwargs):
pass
def ShowMeshSmearToolOptions(*args, **kwargs):
pass
def ExportOfflineFile(*args, **kwargs):
pass
def HypershadeShowCustomAttrs(*args, **kwargs):
pass
def ConvertSelectionToVertexFaces(*args, **kwargs):
pass
def Snap2PointsTo2Points(*args, **kwargs):
pass
def OptimizeSceneOptions(*args, **kwargs):
pass
def DeleteAllConstraints(*args, **kwargs):
pass
def Sine(*args, **kwargs):
pass
def NodeEditorCloseAllTabs(*args, **kwargs):
pass
def AutoPaintMarkingMenuPopDown(*args, **kwargs):
pass
def AddPondBoatLocator(*args, **kwargs):
pass
def OutlinerCollapseAllItems(*args, **kwargs):
pass
def ToggleCustomNURBSComponents(*args, **kwargs):
pass
def clearNClothStartState(*args, **kwargs):
pass
def EnableMemoryCaching(*args, **kwargs):
pass
def PaintEffectsTool(*args, **kwargs):
pass
def RotateToolWithSnapMarkingMenuPopDown(*args, **kwargs):
pass
def DuplicateNURBSPatchesOptions(*args, **kwargs):
pass
def HypershadePickWalkLeft(*args, **kwargs):
pass
def BrushAnimationMarkingMenu(*args, **kwargs):
pass
def OrientConstraintOptions(*args, **kwargs):
pass
def ResetTransformationsOptions(*args, **kwargs):
pass
def ToggleReflection(*args, **kwargs):
pass
def BestPlaneTexturingTool(*args, **kwargs):
pass
def CurveFlow(*args, **kwargs):
pass
def TranslateToolWithSnapMarkingMenu(*args, **kwargs):
pass
def PolygonCopy(*args, **kwargs):
pass
def Loft(*args, **kwargs):
pass
def CreateCreaseSet(*args, **kwargs):
pass
def RerootSkeleton(*args, **kwargs):
pass
def SelectAllBrushes(*args, **kwargs):
pass
def AddAnimationOffset(*args, **kwargs):
pass
def ModifyUpperRadiusRelease(*args, **kwargs):
pass
def HIKSetFullBodyKey(*args, **kwargs):
pass
def CreateBezierCurveToolOptions(*args, **kwargs):
pass
def DisplaySmoothShaded(*args, **kwargs):
pass
def HypershadeMoveTabRight(*args, **kwargs):
pass
def RenderViewWindow(*args, **kwargs):
pass
def TogglePanZoomPress(*args, **kwargs):
pass
def ModifyStampDepthPress(*args, **kwargs):
pass
def CenterPivot(*args, **kwargs):
pass
def HypershadeDuplicateWithoutNetwork(*args, **kwargs):
pass
def BakeCustomPivot(*args, **kwargs):
pass
def CreateUVShellAlongBorder(*args, **kwargs):
pass
def ToggleObjectDetails(*args, **kwargs):
pass
def RenderPassSetEditor(*args, **kwargs):
pass
def HideSculptObjects(*args, **kwargs):
pass
def ToggleViewAxis(*args, **kwargs):
pass
def ModifyLowerRadiusRelease(*args, **kwargs):
pass
def CreateShotOptions(*args, **kwargs):
pass
def KeyframeTangentMarkingMenuPopDown(*args, **kwargs):
pass
def RenderGlobalsWindow(*args, **kwargs):
pass
def PreviousGreasePencilFrame(*args, **kwargs):
pass
def CreateFBIK(*args, **kwargs):
pass
def InsertEdgeLoopTool(*args, **kwargs):
pass
def DisableFluids(*args, **kwargs):
pass
def MirrorSkinWeightsOptions(*args, **kwargs):
pass
def SubdivSmoothnessMedium(*args, **kwargs):
pass
def RemoveSubdivProxyMirror(*args, **kwargs):
pass
def DeleteFBIKSelectedKeys(*args, **kwargs):
pass
def DeleteStaticChannels(*args, **kwargs):
pass
def HypershadeDeleteAllTextures(*args, **kwargs):
pass
def AddPondDynamicLocatorOptions(*args, **kwargs):
pass
def NodeEditorToggleAttrFilter(*args, **kwargs):
pass
def RedoViewChange(*args, **kwargs):
pass
def PublishConnectionsOptions(*args, **kwargs):
pass
def DeleteAllFurs(*args, **kwargs):
pass
def MoveRotateScaleToolToggleSnapMode(*args, **kwargs):
pass
def CreatePolygonTool(*args, **kwargs):
pass
def PolyMergeEdgesOptions(*args, **kwargs):
pass
def TogglePanZoomRelease(*args, **kwargs):
pass
def HypershadeSortByType(*args, **kwargs):
pass
def PublishAttributes(*args, **kwargs):
pass
def EditCharacterAttributes(*args, **kwargs):
pass
def FBIKReachKeyingOptionIK(*args, **kwargs):
pass
def ImportSkinWeightMaps(*args, **kwargs):
pass
def PolyExtrudeOptions(*args, **kwargs):
pass
def PerspGraphOutlinerLayout(*args, **kwargs):
pass
def SelectAllCameras(*args, **kwargs):
pass
def Squash(*args, **kwargs):
pass
def CommandWindow(*args, **kwargs):
pass
def ShowBatchRender(*args, **kwargs):
pass
def PolyExtrude(*args, **kwargs):
pass
def SnapToCurve(*args, **kwargs):
pass
def Radial(*args, **kwargs):
pass
def ModifyStampDepthRelease(*args, **kwargs):
pass
def ProjectTangentOptions(*args, **kwargs):
pass
def GraphPasteOptions(*args, **kwargs):
pass
def AddFaceDivisions(*args, **kwargs):
pass
def PolyBrushMarkingMenuPopDown(*args, **kwargs):
pass
def CreateNURBSPlaneOptions(*args, **kwargs):
pass
def ExtendFluid(*args, **kwargs):
pass
def SetMeshWaxTool(*args, **kwargs):
pass
def CreaseProxyEdgeToolOptions(*args, **kwargs):
pass
def EnableSnapshots(*args, **kwargs):
pass
def HypershadeSetLargeNodeSwatchSize(*args, **kwargs):
pass
def SetMeshScrapeTool(*args, **kwargs):
pass
def SmoothProxy(*args, **kwargs):
pass
def HideLattices(*args, **kwargs):
pass
def PointOnCurveOptions(*args, **kwargs):
pass
def HideIKHandles(*args, **kwargs):
pass
def ShowAllEditedComponents(*args, **kwargs):
pass
def NextKey(*args, **kwargs):
pass
def HypershadeExportSelectedNetwork(*args, **kwargs):
pass
def SelectEdgeRingSp(*args, **kwargs):
pass
def SetMeshFlattenTool(*args, **kwargs):
pass
def ToggleColorFeedback(*args, **kwargs):
pass
def SelectCurveCVsLast(*args, **kwargs):
pass
def SetKeyVertexColor(*args, **kwargs):
pass
def EmitFromObjectOptions(*args, **kwargs):
pass
def HypershadeRenderTextureRange(*args, **kwargs):
pass
def PaintEffectsGlobalSettings(*args, **kwargs):
pass
def SelectContainerContents(*args, **kwargs):
pass
def HypershadeOpenOutlinerWindow(*args, **kwargs):
pass
def NodeEditorRenameActiveTab(*args, **kwargs):
pass
def CutKeysOptions(*args, **kwargs):
pass
def Help(*args, **kwargs):
    """No-op stub for the Maya ``Help`` runtime command.

    Takes any arguments, performs no work, and returns ``None``.
    """
    return None
def ShotPlaylistEditor(*args, **kwargs):
pass
def TwoStackedViewArrangement(*args, **kwargs):
pass
def SelectBorderEdgeTool(*args, **kwargs):
pass
def CreateWake(*args, **kwargs):
pass
def MakeFluidCollideOptions(*args, **kwargs):
pass
def PolygonBooleanIntersection(*args, **kwargs):
pass
def SelectSurfacePointsMask(*args, **kwargs):
pass
def HypershadePickWalkRight(*args, **kwargs):
pass
def BrushPresetBlendShadingOff(*args, **kwargs):
pass
def ParticleFill(*args, **kwargs):
pass
def DuplicateNURBSPatches(*args, **kwargs):
pass
def SelectAllOutput(*args, **kwargs):
pass
def TangentsSpline(*args, **kwargs):
pass
def ClearCurrentCharacterList(*args, **kwargs):
pass
def DisconnectJoint(*args, **kwargs):
pass
def HypershadeGraphUpDownstream(*args, **kwargs):
pass
def ShatterOptions(*args, **kwargs):
pass
def BrushPresetReplaceShading(*args, **kwargs):
pass
def HyperGraphPanelUndoViewChange(*args, **kwargs):
pass
def DeleteChannels(*args, **kwargs):
pass
def BreakShadowLinks(*args, **kwargs):
pass
def AutoProjectionOptions(*args, **kwargs):
pass
def CreateSubdivSphere(*args, **kwargs):
pass
def CreateSoftBodyOptions(*args, **kwargs):
pass
def MergeMultipleEdgesOptions(*args, **kwargs):
pass
def HideNCloths(*args, **kwargs):
pass
def CreateJiggleOptions(*args, **kwargs):
pass
def BothProxySubdivDisplay(*args, **kwargs):
pass
def ToggleFaceNormals(*args, **kwargs):
pass
def MediumQualityDisplay(*args, **kwargs):
pass
def CreateDiskCache(*args, **kwargs):
pass
def SurfaceBooleanSubtractTool(*args, **kwargs):
pass
def DetachSkin(*args, **kwargs):
pass
def MakeUVInstanceCurrent(*args, **kwargs):
pass
def StraightenUVBorder(*args, **kwargs):
pass
def NonWeightedTangents(*args, **kwargs):
pass
def RemoveFromContainer(*args, **kwargs):
pass
def MakePondMotorBoats(*args, **kwargs):
pass
def HypershadeCreateAsset(*args, **kwargs):
pass
def AssignTemplateOptions(*args, **kwargs):
pass
def ReattachSkeletonJoints(*args, **kwargs):
pass
def ToggleScalePivots(*args, **kwargs):
pass
def CreatePolygonPlaneOptions(*args, **kwargs):
pass
def FlipTubeDirection(*args, **kwargs):
pass
def mrShaderManager(*args, **kwargs):
pass
def setNClothStartState(*args, **kwargs):
pass
def PlaceFullBodyPivot(*args, **kwargs):
pass
def SelectAllNCloths(*args, **kwargs):
pass
def Extrude(*args, **kwargs):
pass
def IKSplineHandleTool(*args, **kwargs):
pass
def ConvertSelectionToEdgePerimeter(*args, **kwargs):
pass
def SculptReferenceVectorMarkingMenuPress(*args, **kwargs):
pass
def SplitMeshWithProjectedCurve(*args, **kwargs):
pass
def ShowCameraManipulators(*args, **kwargs):
pass
def SmoothCurve(*args, **kwargs):
pass
def CoarseLevelComponentDisplay(*args, **kwargs):
pass
def HypershadeRefreshSelectedSwatchesOnDisk(*args, **kwargs):
pass
def HardwareRenderBuffer(*args, **kwargs):
pass
def CurveSmoothnessRough(*args, **kwargs):
pass
def PublishParentAnchor(*args, **kwargs):
pass
def ShowAnimationUI(*args, **kwargs):
pass
def AlignCurve(*args, **kwargs):
pass
def CreatePassiveRigidBody(*args, **kwargs):
pass
def GoalOptions(*args, **kwargs):
pass
def ToggleIKAllowRotation(*args, **kwargs):
pass
def CreateNURBSCircle(*args, **kwargs):
pass
def SelectCVsMask(*args, **kwargs):
pass
def HypershadeTestTexture(*args, **kwargs):
pass
def ShareUVInstances(*args, **kwargs):
pass
def CopyKeys(*args, **kwargs):
pass
def UpdateBindingSetOptions(*args, **kwargs):
pass
def SmoothPolygon(*args, **kwargs):
pass
def HypershadeSelectConnected(*args, **kwargs):
pass
def DeleteAllIKHandles(*args, **kwargs):
pass
def SetWorkingFrame(*args, **kwargs):
pass
def SmokeOptions(*args, **kwargs):
pass
def NodeEditorExtendToShapes(*args, **kwargs):
pass
def UnpublishRootTransform(*args, **kwargs):
pass
def CycleThroughCameras(*args, **kwargs):
pass
def HideDeformingGeometry(*args, **kwargs):
pass
def SetTimecode(*args, **kwargs):
pass
def UnparentOptions(*args, **kwargs):
pass
def NodeEditorShowAllCustomAttrs(*args, **kwargs):
pass
def AddFloorContactPlane(*args, **kwargs):
pass
def EnableSelectedIKHandles(*args, **kwargs):
pass
def SetRigidBodyCollision(*args, **kwargs):
pass
def CreateConstraintOptions(*args, **kwargs):
pass
def ToggleAutoSmooth(*args, **kwargs):
pass
def PaintGrid(*args, **kwargs):
pass
def UnlockCurveLength(*args, **kwargs):
pass
def SetShrinkWrapInnerObject(*args, **kwargs):
pass
def NodeEditorSetTraversalDepthZero(*args, **kwargs):
pass
def HypershadeRefreshAllSwatchesOnDisk(*args, **kwargs):
pass
def HypershadePerspLayout(*args, **kwargs):
pass
def UnitizeUVs(*args, **kwargs):
pass
def OutlinerToggleSetMembers(*args, **kwargs):
pass
def RetimeKeysTool(*args, **kwargs):
pass
def NodeEditorIncreaseTraversalDepth(*args, **kwargs):
pass
def CurveUtilitiesMarkingMenuPopDown(*args, **kwargs):
pass
def CreateCameraOnlyOptions(*args, **kwargs):
pass
def TruncateHairCache(*args, **kwargs):
pass
def CreateWrap(*args, **kwargs):
pass
def MakeBoatsOptions(*args, **kwargs):
pass
def UndoCanvas(*args, **kwargs):
pass
def NodeEditorPickWalkRight(*args, **kwargs):
pass
def SelectPolygonToolMarkingMenuPopDown(*args, **kwargs):
pass
def AddBoatLocatorOptions(*args, **kwargs):
pass
def RotateTool(*args, **kwargs):
pass
def Drag(*args, **kwargs):
pass
def SwapBlendShape(*args, **kwargs):
pass
def ChangeVertexSize(*args, **kwargs):
pass
def HypershadeGraphAddSelected(*args, **kwargs):
pass
def BakeTopologyToTargets(*args, **kwargs):
pass
def DuplicateFaceOptions(*args, **kwargs):
pass
def NodeEditorGraphRemoveUnselected(*args, **kwargs):
pass
def HideTexturePlacements(*args, **kwargs):
pass
def CreateSpring(*args, **kwargs):
pass
def GetHairExample(*args, **kwargs):
pass
def UnlockContainer(*args, **kwargs):
pass
def CreateHair(*args, **kwargs):
pass
def FlowPathObjectOptions(*args, **kwargs):
pass
def InsertKeysTool(*args, **kwargs):
pass
def LockCurveLength(*args, **kwargs):
pass
def SubdivSurfacePolygonProxyMode(*args, **kwargs):
pass
def DeleteVertex(*args, **kwargs):
pass
def ThreeTopSplitViewArrangement(*args, **kwargs):
pass
def NodeEditorToggleShowNamespace(*args, **kwargs):
pass
def RelaxInitialStateOptions(*args, **kwargs):
pass
def AddKeyToolDeactivate(*args, **kwargs):
pass
def HypershadeAutoSizeNodes(*args, **kwargs):
pass
def CreatePoseOptions(*args, **kwargs):
pass
def TogglePolyDisplayLimitToSelected(*args, **kwargs):
pass
def DisableSnapshots(*args, **kwargs):
pass
def CreatePolygonCube(*args, **kwargs):
pass
def Fire(*args, **kwargs):
pass
def InTangentSpline(*args, **kwargs):
pass
def PickWalkIn(*args, **kwargs):
pass
def HypershadeToggleZoomIn(*args, **kwargs):
pass
def OutTangentPlateau(*args, **kwargs):
pass
def DeleteHair(*args, **kwargs):
pass
def NodeEditorShowAllAttrs(*args, **kwargs):
pass
def DeleteAllNRigids(*args, **kwargs):
pass
def AppendToPolygonToolOptions(*args, **kwargs):
pass
def PasteSelected(*args, **kwargs):
pass
def GridUV(*args, **kwargs):
pass
def WrinkleTool(*args, **kwargs):
pass
def CreateNodeWindow(*args, **kwargs):
pass
def ExtractSubdivSurfaceVerticesOptions(*args, **kwargs):
pass
def ParticleTool(*args, **kwargs):
pass
def NodeEditorPinByDefault(*args, **kwargs):
pass
def ToggleFkIk(*args, **kwargs):
pass
def Create3DContainerEmitterOptions(*args, **kwargs):
pass
def ExportDeformerWeights(*args, **kwargs):
pass
def HypershadeShowAllAttrs(*args, **kwargs):
pass
def Ungroup(*args, **kwargs):
pass
def ParticleCollisionEvents(*args, **kwargs):
pass
def ConvertSelectionToUVShell(*args, **kwargs):
pass
def SmoothingLevelDecrease(*args, **kwargs):
pass
def WedgePolygonOptions(*args, **kwargs):
pass
def DeleteAllClips(*args, **kwargs):
pass
def nClothMakeCollide(*args, **kwargs):
pass
def HyperGraphPanelRedoViewChange(*args, **kwargs):
pass
def NodeEditorAutoSizeNodes(*args, **kwargs):
pass
def WarpImage(*args, **kwargs):
pass
def HairUVSetLinkingEditor(*args, **kwargs):
pass
def MergeEdgeToolOptions(*args, **kwargs):
pass
def PanePop(*args, **kwargs):
pass
def nClothDeleteCacheFrames(*args, **kwargs):
pass
def VolumeAxis(*args, **kwargs):
pass
def ToggleCurrentContainerHud(*args, **kwargs):
pass
def PaintSetMembershipTool(*args, **kwargs):
pass
def EnableGlobalStitch(*args, **kwargs):
pass
def UnfoldUV(*args, **kwargs):
pass
def SelectPreviousObjectsMudbox(*args, **kwargs):
pass
def ThreePointArcToolOptions(*args, **kwargs):
pass
def PaintEffectsToPoly(*args, **kwargs):
pass
def RotateToolOptions(*args, **kwargs):
pass
def UndoViewChange(*args, **kwargs):
pass
def NodeEditorSetSmallNodeSwatchSize(*args, **kwargs):
pass
def SculptMeshActivateBrushSize(*args, **kwargs):
pass
def TurbulenceOptions(*args, **kwargs):
pass
def UpdateCurrentSceneMotionBuilder(*args, **kwargs):
pass
def ResetTemplateBrush(*args, **kwargs):
pass
def UVCentricUVLinkingEditor(*args, **kwargs):
pass
def GetFBIKExample(*args, **kwargs):
pass
def RotateToolWithSnapMarkingMenu(*args, **kwargs):
pass
def HideNURBSSurfaces(*args, **kwargs):
pass
def CurveFillet(*args, **kwargs):
pass
def ScriptEditor(*args, **kwargs):
pass
def SelectNextIntermediatObject(*args, **kwargs):
pass
def PolygonCollapseEdges(*args, **kwargs):
pass
def CreateSubCharacter(*args, **kwargs):
pass
def GameExporterWnd(*args, **kwargs):
pass
def LockNormals(*args, **kwargs):
pass
def PasteKeys(*args, **kwargs):
pass
def SelectMeshUVShell(*args, **kwargs):
pass
def ActivateGlobalScreenSliderModeMarkingMenu(*args, **kwargs):
pass
def ScaleCurvature(*args, **kwargs):
pass
def CreateAreaLightOptions(*args, **kwargs):
pass
def HypershadeMoveTabDown(*args, **kwargs):
pass
def TextureCentricUVLinkingEditor(*args, **kwargs):
pass
def CameraSetEditor(*args, **kwargs):
pass
def SaveSceneAs(*args, **kwargs):
    """No-op stub for the Maya ``SaveSceneAs`` runtime command.

    Every argument is ignored; the result is always ``None``.
    """
    return None
def BakeChannel(*args, **kwargs):
pass
def CreateTextureDeformerOptions(*args, **kwargs):
pass
def TexSculptDeactivateBrushStrength(*args, **kwargs):
pass
def NodeEditorPickWalkDown(*args, **kwargs):
pass
def HidePlanes(*args, **kwargs):
pass
def ToggleVertexNormalDisplay(*args, **kwargs):
pass
def SaveBrushPreset(*args, **kwargs):
pass
def CreateSetOptions(*args, **kwargs):
pass
def FullCreaseSubdivSurface(*args, **kwargs):
pass
def KeyBlendShapeTargetsWeight(*args, **kwargs):
pass
def TestTexture(*args, **kwargs):
pass
def PreInfinityCycleOffset(*args, **kwargs):
pass
def CreateExpressionClip(*args, **kwargs):
pass
def RoundTool(*args, **kwargs):
pass
def AssignTemplate(*args, **kwargs):
pass
def InitialFluidStates(*args, **kwargs):
pass
def DisableConstraints(*args, **kwargs):
pass
def ProfilerToolShowSelectedRepetition(*args, **kwargs):
pass
def SubdivSmoothnessHull(*args, **kwargs):
pass
def OffsetCurveOnSurfaceOptions(*args, **kwargs):
pass
def DeleteRigidBodies(*args, **kwargs):
pass
def HypershadeDeleteAllLights(*args, **kwargs):
pass
def ProfilerToolHideSelected(*args, **kwargs):
pass
def NodeEditorGridToggleVisibility(*args, **kwargs):
pass
def HypershadeShapeMenuStateNoShapes(*args, **kwargs):
pass
def RedoPreviousIPRRender(*args, **kwargs):
pass
def BlindDataEditor(*args, **kwargs):
pass
def PreviousViewArrangement(*args, **kwargs):
pass
def CreatePolygonSphere(*args, **kwargs):
pass
def FlowPathObject(*args, **kwargs):
pass
def TogglePaintOnPaintableObjects(*args, **kwargs):
pass
def PlaybackStop(*args, **kwargs):
pass
def ArtPaintBlendShapeWeightsTool(*args, **kwargs):
pass
def PrevSkinPaintMode(*args, **kwargs):
pass
def SurfaceFlow(*args, **kwargs):
pass
def ExtrudeVertexOptions(*args, **kwargs):
pass
def ImportDeformerWeightsOptions(*args, **kwargs):
pass
def ShowMeshKnifeToolOptions(*args, **kwargs):
pass
def cacheAppendOpt(*args, **kwargs):
pass
def PerspGraphHypergraphLayout(*args, **kwargs):
pass
def SelectAllAssets(*args, **kwargs):
pass
def PrefixHierarchyNames(*args, **kwargs):
pass
def OrientConstraint(*args, **kwargs):
pass
def DeleteCurrentColorSet(*args, **kwargs):
pass
def fluidReplaceCacheOpt(*args, **kwargs):
pass
def DeleteMemoryCaching(*args, **kwargs):
pass
def ShowMeshFoamyToolOptions(*args, **kwargs):
pass
def SnapPointToPoint(*args, **kwargs):
pass
def QualityDisplayMarkingMenuPopDown(*args, **kwargs):
pass
def CreateReferenceOptions(*args, **kwargs):
pass
def ModifyPaintValueRelease(*args, **kwargs):
pass
def BatchBakeOptions(*args, **kwargs):
pass
def ShowMarkers(*args, **kwargs):
pass
def ProjectCurveOnSurfaceOptions(*args, **kwargs):
pass
def GraphEditor(*args, **kwargs):
    """No-op stub for the Maya ``GraphEditor`` runtime command.

    Accepts any call signature and returns ``None`` unconditionally.
    """
    return None
def PostInfinityCycle(*args, **kwargs):
pass
def ExtendCurveOnSurfaceOptions(*args, **kwargs):
pass
def ToggleFBIKEffectorsPinState(*args, **kwargs):
pass
def CopyVertexSkinWeights(*args, **kwargs):
pass
def SetFullBodyIKKeysOptions(*args, **kwargs):
pass
def ShowHotbox(*args, **kwargs):
pass
def HypershadeSelectUpStream(*args, **kwargs):
pass
def NodeEditorGraphRearrange(*args, **kwargs):
pass
def ReversePolygonNormals(*args, **kwargs):
pass
def ExportSelection(*args, **kwargs):
pass
def ShowFluids(*args, **kwargs):
pass
def DecreaseGammaCoarse(*args, **kwargs):
pass
def HideHairSystems(*args, **kwargs):
pass
def NextFrame(*args, **kwargs):
pass
def SetDrivenKeyOptions(*args, **kwargs):
pass
def ShowDeformers(*args, **kwargs):
pass
def HIKFullBodyMode(*args, **kwargs):
pass
def MakeShadowLinks(*args, **kwargs):
pass
def SetBreakdownKey(*args, **kwargs):
pass
def EmitFluidFromObjectOptions(*args, **kwargs):
pass
def SendAsNewSceneMotionBuilder(*args, **kwargs):
pass
def PaintEffectPanelActivate(*args, **kwargs):
pass
def ReverseSurfaceDirectionOptions(*args, **kwargs):
pass
def HypershadeOpenGraphEditorWindow(*args, **kwargs):
pass
def SelectUVShell(*args, **kwargs):
pass
def TwoPointArcToolOptions(*args, **kwargs):
pass
def ConvertSelectionToContainedFaces(*args, **kwargs):
pass
def MakeCurvesDynamicOptions(*args, **kwargs):
pass
def PolygonBooleanDifference(*args, **kwargs):
pass
def ToggleToolSettings(*args, **kwargs):
pass
def LayoutUV(*args, **kwargs):
pass
def cacheAppend(*args, **kwargs):
pass
def DuplicateFace(*args, **kwargs):
pass
def ConnectComponentsOptions(*args, **kwargs):
pass
def TangentsLinear(*args, **kwargs):
pass
def CleanupPolygon(*args, **kwargs):
pass
def PartialCreaseSubdivSurface(*args, **kwargs):
pass
def DisableSelectedIKHandles(*args, **kwargs):
pass
def ShowMeshFlattenToolOptions(*args, **kwargs):
pass
def MoveTool(*args, **kwargs):
pass
def CreateSubdivPlane(*args, **kwargs):
pass
def GlobalStitchOptions(*args, **kwargs):
pass
def InsertIsoparms(*args, **kwargs):
pass
def HideLights(*args, **kwargs):
pass
def ToggleToolMessage(*args, **kwargs):
pass
def CreateImagePlaneOptions(*args, **kwargs):
pass
def FrameAll(*args, **kwargs):
pass
def InteractiveSplitTool(*args, **kwargs):
pass
def ClusterCurve(*args, **kwargs):
pass
def fluidMergeCacheOpt(*args, **kwargs):
pass
def MoveNormalToolOptions(*args, **kwargs):
pass
def SurfaceBooleanIntersectTool(*args, **kwargs):
pass
def RegionKeysTool(*args, **kwargs):
pass
def DetachSkeleton(*args, **kwargs):
pass
def ToggleAttributeEditor(*args, **kwargs):
pass
def NodeEditorWindow(*args, **kwargs):
pass
def AnimationSweep(*args, **kwargs):
pass
def SubdivSurfaceHierarchyMode(*args, **kwargs):
pass
def InTangentLinear(*args, **kwargs):
pass
def ShowNCloths(*args, **kwargs):
pass
def NURBSSmoothnessFine(*args, **kwargs):
pass
def ReassignBoneLatticeJoint(*args, **kwargs):
pass
def ShowTexturePlacements(*args, **kwargs):
pass
def CreatePolygonPipeOptions(*args, **kwargs):
pass
def ShowStrokeControlCurves(*args, **kwargs):
pass
def AlignCameraToPolygon(*args, **kwargs):
pass
def PixelMoveRight(*args, **kwargs):
pass
def SelectAllMarkingMenu(*args, **kwargs):
pass
def SnapKeysOptions(*args, **kwargs):
pass
def HypershadeGraphRemoveUnselected(*args, **kwargs):
pass
def IKHandleTool(*args, **kwargs):
pass
def OutlinerRenameSelectedItem(*args, **kwargs):
pass
def ShowRiggingUI(*args, **kwargs):
pass
def SculptPolygonsTool(*args, **kwargs):
pass
def SplitEdgeRingTool(*args, **kwargs):
pass
def DeleteAllShadingGroupsAndMaterials(*args, **kwargs):
pass
def ShowObjectGeometry(*args, **kwargs):
pass
def AddToCurrentSceneMotionBuilder(*args, **kwargs):
pass
def PlayblastOptions(*args, **kwargs):
pass
def HypershadeTransferAttributeValuesOptions(*args, **kwargs):
pass
def HIKCycleMode(*args, **kwargs):
pass
def ShowManipulatorTool(*args, **kwargs):
pass
def ModifyConstraintAxis(*args, **kwargs):
pass
def CreatePartition(*args, **kwargs):
pass
def ShowNParticles(*args, **kwargs):
pass
def nucleusGetnClothExample(*args, **kwargs):
pass
def GoToWorkingFrame(*args, **kwargs):
pass
def ToggleHoleFaces(*args, **kwargs):
pass
def CreateNSoftBody(*args, **kwargs):
pass
def ExportProxyContainer(*args, **kwargs):
pass
def nucleusDisplayNComponentNodes(*args, **kwargs):
pass
def CreatePartitionOptions(*args, **kwargs):
pass
def OutlinerToggleIgnoreUseColor(*args, **kwargs):
pass
def HypershadeSelectBakeSets(*args, **kwargs):
pass
def BendCurves(*args, **kwargs):
pass
def SlideEdgeToolOptions(*args, **kwargs):
pass
def NodeEditorDeleteNodes(*args, **kwargs):
pass
def CycleFBIKReachKeyingOption(*args, **kwargs):
pass
def HideClusters(*args, **kwargs):
pass
def MergeVertexToolOptions(*args, **kwargs):
pass
def OutlinerCollapseAllSelectedItems(*args, **kwargs):
pass
def TextureViewWindow(*args, **kwargs):
pass
def MakeMotorBoats(*args, **kwargs):
pass
def FireworksOptions(*args, **kwargs):
pass
def EnableParticles(*args, **kwargs):
pass
def OutTangentFlat(*args, **kwargs):
pass
def ToggleAutoActivateBodyPart(*args, **kwargs):
pass
def PaintGeomCacheTool(*args, **kwargs):
pass
def PolygonSoftenEdge(*args, **kwargs):
pass
def SelectAllMarkingMenuPopDown(*args, **kwargs):
pass
def EPCurveTool(*args, **kwargs):
pass
def HypershadePublishConnections(*args, **kwargs):
pass
def OrientJoint(*args, **kwargs):
pass
def ResolveInterpenetrationOptions(*args, **kwargs):
pass
def geometryExportCacheOpt(*args, **kwargs):
pass
def BevelPolygon(*args, **kwargs):
pass
def AttachToPath(*args, **kwargs):
pass
def TrimTool(*args, **kwargs):
pass
def PolygonPasteOptions(*args, **kwargs):
pass
def LowQualityDisplay(*args, **kwargs):
pass
def PolyMergeVerticesOptions(*args, **kwargs):
pass
def AddBlendShapeOptions(*args, **kwargs):
pass
def MoveDown(*args, **kwargs):
pass
def CreateCameraAimOptions(*args, **kwargs):
pass
def ScaleToolMarkingMenu(*args, **kwargs):
pass
def ShowModelingUI(*args, **kwargs):
pass
def RepeatLast(*args, **kwargs):
    """No-op stub for the Maya ``RepeatLast`` runtime command.

    All positional and keyword arguments are discarded; returns ``None``.
    """
    return None
def ChangeNormalSize(*args, **kwargs):
pass
def HypershadeFrameAll(*args, **kwargs):
pass
def BakeSpringAnimation(*args, **kwargs):
pass
def RenderTextureRange(*args, **kwargs):
pass
def ToggleOppositeFlagOfSelectedShapes(*args, **kwargs):
pass
def ModifyOpacityPress(*args, **kwargs):
pass
def ShowLattices(*args, **kwargs):
pass
def ToggleWireframeInArtisan(*args, **kwargs):
pass
def ExportProxyContainerOptions(*args, **kwargs):
pass
def CreateSpotLight(*args, **kwargs):
pass
def ModifyOpacityRelease(*args, **kwargs):
pass
def SculptMeshDeactivateBrushStrength(*args, **kwargs):
pass
def ToggleShowResults(*args, **kwargs):
pass
def CreateGhost(*args, **kwargs):
pass
def ExportOfflineFileFromRefEd(*args, **kwargs):
pass
def InsertKeyToolActivate(*args, **kwargs):
pass
def RenameCurrentColorSet(*args, **kwargs):
pass
def WireToolOptions(*args, **kwargs):
pass
def MirrorSubdivSurface(*args, **kwargs):
pass
def OpenCloseCurveOptions(*args, **kwargs):
pass
def RemoveSubdivProxyMirrorOptions(*args, **kwargs):
pass
def DeleteTimeWarp(*args, **kwargs):
pass
def HypershadeDeleteUnusedNodes(*args, **kwargs):
pass
def AddPondSurfaceLocator(*args, **kwargs):
pass
def NodeEditorToggleNodeSwatchSize(*args, **kwargs):
pass
def RemoveMaterialSoloing(*args, **kwargs):
pass
def ShowMeshRelaxToolOptions(*args, **kwargs):
pass
def ArtPaintSkinWeightsToolOptions(*args, **kwargs):
pass
def CreatePondOptions(*args, **kwargs):
pass
def SplitEdgeRingToolOptions(*args, **kwargs):
pass
def PolyMergeOptions(*args, **kwargs):
pass
def TogglePolyDisplayHardEdges(*args, **kwargs):
pass
def FilletBlendToolOptions(*args, **kwargs):
pass
def AddOceanPreviewPlane(*args, **kwargs):
pass
def PolyExtrudeVertices(*args, **kwargs):
pass
def PickColorDeactivate(*args, **kwargs):
pass
def SelectAllFurs(*args, **kwargs):
pass
def PruneSculpt(*args, **kwargs):
pass
def attachFluidCache(*args, **kwargs):
pass
def AddKeysToolOptions(*args, **kwargs):
pass
def HypershadeToggleTransformDisplay(*args, **kwargs):
pass
def DeleteFBIKBodyPartKeys(*args, **kwargs):
pass
def PolyExtrudeEdges(*args, **kwargs):
pass
def SoftModDeformer(*args, **kwargs):
pass
def DecreaseManipulatorSize(*args, **kwargs):
pass
def DeleteAllChannels(*args, **kwargs):
pass
def DeleteAllNCloths(*args, **kwargs):
pass
def AddInfluence(*args, **kwargs):
pass
def AppendToHairCacheOptions(*args, **kwargs):
pass
def PolyCreaseToolOptions(*args, **kwargs):
pass
def DollyTool(*args, **kwargs):
pass
def GreasePencilTool(*args, **kwargs):
pass
def MirrorJointOptions(*args, **kwargs):
pass
def CreateNURBSTorus(*args, **kwargs):
pass
def ExtractFaceOptions(*args, **kwargs):
pass
def SetNClothStartFromMesh(*args, **kwargs):
pass
def Create3DContainer(*args, **kwargs):
pass
def PoleVectorConstraintOptions(*args, **kwargs):
pass
def Export(*args, **kwargs):
    """No-op stub for the Maya ``Export`` runtime command.

    Ignores whatever it is called with and returns ``None``.
    """
    return None
def HypershadeShapeMenuStateAllExceptShadingGroupMembers(*args, **kwargs):
pass
def SetMeshSculptTool(*args, **kwargs):
pass
def PasteVertexSkinWeights(*args, **kwargs):
pass
def SmoothingDisplayShowBoth(*args, **kwargs):
pass
def PointOnPolyConstraint(*args, **kwargs):
pass
def DeleteAllCameras(*args, **kwargs):
pass
def HideLightManipulators(*args, **kwargs):
pass
def SetMeshKnifeTool(*args, **kwargs):
pass
def ShrinkPolygonSelectionRegion(*args, **kwargs):
pass
def UnitizeUVsOptions(*args, **kwargs):
pass
def HIKSetSelectionKey(*args, **kwargs):
pass
def MergeCharacterSet(*args, **kwargs):
pass
def SelectFacePath(*args, **kwargs):
pass
def SetMeshFoamyTool(*args, **kwargs):
pass
def ToggleCreaseVertices(*args, **kwargs):
pass
def SelectEdgeLoop(*args, **kwargs):
pass
def EnableExpressions(*args, **kwargs):
pass
def SetMaxInfluences(*args, **kwargs):
pass
def ThreeLeftSplitViewArrangement(*args, **kwargs):
pass
def RotateToolMarkingMenu(*args, **kwargs):
pass
def SelectContiguousEdges(*args, **kwargs):
pass
def SetKeyPath(*args, **kwargs):
pass
def HypershadeOpenUVEditorWindow(*args, **kwargs):
pass
def BreakTangent(*args, **kwargs):
pass
def PartitionEditor(*args, **kwargs):
pass
def AssignOfflineFileOptions(*args, **kwargs):
pass
def ResetReflectionOptions(*args, **kwargs):
pass
def geometryAppendCache(*args, **kwargs):
pass
def UIModeMarkingMenu(*args, **kwargs):
pass
def SelectBrushNames(*args, **kwargs):
pass
def CurlCurvesOptions(*args, **kwargs):
pass
def ToggleLocalRotationAxes(*args, **kwargs):
pass
def TransformPolygonComponent(*args, **kwargs):
pass
def CreateSubdivCylinder(*args, **kwargs):
pass
def SelectToolMarkingMenu(*args, **kwargs):
pass
def SelectAllStrokes(*args, **kwargs):
pass
def LockContainer(*args, **kwargs):
pass
def CreateConstructionPlane(*args, **kwargs):
pass
def SelectMaskToolMarkingMenu(*args, **kwargs):
pass
def ActivateFullBodyPivot(*args, **kwargs):
pass
def DisplayShaded(*args, **kwargs):
pass
def HypershadeImport(*args, **kwargs):
pass
def SelectAllNParticles(*args, **kwargs):
pass
def CVHardness(*args, **kwargs):
pass
def HypergraphWindow(*args, **kwargs):
pass
def HypershadeDisplayInterestingShapes(*args, **kwargs):
pass
def AverageVertex(*args, **kwargs):
pass
def CreateTextOptions(*args, **kwargs):
pass
def nClothReplaceFramesOpt(*args, **kwargs):
pass
def HideNonlinears(*args, **kwargs):
pass
def ToggleVertIDs(*args, **kwargs):
pass
def Boundary(*args, **kwargs):
    """No-op stub for the Maya ``Boundary`` runtime command.

    Accepts any arguments without using them; always returns ``None``.
    """
    return None
def WhatsNewStartupDialogOn(*args, **kwargs):
pass
def CreateSculptDeformerOptions(*args, **kwargs):
pass
def JointTool(*args, **kwargs):
pass
def CreateEmptyUVSet(*args, **kwargs):
pass
def ToggleFBIKEffectorsTranslatePinState(*args, **kwargs):
pass
def MapUVBorder(*args, **kwargs):
pass
def IncrementAndSave(*args, **kwargs):
pass
def DeviceEditor(*args, **kwargs):
pass
def BevelPolygonOptions(*args, **kwargs):
pass
def NURBSSmoothnessFineOptions(*args, **kwargs):
pass
def SubdivSmoothnessFine(*args, **kwargs):
pass
def OffsetCurve(*args, **kwargs):
pass
def nClothReplaceCacheOpt(*args, **kwargs):
pass
def DeleteMotionPaths(*args, **kwargs):
pass
def HypershadeDeleteAllBakeSets(*args, **kwargs):
pass
def AttachCurveOptions(*args, **kwargs):
pass
def RecentCommandsWindow(*args, **kwargs):
pass
def HypershadeConvertToFileTexture(*args, **kwargs):
pass
def MakeMotorBoatsOptions(*args, **kwargs):
pass
def MovePolygonComponent(*args, **kwargs):
pass
def CreatePolygonSoccerBall(*args, **kwargs):
pass
def FloatSelectedPondObjectsOptions(*args, **kwargs):
pass
def ToggleOriginAxis(*args, **kwargs):
pass
def PlaybackBackward(*args, **kwargs):
pass
def MakeHoleToolOptions(*args, **kwargs):
pass
def ExtrudeOptions(*args, **kwargs):
pass
def ConvertSelectionToUVBorder(*args, **kwargs):
pass
def SculptSurfacesToolOptions(*args, **kwargs):
pass
def SplitVertex(*args, **kwargs):
pass
def ShowCameras(*args, **kwargs):
pass
def ColorPreferencesWindow(*args, **kwargs):
pass
def SnapKeys(*args, **kwargs):
pass
def ShowAttributeEditorOrChannelBox(*args, **kwargs):
pass
def QuadrangulateOptions(*args, **kwargs):
pass
def Smoke(*args, **kwargs):
pass
def HypershadeReduceTraversalDepth(*args, **kwargs):
pass
def AlignUVOptions(*args, **kwargs):
pass
def CurveFlowOptions(*args, **kwargs):
pass
def ProjectCurveOnMeshOptions(*args, **kwargs):
pass
def GraphDelete(*args, **kwargs):
pass
def ShowAll(*args, **kwargs):
pass
def nClothAppend(*args, **kwargs):
pass
def CreateNURBSCubeOptions(*args, **kwargs):
pass
def ExtendCurve(*args, **kwargs):
pass
def Shatter(*args, **kwargs):
pass
def DragOptions(*args, **kwargs):
pass
def setDynStartState(*args, **kwargs):
pass
def HypershadeSelectShadingGroupsAndMaterials(*args, **kwargs):
pass
def SewUVs(*args, **kwargs):
pass
def SmoothHairCurves(*args, **kwargs):
pass
def NodeEditorGraphDownstream(*args, **kwargs):
pass
def UntemplateObject(*args, **kwargs):
pass
def CreateVolumeCone(*args, **kwargs):
pass
def DecreaseExposureCoarse(*args, **kwargs):
pass
def SetToFaceNormals(*args, **kwargs):
pass
def Newton(*args, **kwargs):
pass
def UnpublishAttributes(*args, **kwargs):
pass
def nClothDeleteCacheOpt(*args, **kwargs):
pass
def NodeEditorShowConnectedAttrs(*args, **kwargs):
pass
def MakePressureCurve(*args, **kwargs):
pass
def SetRigidBodyInterpenetration(*args, **kwargs):
pass
def PaintOperationMarkingMenuRelease(*args, **kwargs):
pass
def NodeEditorShapeMenuStateAll(*args, **kwargs):
pass
def EditTexture(*args, **kwargs):
pass
def HypershadeRemoveTab(*args, **kwargs):
pass
def CreateClusterOptions(*args, **kwargs):
pass
def PaintCacheTool(*args, **kwargs):
pass
def IncreaseGammaCoarse(*args, **kwargs):
pass
def NodeEditorSelectUpStream(*args, **kwargs):
pass
def ClosestPointOn(*args, **kwargs):
pass
def Birail3Options(*args, **kwargs):
pass
def CustomPolygonDisplayOptions(*args, **kwargs):
pass
def CreateCharacter(*args, **kwargs):
pass
def UngroupOptions(*args, **kwargs):
pass
def TwistOptions(*args, **kwargs):
pass
def CreateVolumeCube(*args, **kwargs):
pass
def MakeCollideOptions(*args, **kwargs):
pass
def CreateCameraAimUp(*args, **kwargs):
pass
def PolygonApplyColor(*args, **kwargs):
pass
def NodeEditorPickWalkUp(*args, **kwargs):
pass
def AddPondBoatLocatorOptions(*args, **kwargs):
pass
def LatticeDeformKeysToolOptions(*args, **kwargs):
pass
def DuplicateEdges(*args, **kwargs):
pass
def nucleusDisplayDynamicConstraintNodes(*args, **kwargs):
pass
def TangentsFixed(*args, **kwargs):
pass
def CircularFillet(*args, **kwargs):
pass
def RevolveOptions(*args, **kwargs):
pass
def DisableParticles(*args, **kwargs):
pass
def HypershadeGraphRemoveDownstream(*args, **kwargs):
pass
def BatchRenderOptions(*args, **kwargs):
pass
def NodeEditorGraphRemoveUpstream(*args, **kwargs):
pass
def HighQualityDisplay(*args, **kwargs):
pass
def AttributeEditor(*args, **kwargs):
pass
def CreateSubdivCube(*args, **kwargs):
pass
def GlobalDiskCacheControl(*args, **kwargs):
pass
def nClothReplaceCache(*args, **kwargs):
pass
def ToggleTextureBorderEdges(*args, **kwargs):
pass
def CreateIllustratorCurvesOptions(*args, **kwargs):
pass
def ToggleCVs(*args, **kwargs):
pass
def FourViewArrangement(*args, **kwargs):
pass
def InteractiveBindSkinOptions(*args, **kwargs):
pass
def SubstituteGeometry(*args, **kwargs):
pass
def VertexNormalEditTool(*args, **kwargs):
pass
def DetachCurveOptions(*args, **kwargs):
pass
def HypershadeDisplayAsSmallSwatches(*args, **kwargs):
pass
def StitchSurfacePoints(*args, **kwargs):
pass
def NodeEditorTransforms(*args, **kwargs):
pass
def RemoveBrushSharing(*args, **kwargs):
pass
def HypershadeConvertPSDToFileTexture(*args, **kwargs):
pass
def AssignOfflineFileFromRefEd(*args, **kwargs):
pass
def RandomizeFollicles(*args, **kwargs):
pass
def ToggleProxyDisplay(*args, **kwargs):
pass
def MakeCollideHair(*args, **kwargs):
pass
def CreatePolygonHelixOptions(*args, **kwargs):
pass
def FitBSplineOptions(*args, **kwargs):
pass
def LevelOfDetailGroup(*args, **kwargs):
pass
def SelectAllLattices(*args, **kwargs):
pass
def HypershadeUpdatePSDNetworks(*args, **kwargs):
pass
def ContourProjectionOptions(*args, **kwargs):
pass
def CreateUVsBasedOnCamera(*args, **kwargs):
pass
def SculptMeshInvertFreeze(*args, **kwargs):
pass
def SphericalProjectionOptions(*args, **kwargs):
pass
def PolygonClearClipboardOptions(*args, **kwargs):
pass
def DeleteAllRigidConstraints(*args, **kwargs):
pass
def SelectPolygonSelectionBoundary(*args, **kwargs):
pass
def PublishChildAnchorOptions(*args, **kwargs):
pass
def HIKBodyPartMode(*args, **kwargs):
pass
def WrinkleToolOptions(*args, **kwargs):
pass
def ModelingPanelRedoViewChange(*args, **kwargs):
pass
def CreateParticleDiskCache(*args, **kwargs):
pass
def ParticleToolOptions(*args, **kwargs):
pass
def GoToNextDrivenKey(*args, **kwargs):
pass
def CreateAmbientLightOptions(*args, **kwargs):
pass
def WireDropoffLocator(*args, **kwargs):
pass
def ExportOfflineFileOptions(*args, **kwargs):
pass
def HypershadeSortByName(*args, **kwargs):
pass
def HypershadeGraphRearrange(*args, **kwargs):
pass
def ConvertToBreakdown(*args, **kwargs):
pass
def Snap3PointsTo3PointsOptions(*args, **kwargs):
pass
def WeightedTangents(*args, **kwargs):
pass
def HypershadeRevertToDefaultTabs(*args, **kwargs):
pass
def DeleteAllExpressions(*args, **kwargs):
pass
def nClothMakeCollideOptions(*args, **kwargs):
pass
def NodeEditorCreateTab(*args, **kwargs):
pass
def SingleViewArrangement(*args, **kwargs):
pass
def NodeEditorCreateNodePopup(*args, **kwargs):
pass
def HypershadeFrameSelected(*args, **kwargs):
pass
def WarpImageOptions(*args, **kwargs):
pass
def CutCurve(*args, **kwargs):
pass
def CutUVs(*args, **kwargs):
pass
def AddPondDynamicBuoyOptions(*args, **kwargs):
pass
def nClothDeleteCacheFramesOpt(*args, **kwargs):
pass
def VolumeAxisOptions(*args, **kwargs):
pass
def MakeLive(*args, **kwargs):
pass
def EnableNRigids(*args, **kwargs):
pass
def ViewImage(*args, **kwargs):
pass
def ToggleAnimationDetails(*args, **kwargs):
pass
def PaintFluidsTool(*args, **kwargs):
pass
def ShowMeshSmoothToolOptions(*args, **kwargs):
pass
def DuplicateWithTransform(*args, **kwargs):
pass
def HypershadePinByDefault(*args, **kwargs):
pass
def BrushPresetBlendOff(*args, **kwargs):
pass
def SculptMeshActivateBrushStrength(*args, **kwargs):
pass
def SelectPolygonToolMarkingMenu(*args, **kwargs):
pass
def UpdateCurrentSceneMudbox(*args, **kwargs):
pass
def ResetWireOptions(*args, **kwargs):
pass
def nClothMergeCacheOpt(*args, **kwargs):
pass
def NodeEditorGraphUpstream(*args, **kwargs):
pass
def CurveSmoothnessFine(*args, **kwargs):
pass
def ScriptPaintTool(*args, **kwargs):
pass
def TrimToolOptions(*args, **kwargs):
pass
def TransplantHairOptions(*args, **kwargs):
pass
def GeometryConstraint(*args, **kwargs):
pass
def LoopBrushAnimation(*args, **kwargs):
pass
def ScaleToolMarkingMenuPopDown(*args, **kwargs):
pass
def SelectObjectsShadowedByLight(*args, **kwargs):
pass
def AddAuxEffector(*args, **kwargs):
pass
def FreezeTransformations(*args, **kwargs):
pass
def ScaleTool(*args, **kwargs):
pass
def DoUnghost(*args, **kwargs):
pass
def HypershadeOpenBrowserWindow(*args, **kwargs):
pass
def SelectHullsMask(*args, **kwargs):
pass
def FrameSelected(*args, **kwargs):
pass
def ChangeEdgeWidth(*args, **kwargs):
pass
def SaveSceneAsOptions(*args, **kwargs):
pass
def CutCurveOptions(*args, **kwargs):
pass
def HypershadeExpandAsset(*args, **kwargs):
pass
def BakeSimulation(*args, **kwargs):
pass
def TexSculptInvertPin(*args, **kwargs):
pass
def HideStrokeControlCurves(*args, **kwargs):
pass
def SaveCurrentLayout(*args, **kwargs):
pass
def geometryReplaceCacheOpt(*args, **kwargs):
pass
def CreateSoftBody(*args, **kwargs):
pass
def GeometryToBoundingBox(*args, **kwargs):
pass
def TestTextureOptions(*args, **kwargs):
pass
def CreateFluidCache(*args, **kwargs):
pass
def RoundToolOptions(*args, **kwargs):
pass
def ScaleCurvatureOptions(*args, **kwargs):
pass
def InsertIsoparmsOptions(*args, **kwargs):
pass
def ProfilerToolThreadView(*args, **kwargs):
pass
def TemplateBrushSettings(*args, **kwargs):
pass
def SubdivSmoothnessRoughOptions(*args, **kwargs):
pass
def OffsetSurfacesOptions(*args, **kwargs):
pass
def AssignOfflineFile(*args, **kwargs):
pass
def DeleteSurfaceFlowOptions(*args, **kwargs):
pass
def ProfilerToolHideSelectedRepetition(*args, **kwargs):
pass
def NodeEditorToggleLockUnlock(*args, **kwargs):
pass
def ReferenceEditor(*args, **kwargs):
pass
def AssetEditor(*args, **kwargs):
pass
def MoveSkinJointsTool(*args, **kwargs):
pass
def CreatePolygonTorusOptions(*args, **kwargs):
pass
def PointConstraint(*args, **kwargs):
pass
def ScaleConstraint(*args, **kwargs):
pass
def PreviousFrame(*args, **kwargs):
pass
def FillHole(*args, **kwargs):
pass
def InTangentFixed(*args, **kwargs):
pass
def ShowMeshPinchToolOptions(*args, **kwargs):
pass
def PerspTextureLayout(*args, **kwargs):
pass
def Art3dPaintTool(*args, **kwargs):
pass
def PrelightPolygon(*args, **kwargs):
pass
def OutlinerToggleIgnoreHidden(*args, **kwargs):
pass
def OutTangentClamped(*args, **kwargs):
pass
def DeleteExpressionsOptions(*args, **kwargs):
pass
def ShowMeshFreezeToolOptions(*args, **kwargs):
pass
def buildSendToBackburnerDialog(*args, **kwargs):
pass
def SnapToPixel(*args, **kwargs):
pass
def CreateRigidBodySolver(*args, **kwargs):
pass
def DeleteAllLights(*args, **kwargs):
pass
def ModifyUpperRadiusPress(*args, **kwargs):
pass
def InsertJointTool(*args, **kwargs):
pass
def ShowMeshAmplifyToolOptions(*args, **kwargs):
pass
def HypershadeDisplayAsMediumSwatches(*args, **kwargs):
pass
def PruneCluster(*args, **kwargs):
pass
def Gravity(*args, **kwargs):
pass
def CreatePose(*args, **kwargs):
pass
def AddToContainerOptions(*args, **kwargs):
pass
def ExtendSurfacesOptions(*args, **kwargs):
pass
def ShowLightManipulators(*args, **kwargs):
pass
def ToggleFaceNormalDisplay(*args, **kwargs):
pass
def Create2DContainerEmitterOptions(*args, **kwargs):
pass
def SetFullBodyIKKeysSelected(*args, **kwargs):
pass
def EnterEditModeRelease(*args, **kwargs):
pass
def HypershadeSetTraversalDepthZero(*args, **kwargs):
pass
def AttachSubdivSurface(*args, **kwargs):
pass
def SmoothSkinWeightsOptions(*args, **kwargs):
pass
def SetFullBodyIKKeys(*args, **kwargs):
pass
def ShowFollicles(*args, **kwargs):
pass
def DeformerSetEditor(*args, **kwargs):
pass
def HideKinematics(*args, **kwargs):
pass
def NextViewArrangement(*args, **kwargs):
pass
def SetEditor(*args, **kwargs):
pass
def ShowDeformingGeometry(*args, **kwargs):
pass
def HIKSetBodyPartKey(*args, **kwargs):
pass
def MediumPolygonNormals(*args, **kwargs):
pass
def ToggleContainerCentric(*args, **kwargs):
pass
def SetBreakdownKeyOptions(*args, **kwargs):
pass
def EnableConstraints(*args, **kwargs):
pass
def AppendToPolygonTool(*args, **kwargs):
pass
def CreatePolygonConeOptions(*args, **kwargs):
pass
def SendAsNewSceneMudbox(*args, **kwargs):
pass
def PaintEffectsToCurve(*args, **kwargs):
pass
def RigidBodySolver(*args, **kwargs):
pass
def HypershadeOpenShaderballWindow(*args, **kwargs):
pass
def BreakRigidBodyConnection(*args, **kwargs):
pass
def SelectVertexFaceMask(*args, **kwargs):
pass
def ResampleCurveOptions(*args, **kwargs):
pass
def U3DBrushSizeOff(*args, **kwargs):
pass
def CreateWrapOptions(*args, **kwargs):
pass
def NURBSSmoothnessRoughOptions(*args, **kwargs):
pass
def WalkTool(*args, **kwargs):
pass
def PolygonBooleanUnionOptions(*args, **kwargs):
pass
def SelectToggleMode(*args, **kwargs):
pass
def ToggleToolbox(*args, **kwargs):
pass
def NURBSSmoothnessHull(*args, **kwargs):
pass
def Lightning(*args, **kwargs):
pass
def ParticleInstancerOptions(*args, **kwargs):
pass
def SelectLightsShadowingObject(*args, **kwargs):
pass
def ATOMTemplateOptions(*args, **kwargs):
pass
def ClearPaintEffectsView(*args, **kwargs):
pass
def ToggleTangentDisplay(*args, **kwargs):
pass
def NEmitFromObjectOptions(*args, **kwargs):
pass
def DisplayLayerEditorWindow(*args, **kwargs):
pass
def CVCurveTool(*args, **kwargs):
pass
def HypergraphHierarchyWindow(*args, **kwargs):
pass
def AveragePolygonNormals(*args, **kwargs):
pass
def CreateSubdivTorus(*args, **kwargs):
pass
def HideNURBSCurves(*args, **kwargs):
pass
def CreateLocator(*args, **kwargs):
pass
def MoveRotateScaleToolToggleSnapRelativeMode(*args, **kwargs):
pass
def SetActiveKey(*args, **kwargs):
pass
def IntersectSurfacesOptions(*args, **kwargs):
pass
def HypershadeRestoreLastClosedTab(*args, **kwargs):
pass
def fluidReplaceCache(*args, **kwargs):
pass
def CreateEmitterOptions(*args, **kwargs):
pass
def SurfaceBooleanUnionToolOptions(*args, **kwargs):
pass
def UnpublishChildAnchor(*args, **kwargs):
pass
def IncreaseGammaFine(*args, **kwargs):
pass
def DetachSurfacesOptions(*args, **kwargs):
pass
def TangetConstraintOptions(*args, **kwargs):
pass
def fluidDeleteCacheFramesOpt(*args, **kwargs):
pass
def SubdivProxy(*args, **kwargs):
pass
def NormalizeUVs(*args, **kwargs):
pass
def ReducePolygon(*args, **kwargs):
pass
def DeleteKeysOptions(*args, **kwargs):
pass
def AttachBrushToCurves(*args, **kwargs):
pass
def fluidAppend(*args, **kwargs):
pass
def ShowUIElements(*args, **kwargs):
pass
def RebuildSurfaces(*args, **kwargs):
pass
def SubdivSmoothnessMediumOptions(*args, **kwargs):
pass
def InTangentAuto(*args, **kwargs):
pass
def ProfilerToolReset(*args, **kwargs):
pass
def CreatePolygonPyramid(*args, **kwargs):
pass
def FloatSelectedObjectsOptions(*args, **kwargs):
pass
def DeleteChannelsOptions(*args, **kwargs):
pass
def ShowStrokePathCurves(*args, **kwargs):
pass
def PlanarProjection(*args, **kwargs):
pass
def dynamicConstraintRemove(*args, **kwargs):
pass
def ExtrudeFace(*args, **kwargs):
pass
def IPRRenderIntoNewWindow(*args, **kwargs):
pass
def ConvertSelectionToShell(*args, **kwargs):
pass
def AssignNewMaterial(*args, **kwargs):
pass
def DeleteAllStaticChannels(*args, **kwargs):
pass
def ShowSculptObjects(*args, **kwargs):
pass
def AimConstraint(*args, **kwargs):
pass
def SculptSubdivsToolOptions(*args, **kwargs):
pass
def TangentsPlateau(*args, **kwargs):
pass
def OptimizeScene(*args, **kwargs):
pass
def DeleteAttribute(*args, **kwargs):
pass
def ShowPlanes(*args, **kwargs):
pass
def PublishRootTransformOptions(*args, **kwargs):
pass
def ProfilerToolCpuView(*args, **kwargs):
pass
def ShowNRigids(*args, **kwargs):
pass
def CreatePointLight(*args, **kwargs):
pass
def CutPolygonOptions(*args, **kwargs):
pass
def nucleusGetnParticleExample(*args, **kwargs):
pass
def GraphCut(*args, **kwargs):
pass
def ToggleIsolateSelect(*args, **kwargs):
pass
def ExportSkinWeightMapsOptions(*args, **kwargs):
pass
def HypershadeToggleNodeSwatchSize(*args, **kwargs):
pass
def nucleusDisplayOtherNodes(*args, **kwargs):
pass
def ToggleEdgeIDs(*args, **kwargs):
pass
def CopySelected(*args, **kwargs):
pass
def OutlinerToggleNamespace(*args, **kwargs):
pass
def ShowMeshRepeatToolOptions(*args, **kwargs):
pass
def HypershadeSelectMaterialsFromObjects(*args, **kwargs):
pass
def CreateOceanWakeOptions(*args, **kwargs):
pass
def NodeEditorGraphAllShapesExceptShading(*args, **kwargs):
pass
def OutlinerToggleAttributes(*args, **kwargs):
pass
def ProductInformation(*args, **kwargs):
pass
def DeactivateGlobalScreenSlider(*args, **kwargs):
pass
def HideFluids(*args, **kwargs):
pass
def NewScene(*args, **kwargs):
pass
def OutlinerDoHide(*args, **kwargs):
pass
def AddInbetween(*args, **kwargs):
pass
def ToggleBorderEdges(*args, **kwargs):
pass
def PaintOnViewPlane(*args, **kwargs):
pass
def SwapBufferCurve(*args, **kwargs):
pass
def EditMembershipTool(*args, **kwargs):
pass
def OrientJointOptions(*args, **kwargs):
pass
def OutlinerWindow(*args, **kwargs):
pass
def ReverseCurveOptions(*args, **kwargs):
pass
def geometryReplaceCacheFramesOpt(*args, **kwargs):
pass
def SelectAllJoints(*args, **kwargs):
pass
def PreviousKey(*args, **kwargs):
pass
def IntersectCurve(*args, **kwargs):
pass
def CustomNURBSSmoothnessOptions(*args, **kwargs):
pass
def OptimzeUVs(*args, **kwargs):
pass
def CopySkinWeightsOptions(*args, **kwargs):
pass
def PositionAlongCurve(*args, **kwargs):
pass
def ExtractSubdivSurfaceVertices(*args, **kwargs):
pass
def MakeCollide(*args, **kwargs):
pass
def OpenScene(*args, **kwargs):
pass
def PolySpinEdgeBackward(*args, **kwargs):
pass
def AddDivisionsOptions(*args, **kwargs):
pass
def SelectAllClusters(*args, **kwargs):
pass
def LastActionTool(*args, **kwargs):
pass
def ParentBaseWireOptions(*args, **kwargs):
pass
def DuplicateCurve(*args, **kwargs):
pass
def TogglePanelMenubar(*args, **kwargs):
pass
def TangentsAuto(*args, **kwargs):
pass
def CharacterMapper(*args, **kwargs):
pass
def ModifyUVVectorPress(*args, **kwargs):
pass
def DisableIKSolvers(*args, **kwargs):
pass
def HypershadeGraphMaterialsOnSelectedObjects(*args, **kwargs):
pass
| 13.375264
| 74
| 0.68232
| 11,856
| 126,851
| 7.30027
| 0.200574
| 0.273824
| 0.383353
| 0.465304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00033
| 0.186881
| 126,851
| 9,483
| 75
| 13.376674
| 0.8388
| 0
| 0
| 0.499895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.499895
| true
| 0.500738
| 0.001687
| 0
| 0.501582
| 0.000422
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
f424eee11d351b093cf0ab404eccedfb1c5de836
| 13,620
|
py
|
Python
|
src/docker-images/gpu-reporter/test/test_reporter.py
|
debbie-alaine/DLWorkspace
|
2888042c0f9388f911bc74fe5ecd20ef3fabd715
|
[
"MIT"
] | 2
|
2019-10-16T23:54:34.000Z
|
2019-11-07T00:08:32.000Z
|
src/docker-images/gpu-reporter/test/test_reporter.py
|
debbie-alaine/DLWorkspace
|
2888042c0f9388f911bc74fe5ecd20ef3fabd715
|
[
"MIT"
] | null | null | null |
src/docker-images/gpu-reporter/test/test_reporter.py
|
debbie-alaine/DLWorkspace
|
2888042c0f9388f911bc74fe5ecd20ef3fabd715
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import os
import sys
import unittest
import yaml
import json
import logging
import logging.config
import datetime
sys.path.append(os.path.abspath("../src"))
import reporter
logger = logging.getLogger(__name__)
class TestReporter(unittest.TestCase):
"""
Test reporter.py
"""
def setUp(self):
try:
os.chdir(os.path.abspath("test"))
except:
pass
configuration_path = "logging.yaml"
if os.path.exists(configuration_path):
with open(configuration_path, 'rt') as f:
logging_configuration = yaml.safe_load(f.read())
logging.config.dictConfig(logging_configuration)
logging.getLogger()
def load_data(self, path):
with open(path) as f:
return f.read()
def get_samples(self, metrics, target_name, labels):
result = []
for m in metrics:
if m.name != target_name:
continue
for sample in m.samples:
add = True
for k, v in labels.items():
if sample.labels.get(k) != v:
add = False
break
if add:
result.append(sample)
return result
def test_calculate(self):
step_seconds = 300
idleness_threshold = 0
end = datetime.datetime.fromtimestamp(1587768375)
calculator = reporter.IdlenessCalculator(step_seconds,
idleness_threshold, end)
raw = json.loads(self.load_data("data/raw.json"))
result = reporter.calculate(raw, calculator)
self.assertTrue(result["31d"]["booked"] >= result["14d"]["booked"])
self.assertTrue(result["14d"]["booked"] >= result["7d"]["booked"])
self.assertTrue(result["31d"]["idle"] >= result["14d"]["idle"])
self.assertTrue(result["14d"]["idle"] >= result["7d"]["idle"])
self.assertEqual(50.0, result["31d"]["next"]["vvv"]["assigned_util"])
self.assertEqual(100.0, result["31d"]["next"]["vvv"]["nonidle_util"])
self.assertEqual(600, result["31d"]["next"]["vvv"]["booked"])
self.assertEqual(300, result["31d"]["next"]["vvv"]["idle"])
self.assertEqual(20.0,
result["31d"]["next"]["platform"]["assigned_util"])
self.assertEqual(20.0,
result["31d"]["next"]["platform"]["nonidle_util"])
self.assertEqual(300, result["31d"]["next"]["platform"]["booked"])
self.assertEqual(0, result["31d"]["next"]["platform"]["idle"])
def assert_metric_value(self, metrics, target_name, labels, value):
target = self.get_samples(metrics, target_name, labels)
self.assertEqual(1, len(target), "zero/multiple target %s" % (target))
self.assertEqual(value, target[0].value)
def test_walk_exported_regiter(self):
obj = json.loads(self.load_data("data/exported.json"))
collector = reporter.CustomCollector(None)
metrics = collector.walk_exported_register(obj)
self.assert_metric_value(metrics, "cluster_booked_gpu_second",
{"since": "31d"}, 900)
self.assert_metric_value(metrics, "cluster_idle_gpu_second",
{"since": "31d"}, 300)
self.assert_metric_value(metrics, "cluster_non_idle_utils",
{"since": "31d"}, 60)
self.assert_metric_value(metrics, "cluster_assigned_utils",
{"since": "31d"}, 40)
self.assert_metric_value(metrics, "cluster_booked_gpu_second",
{"since": "14d"}, 900)
self.assert_metric_value(metrics, "cluster_idle_gpu_second",
{"since": "14d"}, 300)
self.assert_metric_value(metrics, "cluster_non_idle_utils",
{"since": "14d"}, 60)
self.assert_metric_value(metrics, "cluster_assigned_utils",
{"since": "14d"}, 40)
self.assert_metric_value(metrics, "cluster_booked_gpu_second",
{"since": "7d"}, 0)
self.assert_metric_value(metrics, "cluster_idle_gpu_second",
{"since": "7d"}, 0)
self.assert_metric_value(metrics, "cluster_non_idle_utils",
{"since": "7d"}, 0)
self.assert_metric_value(metrics, "cluster_assigned_utils",
{"since": "7d"}, 0)
self.assert_metric_value(metrics, "vc_booked_gpu_second", {
"since": "31d",
"vc": "platform"
}, 300)
self.assert_metric_value(metrics, "vc_booked_gpu_second", {
"since": "31d",
"vc": "vvv"
}, 600)
self.assert_metric_value(metrics, "vc_booked_gpu_second", {
"since": "14d",
"vc": "platform"
}, 300)
self.assert_metric_value(metrics, "vc_booked_gpu_second", {
"since": "14d",
"vc": "vvv"
}, 600)
self.assert_metric_value(metrics, "vc_idle_gpu_second", {
"since": "31d",
"vc": "platform"
}, 0)
self.assert_metric_value(metrics, "vc_idle_gpu_second", {
"since": "31d",
"vc": "vvv"
}, 300)
self.assert_metric_value(metrics, "vc_idle_gpu_second", {
"since": "14d",
"vc": "platform"
}, 0)
self.assert_metric_value(metrics, "vc_idle_gpu_second", {
"since": "14d",
"vc": "vvv"
}, 300)
self.assert_metric_value(metrics, "vc_non_idle_utils", {
"since": "31d",
"vc": "platform"
}, 20)
self.assert_metric_value(metrics, "vc_non_idle_utils", {
"since": "31d",
"vc": "vvv"
}, 100)
self.assert_metric_value(metrics, "vc_non_idle_utils", {
"since": "14d",
"vc": "platform"
}, 20)
self.assert_metric_value(metrics, "vc_non_idle_utils", {
"since": "14d",
"vc": "vvv"
}, 100)
self.assert_metric_value(metrics, "vc_assigned_utils", {
"since": "31d",
"vc": "platform"
}, 20)
self.assert_metric_value(metrics, "vc_assigned_utils", {
"since": "31d",
"vc": "vvv"
}, 50)
self.assert_metric_value(metrics, "vc_assigned_utils", {
"since": "14d",
"vc": "platform"
}, 20)
self.assert_metric_value(metrics, "vc_assigned_utils", {
"since": "14d",
"vc": "vvv"
}, 50)
self.assert_metric_value(metrics, "user_booked_gpu_second", {
"since": "31d",
"vc": "platform",
"user": "bbb"
}, 300)
self.assert_metric_value(metrics, "user_booked_gpu_second", {
"since": "31d",
"vc": "vvv",
"user": "aaa"
}, 600)
self.assert_metric_value(metrics, "user_booked_gpu_second", {
"since": "14d",
"vc": "platform",
"user": "bbb"
}, 300)
self.assert_metric_value(metrics, "user_booked_gpu_second", {
"since": "14d",
"vc": "vvv",
"user": "aaa"
}, 600)
self.assert_metric_value(metrics, "user_idle_gpu_second", {
"since": "31d",
"vc": "platform",
"user": "bbb"
}, 0)
self.assert_metric_value(metrics, "user_idle_gpu_second", {
"since": "31d",
"vc": "vvv",
"user": "aaa"
}, 300)
self.assert_metric_value(metrics, "user_idle_gpu_second", {
"since": "14d",
"vc": "platform",
"user": "bbb"
}, 0)
self.assert_metric_value(metrics, "user_idle_gpu_second", {
"since": "14d",
"vc": "vvv",
"user": "aaa"
}, 300)
self.assert_metric_value(metrics, "user_non_idle_utils", {
"since": "31d",
"vc": "platform",
"user": "bbb"
}, 20)
self.assert_metric_value(metrics, "user_non_idle_utils", {
"since": "31d",
"vc": "vvv",
"user": "aaa"
}, 100)
self.assert_metric_value(metrics, "user_non_idle_utils", {
"since": "14d",
"vc": "platform",
"user": "bbb"
}, 20)
self.assert_metric_value(metrics, "user_non_idle_utils", {
"since": "14d",
"vc": "vvv",
"user": "aaa"
}, 100)
self.assert_metric_value(metrics, "user_assigned_utils", {
"since": "31d",
"vc": "platform",
"user": "bbb"
}, 20)
self.assert_metric_value(metrics, "user_assigned_utils", {
"since": "31d",
"vc": "vvv",
"user": "aaa"
}, 50)
self.assert_metric_value(metrics, "user_assigned_utils", {
"since": "14d",
"vc": "platform",
"user": "bbb"
}, 20)
self.assert_metric_value(metrics, "user_assigned_utils", {
"since": "14d",
"vc": "vvv",
"user": "aaa"
}, 50)
self.assert_metric_value(
metrics, "job_booked_gpu_second", {
"since": "31d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 300)
self.assert_metric_value(
metrics, "job_booked_gpu_second", {
"since": "31d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 600)
self.assert_metric_value(
metrics, "job_booked_gpu_second", {
"since": "14d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 300)
self.assert_metric_value(
metrics, "job_booked_gpu_second", {
"since": "14d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 600)
self.assert_metric_value(
metrics, "job_idle_gpu_second", {
"since": "31d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 0)
self.assert_metric_value(
metrics, "job_idle_gpu_second", {
"since": "31d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 300)
self.assert_metric_value(
metrics, "job_idle_gpu_second", {
"since": "14d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 0)
self.assert_metric_value(
metrics, "job_idle_gpu_second", {
"since": "14d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 300)
self.assert_metric_value(
metrics, "job_non_idle_utils", {
"since": "31d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 20)
self.assert_metric_value(
metrics, "job_non_idle_utils", {
"since": "31d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 100)
self.assert_metric_value(
metrics, "job_non_idle_utils", {
"since": "14d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 20)
self.assert_metric_value(
metrics, "job_non_idle_utils", {
"since": "14d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 100)
self.assert_metric_value(
metrics, "job_assigned_utils", {
"since": "31d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 20)
self.assert_metric_value(
metrics, "job_assigned_utils", {
"since": "31d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 50)
self.assert_metric_value(
metrics, "job_assigned_utils", {
"since": "14d",
"vc": "platform",
"user": "bbb",
"job_id": "89ba301e-58d0-4ce5-ba6b-5781a7926f63"
}, 20)
self.assert_metric_value(
metrics, "job_assigned_utils", {
"since": "14d",
"vc": "vvv",
"user": "aaa",
"job_id": "f29f07ab-8510-4ad9-ac24-363bd7271571"
}, 50)
# Allow running this test module directly: `python test_reporter.py`.
if __name__ == '__main__':
    unittest.main()
| 35.748031
| 78
| 0.487445
| 1,327
| 13,620
| 4.743783
| 0.103994
| 0.116283
| 0.164734
| 0.200159
| 0.760445
| 0.740111
| 0.722319
| 0.722319
| 0.70834
| 0.693249
| 0
| 0.077279
| 0.365345
| 13,620
| 380
| 79
| 35.842105
| 0.650972
| 0.00279
| 0
| 0.778409
| 0
| 0
| 0.238685
| 0.075483
| 0
| 0
| 0
| 0
| 0.213068
| 1
| 0.017045
| false
| 0.002841
| 0.025568
| 0
| 0.051136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f431ff39b73829ca20eb29f0720a42f69cc40d0f
| 113
|
py
|
Python
|
src/nukleus/Typing.py
|
spielhuus/nukleus
|
55d7ae6051720213024fa20c8c9a92110f5566ce
|
[
"MIT"
] | null | null | null |
src/nukleus/Typing.py
|
spielhuus/nukleus
|
55d7ae6051720213024fa20c8c9a92110f5566ce
|
[
"MIT"
] | null | null | null |
src/nukleus/Typing.py
|
spielhuus/nukleus
|
55d7ae6051720213024fa20c8c9a92110f5566ce
|
[
"MIT"
] | null | null | null |
from typing import Tuple, TypeAlias, List
# A 2-D coordinate as an (x, y) pair.  `TypeAlias` requires Python 3.10+.
POS_T: TypeAlias = Tuple[float, float]
# A sequence of coordinates (e.g. a polyline) built from POS_T pairs.
PTS_T: TypeAlias = List[POS_T]
| 22.6
| 41
| 0.761062
| 18
| 113
| 4.611111
| 0.555556
| 0.313253
| 0.385542
| 0.409639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141593
| 113
| 4
| 42
| 28.25
| 0.85567
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f490026de3dabd7ef88f192dfea28f12d103a32d
| 2,726
|
py
|
Python
|
secondLineFollowers/tests.py
|
LewickiMaciej/twitterFollowers
|
839278d77c4c63e2f7c077c007f20b3c9af134a7
|
[
"MIT"
] | null | null | null |
secondLineFollowers/tests.py
|
LewickiMaciej/twitterFollowers
|
839278d77c4c63e2f7c077c007f20b3c9af134a7
|
[
"MIT"
] | null | null | null |
secondLineFollowers/tests.py
|
LewickiMaciej/twitterFollowers
|
839278d77c4c63e2f7c077c007f20b3c9af134a7
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from .twitterFollowers import TwitterFollowers
class TwitterAddFollowersToListTests(TestCase):
    """Unit tests for TwitterFollowers.addFollowersToList: merging a batch of
    followers-of-a-follower into the running second-line counters."""

    def _merge(self, counters, incoming, seen):
        """Invoke the method under test (it ignores its `self`, hence None)."""
        TwitterFollowers.addFollowersToList(None, counters, incoming, seen)

    def test_add_followers_to_list_only_insert(self):
        """Disjoint incoming names are inserted with a count of 1."""
        counters = {'aaa': 1, 'ccc': 1, 'eee': 1}
        incoming = {'bbb', 'ddd', 'fff'}
        seen = {'aaa', 'ccc', 'eee'}
        expected = {'aaa': 1, 'bbb': 1, 'ccc': 1, 'ddd': 1, 'eee': 1, 'fff': 1}
        self._merge(counters, incoming, seen)
        self.assertEqual(counters, expected)

    def test_count_follower(self):
        """Already-known names are incremented, new ones start at 1."""
        counters = {'aaa': 2, 'ccc': 1, 'eee': 1}
        incoming = {'aaa', 'bbb', 'ddd', 'eee', 'fff'}
        seen = {'aaa', 'ccc', 'eee'}
        expected = {'aaa': 3, 'bbb': 1, 'ccc': 1, 'ddd': 1, 'eee': 2, 'fff': 1}
        self._merge(counters, incoming, seen)
        self.assertEqual(counters, expected)

    def test_followers_are_longer(self):
        """Incoming batch larger than the existing counter map."""
        counters = {'aaa': 4, 'ccc': 7, 'eee': 2}
        incoming = {'aaa', 'bbb', 'ddd', 'fff', 'ggg'}
        seen = {'aaa', 'ccc', 'eee'}
        expected = {'aaa': 5, 'bbb': 1, 'ccc': 7, 'ddd': 1,
                    'eee': 2, 'fff': 1, 'ggg': 1
                    }
        self._merge(counters, incoming, seen)
        self.assertEqual(counters, expected)

    def test_second_line_is_longer(self):
        """Counter map larger than the batch; the seen-set gains new names."""
        counters = {'aaa': 4, 'ccc': 7, 'eee': 2}
        incoming = {'aaa', 'bbb'}
        seen = {'aaa', 'ccc', 'eee'}
        expected = {'aaa': 5, 'bbb': 1, 'ccc': 7, 'eee': 2}
        expected_seen = {'aaa', 'bbb', 'ccc', 'eee'}
        self._merge(counters, incoming, seen)
        self.assertEqual(expected_seen, seen)
        self.assertEqual(counters, expected)

    def test_second_line_is_empty(self):
        """Merging into empty state simply inserts everything."""
        counters = {}
        incoming = {'aaa', 'bbb'}
        seen = set()
        expected = {'aaa': 1, 'bbb': 1}
        self._merge(counters, incoming, seen)
        self.assertEqual(counters, expected)

    def test_followers_of_follower_is_empty(self):
        """An empty incoming batch leaves the counters untouched."""
        counters = {'aaa': 4, 'ccc': 7, 'eee': 2}
        incoming = set()
        seen = {'aaa', 'ccc', 'eee'}
        expected = {'aaa': 4, 'ccc': 7, 'eee': 2}
        self._merge(counters, incoming, seen)
        self.assertEqual(counters, expected)
| 30.977273
| 71
| 0.695525
| 368
| 2,726
| 4.8125
| 0.119565
| 0.186335
| 0.193111
| 0.110107
| 0.867871
| 0.830604
| 0.791643
| 0.744212
| 0.744212
| 0.702993
| 0
| 0.018566
| 0.150404
| 2,726
| 87
| 72
| 31.333333
| 0.746114
| 0
| 0
| 0.469697
| 0
| 0
| 0.086941
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 1
| 0.090909
| false
| 0
| 0.030303
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be317a94213ced197fe16aa03399ed656ffb70c2
| 14,472
|
py
|
Python
|
ckanext/validation/tests/test_plugin.py
|
fjelltopp/ckanext-validation
|
b6bb42a10cbc9dc91bc9089b17ef7062c3dd931a
|
[
"MIT"
] | null | null | null |
ckanext/validation/tests/test_plugin.py
|
fjelltopp/ckanext-validation
|
b6bb42a10cbc9dc91bc9089b17ef7062c3dd931a
|
[
"MIT"
] | 12
|
2020-10-30T16:52:53.000Z
|
2021-11-08T08:38:13.000Z
|
ckanext/validation/tests/test_plugin.py
|
fjelltopp/ckanext-validation
|
b6bb42a10cbc9dc91bc9089b17ef7062c3dd931a
|
[
"MIT"
] | 1
|
2021-07-22T11:55:44.000Z
|
2021-07-22T11:55:44.000Z
|
import mock
from nose.tools import assert_equals
import pytest
from ckan.tests.helpers import call_action, reset_db
from ckan.tests import factories
from ckan.tests.helpers import change_config
from ckanext.validation.model import create_tables, tables_exist
from ckanext.validation.jobs import run_validation_job
@pytest.mark.ckan_config('ckan.plugins', 'validation')
@pytest.mark.usefixtures('with_plugins')
class TestResourceControllerHooksUpdate(object):
    """Checks which ``resource_update`` calls enqueue a validation job.

    ``ckanext.validation.logic.enqueue_job`` is patched in every test,
    so assertions are made against the mock rather than real jobs.
    """

    def setup(self):
        # Start each test from a clean database; the extension's own
        # tables are created lazily on first use.
        reset_db()
        if not tables_exist():
            create_tables()

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_on_other_fields(self, mock_enqueue):
        # Editing only the description of a CSV resource must not
        # trigger a validation job.
        resource = {'format': 'CSV'}
        dataset = factories.Dataset(resources=[resource])
        dataset['resources'][0]['description'] = 'Some resource'
        call_action('resource_update', {}, **dataset['resources'][0])
        mock_enqueue.assert_not_called()

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_keys_persisted(self, mock_enqueue):
        # validation_* keys that are absent from the update payload must
        # be restored from the stored resource after the update.
        original_resource = {
            "format": "CSV",
            "validation_timestamp": "2021-09-02T10:02:42.936205",
            "validation_status": "failed"
        }
        dataset = factories.Dataset(resources=[original_resource])
        updated_resource = dataset['resources'][0]
        updated_resource['description'] = 'Some resource'
        del updated_resource['validation_timestamp']
        del updated_resource['validation_status']
        updated_resource = call_action('resource_update', {}, **updated_resource)
        assert updated_resource['validation_timestamp'] == original_resource['validation_timestamp']
        assert updated_resource['validation_status'] == original_resource['validation_status']

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_on_other_formats(self, mock_enqueue):
        # Non-tabular formats (PDF here) are never validated.
        resource = {'format': 'PDF'}
        dataset = factories.Dataset(resources=[resource])
        call_action('resource_update', {}, **dataset['resources'][0])
        mock_enqueue.assert_not_called()

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_on_upload(self, mock_enqueue):
        # An uploaded CSV enqueues exactly one run_validation_job for
        # the updated resource.
        resource = {
            'format': 'CSV',
            'upload': 'mock_upload',
            'url_type': 'upload'
        }
        dataset = factories.Dataset(resources=[resource])
        call_action('resource_update', {}, **dataset['resources'][0])
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(
            mock_enqueue.call_args[0][1][0]['id'],
            dataset['resources'][0]['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_on_url_change(self, mock_enqueue):
        # Changing the URL of a CSV resource triggers one validation job.
        resource = {'format': 'CSV', 'url': 'https://some.url'}
        dataset = factories.Dataset(resources=[resource])
        dataset['resources'][0]['url'] = 'https://some.new.url'
        call_action('resource_update', {}, **dataset['resources'][0])
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(
            mock_enqueue.call_args[0][1][0]['id'],
            dataset['resources'][0]['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_on_schema_change(self, mock_enqueue):
        # Changing the resource schema also triggers one validation job.
        resource = {
            'url': 'http://some.url',
            'format': 'CSV',
            'schema': {
                'fields': [
                    {'name': 'code'}
                ]
            }
        }
        dataset = factories.Dataset(resources=[resource])
        dataset['resources'][0]['schema'] = {
            'fields': [
                {'name': 'code'},
                {'name': 'date'}
            ]
        }
        call_action('resource_update', {}, **dataset['resources'][0])
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(
            mock_enqueue.call_args[0][1][0]['id'],
            dataset['resources'][0]['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_on_format_change(self, mock_enqueue):
        # Switching an existing resource's format to CSV triggers a job.
        resource = factories.Resource()
        resource['format'] = 'CSV'
        call_action('resource_update', {}, **resource)
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @change_config('ckanext.validation.run_on_update_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_when_config_false(self, mock_enqueue):
        # With run_on_update_async disabled, even a URL change must not
        # enqueue a job.
        resource = factories.Resource(format='CSV')
        resource['url'] = 'http://some.new.url'
        call_action('resource_update', {}, **resource)
        mock_enqueue.assert_not_called()
@pytest.mark.ckan_config(u'ckan.plugins', u'validation')
@pytest.mark.usefixtures(u'with_plugins')
class TestResourceControllerHooksCreate(object):
    """Checks which ``resource_create`` calls enqueue a validation job.

    ``ckanext.validation.logic.enqueue_job`` is patched in every test,
    so assertions are made against the mock rather than real jobs.
    """

    def setup(self):
        # Consistency fix: the three sibling hook-test classes reset the
        # database and ensure the extension tables exist before each
        # test; this class previously relied on whatever state earlier
        # classes left behind.
        reset_db()
        if not tables_exist():
            create_tables()

    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_on_other_formats(self, mock_enqueue):
        # Non-tabular formats (PDF here) are never validated on create.
        factories.Resource(format='PDF')
        mock_enqueue.assert_not_called()

    @mock.patch('ckanext.validation.logic.enqueue_job')
    @change_config('ckanext.validation.run_on_update_async', False)
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_with_upload(self, mock_enqueue):
        # Creating an uploaded CSV resource enqueues one job for it.
        resource = factories.Resource(format='CSV', url_type='upload')
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource['id'])

    @mock.patch('ckanext.validation.logic.enqueue_job')
    @change_config('ckanext.validation.run_on_update_async', False)
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_with_url(self, mock_enqueue):
        # Creating a CSV resource with a URL enqueues one job for it.
        resource = factories.Resource(format='CSV', url='http://some.data')
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @change_config('ckanext.validation.run_on_update_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_when_config_false(self, mock_enqueue):
        # With run_on_create_async disabled, no job is enqueued on create.
        dataset = factories.Dataset()
        resource = {
            'format': 'CSV',
            'url': 'http://some.data',
            'package_id': dataset['id'],
        }
        call_action('resource_create', {}, **resource)
        mock_enqueue.assert_not_called()
@pytest.mark.ckan_config(u'ckan.plugins', u'validation')
@pytest.mark.usefixtures(u'with_plugins')
class TestPackageControllerHooksCreate(object):
    """Checks which dataset (package) creations enqueue a validation job.

    ``ckanext.validation.logic.enqueue_job`` is patched in every test,
    so assertions are made against the mock rather than real jobs.
    """

    def setup(self):
        # Start each test from a clean database; the extension's own
        # tables are created lazily on first use.
        reset_db()
        if not tables_exist():
            create_tables()

    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_on_other_formats(self, mock_enqueue):
        # Datasets whose only resource is non-tabular are not validated.
        factories.Dataset(resources=[{'format': 'PDF'}])
        mock_enqueue.assert_not_called()

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_when_config_false(self, mock_enqueue):
        # With run_on_create_async disabled, nothing is enqueued even
        # for a CSV resource.
        factories.Dataset(resources=[
            {'format': 'CSV', 'url': 'http://some.data'}])
        mock_enqueue.assert_not_called()

    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_with_upload(self, mock_enqueue):
        # Creating a dataset with an uploaded CSV enqueues one job.
        resource = {
            'id': 'test-resource-id',
            'format': 'CSV',
            'url_type': 'upload'
        }
        factories.Dataset(resources=[resource])
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource['id'])

    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_with_url(self, mock_enqueue):
        # Creating a dataset with a linked CSV enqueues one job.
        resource = {
            'id': 'test-resource-id',
            'format': 'CSV',
            'url': 'http://some.data'
        }
        factories.Dataset(resources=[resource])
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource['id'])

    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_only_supported_formats(self, mock_enqueue):
        # Of a CSV + PDF pair, only the CSV resource gets a job.
        resource1 = {
            'id': 'test-resource-id-1',
            'format': 'CSV',
            'url': 'http://some.data'
        }
        resource2 = {
            'id': 'test-resource-id-2',
            'format': 'PDF',
            'url': 'http://some.doc'
        }
        factories.Dataset(resources=[resource1, resource2])
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource1['id'])
@pytest.mark.ckan_config(u'ckan.plugins', u'validation')
@pytest.mark.usefixtures(u'with_plugins')
class TestPackageControllerHooksUpdate(object):
    """Checks which ``package_update`` calls enqueue a validation job.

    ``ckanext.validation.logic.enqueue_job`` is patched in every test,
    so assertions are made against the mock rather than real jobs.
    """

    def setup(self):
        # Start each test from a clean database; the extension's own
        # tables are created lazily on first use.
        reset_db()
        if not tables_exist():
            create_tables()

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_runs_with_url(self, mock_enqueue):
        # No job on create (config disabled above); one job when the
        # CSV resource URL changes via package_update.
        resource = {
            'id': 'test-resource-id',
            'format': 'CSV',
            'url': 'http://some.data'
        }
        dataset = factories.Dataset(resources=[resource], id='myid')
        mock_enqueue.assert_not_called()
        dataset['resources'][0]['url'] = 'http://some.other.data'
        call_action('package_update', {}, **dataset)
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_runs_with_upload(self, mock_enqueue):
        # Same as above but for an uploaded resource.
        resource = {
            'id': 'test-resource-id',
            'format': 'CSV',
            'url_type': 'upload'
        }
        dataset = factories.Dataset(resources=[resource])
        mock_enqueue.assert_not_called()
        dataset['resources'][0]['url'] = 'http://some.other.data'
        call_action('package_update', {}, **dataset)
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_does_not_run_on_other_formats(self, mock_enqueue):
        # Non-tabular resources never trigger a job, even on URL change.
        resource = {
            'id': 'test-resource-id',
            'format': 'PDF',
            'url': 'http://some.doc'
        }
        dataset = factories.Dataset(resources=[resource])
        mock_enqueue.assert_not_called()
        dataset['resources'][0]['url'] = 'http://some.other.doc'
        call_action('package_update', {}, **dataset)
        mock_enqueue.assert_not_called()

    @change_config('ckanext.validation.run_on_create_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    @pytest.mark.skip(reason="Test fails in 2.9")
    def test_validation_run_only_supported_formats(self, mock_enqueue):
        # Of a CSV + PDF pair, only the changed CSV resource gets a job.
        resource1 = {
            'id': 'test-resource-id-1',
            'format': 'CSV',
            'url': 'http://some.data'
        }
        resource2 = {
            'id': 'test-resource-id-2',
            'format': 'PDF',
            'url': 'http://some.doc'
        }
        dataset = factories.Dataset(resources=[resource1, resource2])
        mock_enqueue.assert_not_called()
        dataset['resources'][0]['url'] = 'http://some.other.data'
        call_action('package_update', {}, **dataset)
        assert_equals(mock_enqueue.call_count, 1)
        assert_equals(mock_enqueue.call_args[0][0], run_validation_job)
        assert_equals(mock_enqueue.call_args[0][1][0]['id'], resource1['id'])

    @change_config('ckanext.validation.run_on_create_async', False)
    @change_config('ckanext.validation.run_on_update_async', False)
    @mock.patch('ckanext.validation.logic.enqueue_job')
    def test_validation_does_not_run_when_config_false(self, mock_enqueue):
        # With both async-run settings disabled, package_update must not
        # enqueue anything.
        resource = {
            'id': 'test-resource-id',
            'format': 'CSV',
            'url': 'http://some.data'
        }
        dataset = factories.Dataset(resources=[resource])
        call_action('package_update', {}, **dataset)
        mock_enqueue.assert_not_called()
| 33.971831
| 100
| 0.653607
| 1,735
| 14,472
| 5.167723
| 0.062824
| 0.087107
| 0.064243
| 0.092349
| 0.870734
| 0.844858
| 0.827682
| 0.810618
| 0.787196
| 0.778943
| 0
| 0.012853
| 0.204326
| 14,472
| 425
| 101
| 34.051765
| 0.765784
| 0
| 0
| 0.724832
| 0
| 0
| 0.236802
| 0.109038
| 0
| 0
| 0
| 0
| 0.174497
| 1
| 0.083893
| false
| 0
| 0.026846
| 0
| 0.124161
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be395be6e4138d7be2b7373988d62d3cc42e4279
| 24,372
|
py
|
Python
|
swagger_client/api/custom_speech_datasets_for_model_adaptation_api.py
|
networthdata/generated-swagger-client
|
41dd3fb02b322ed1d39cbaef6b4091ae6cab0d0b
|
[
"MIT"
] | null | null | null |
swagger_client/api/custom_speech_datasets_for_model_adaptation_api.py
|
networthdata/generated-swagger-client
|
41dd3fb02b322ed1d39cbaef6b4091ae6cab0d0b
|
[
"MIT"
] | null | null | null |
swagger_client/api/custom_speech_datasets_for_model_adaptation_api.py
|
networthdata/generated-swagger-client
|
41dd3fb02b322ed1d39cbaef6b4091ae6cab0d0b
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Speech Services API v2.0
Speech Services API v2.0. # noqa: E501
OpenAPI spec version: v2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class CustomSpeechDatasetsForModelAdaptationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_dataset(self, id, **kwargs): # noqa: E501
"""Deletes the specified dataset. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dataset(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: The identifier of the dataset. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_dataset_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_dataset_with_http_info(id, **kwargs) # noqa: E501
return data
    def delete_dataset_with_http_info(self, id, **kwargs):  # noqa: E501
        """Deletes the specified dataset.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_dataset_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: The identifier of the dataset. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus the generic request-control kwargs
        # forwarded to ApiClient.call_api.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Flatten **kwargs into `params`, rejecting unknown keywords.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_dataset" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_dataset`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the URL path template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # DELETE sends no request body.
        body_params = None
        # Authentication setting
        auth_settings = ['subscription_key', 'token']  # noqa: E501

        return self.api_client.call_api(
            '/api/speechtotext/v2.0/datasets/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_dataset(self, id, **kwargs): # noqa: E501
"""Gets the dataset identified by the given ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dataset(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: The identifier of the dataset. (required)
:return: Dataset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dataset_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_dataset_with_http_info(id, **kwargs) # noqa: E501
return data
    def get_dataset_with_http_info(self, id, **kwargs):  # noqa: E501
        """Gets the dataset identified by the given ID.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_dataset_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: The identifier of the dataset. (required)
        :return: Dataset
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus the generic request-control kwargs
        # forwarded to ApiClient.call_api.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Flatten **kwargs into `params`, rejecting unknown keywords.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_dataset" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_dataset`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the URL path template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['subscription_key', 'token']  # noqa: E501

        return self.api_client.call_api(
            '/api/speechtotext/v2.0/datasets/{id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Dataset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_datasets(self, **kwargs): # noqa: E501
"""Gets a list of datasets for the authenticated subscription. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_datasets(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[Dataset]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_datasets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_datasets_with_http_info(**kwargs) # noqa: E501
return data
    def get_datasets_with_http_info(self, **kwargs):  # noqa: E501
        """Gets a list of datasets for the authenticated subscription.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_datasets_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: list[Dataset]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # No endpoint-specific parameters, only the generic
        # request-control kwargs forwarded to ApiClient.call_api.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Flatten **kwargs into `params`, rejecting unknown keywords.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_datasets" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['subscription_key', 'token']  # noqa: E501

        return self.api_client.call_api(
            '/api/speechtotext/v2.0/datasets', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Dataset]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_supported_locales_for_datasets(self, **kwargs): # noqa: E501
"""Gets a list of supported locales for data imports. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_supported_locales_for_datasets(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: IReadOnlyDictionary2
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_supported_locales_for_datasets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_supported_locales_for_datasets_with_http_info(**kwargs) # noqa: E501
return data
    def get_supported_locales_for_datasets_with_http_info(self, **kwargs):  # noqa: E501
        """Gets a list of supported locales for data imports.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_supported_locales_for_datasets_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: IReadOnlyDictionary2
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # No endpoint-specific parameters, only the generic
        # request-control kwargs forwarded to ApiClient.call_api.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Flatten **kwargs into `params`, rejecting unknown keywords.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_supported_locales_for_datasets" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['subscription_key', 'token']  # noqa: E501

        return self.api_client.call_api(
            '/api/speechtotext/v2.0/datasets/locales', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IReadOnlyDictionary2',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_dataset(self, id, dataset_update, **kwargs): # noqa: E501
"""Updates the mutable details of the dataset identified by its ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dataset(id, dataset_update, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: The identifier of the dataset. (required)
:param DatasetUpdate dataset_update: The updated values for the dataset. (required)
:return: Dataset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_dataset_with_http_info(id, dataset_update, **kwargs) # noqa: E501
else:
(data) = self.update_dataset_with_http_info(id, dataset_update, **kwargs) # noqa: E501
return data
    def update_dataset_with_http_info(self, id, dataset_update, **kwargs):  # noqa: E501
        """Updates the mutable details of the dataset identified by its ID.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_dataset_with_http_info(id, dataset_update, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: The identifier of the dataset. (required)
        :param DatasetUpdate dataset_update: The updated values for the dataset. (required)
        :return: Dataset
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus the generic request-control kwargs
        # forwarded to ApiClient.call_api.
        all_params = ['id', 'dataset_update']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Flatten **kwargs into `params`, rejecting unknown keywords.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_dataset" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `update_dataset`")  # noqa: E501
        # verify the required parameter 'dataset_update' is set
        if ('dataset_update' not in params or
                params['dataset_update'] is None):
            raise ValueError("Missing the required parameter `dataset_update` when calling `update_dataset`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the URL path template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The DatasetUpdate payload is sent as the PATCH request body.
        body_params = None
        if 'dataset_update' in params:
            body_params = params['dataset_update']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['subscription_key', 'token']  # noqa: E501

        return self.api_client.call_api(
            '/api/speechtotext/v2.0/datasets/{id}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Dataset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def upload_dataset(self, **kwargs): # noqa: E501
"""Uploads data and creates a new dataset. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_dataset(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: The name of this data import (always add this string for any import).
:param str description: Optional description of this data import.
:param str locale: The locale of this data import (always add this string for any import).
:param str data_import_kind: The kind of the data import (always add this string for any import).
:param str properties: Optional properties of this data import (json serialized object with key/values, where all values must be strings)
:param file audiodata: A zip file containing the audio data (this and the audio archive file for acoustic data imports).
:param file transcriptions: A text file containing the transcriptions for the audio data (this and the transcriptions file for acoustic data imports).
:param file languagedata: A text file containing the language or pronunciation data (only this file for language data imports).
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.upload_dataset_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.upload_dataset_with_http_info(**kwargs) # noqa: E501
return data
    def upload_dataset_with_http_info(self, **kwargs): # noqa: E501
        """Uploads data and creates a new dataset. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.upload_dataset_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name: The name of this data import (always add this string for any import).
        :param str description: Optional description of this data import.
        :param str locale: The locale of this data import (always add this string for any import).
        :param str data_import_kind: The kind of the data import (always add this string for any import).
        :param str properties: Optional properties of this data import (json serialized object with key/values, where all values must be strings)
        :param file audiodata: A zip file containing the audio data (this and the audio archive file for acoustic data imports).
        :param file transcriptions: A text file containing the transcriptions for the audio data (this and the transcriptions file for acoustic data imports).
        :param file languagedata: A text file containing the language or pronunciation data (only this file for language data imports).
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint, plus the
        # framework-level request-control options appended below.
        all_params = ['name', 'description', 'locale', 'data_import_kind', 'properties', 'audiodata', 'transcriptions', 'languagedata'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any unknown keyword argument, then flatten kwargs into the
        # params dict so each value can be looked up by name below.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method upload_dataset" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # String-valued fields go into the multipart form body. Note the
        # snake_case -> camelCase rename for the wire name 'dataImportKind'.
        if 'name' in params:
            form_params.append(('name', params['name'])) # noqa: E501
        if 'description' in params:
            form_params.append(('description', params['description'])) # noqa: E501
        if 'locale' in params:
            form_params.append(('locale', params['locale'])) # noqa: E501
        if 'data_import_kind' in params:
            form_params.append(('dataImportKind', params['data_import_kind'])) # noqa: E501
        if 'properties' in params:
            form_params.append(('properties', params['properties'])) # noqa: E501
        # File-valued fields are sent as multipart file parts.
        if 'audiodata' in params:
            local_var_files['audiodata'] = params['audiodata'] # noqa: E501
        if 'transcriptions' in params:
            local_var_files['transcriptions'] = params['transcriptions'] # noqa: E501
        if 'languagedata' in params:
            local_var_files['languagedata'] = params['languagedata'] # noqa: E501
        # No JSON body: the payload is entirely form/file parts.
        body_params = None
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['multipart/form-data']) # noqa: E501
        # Authentication setting
        auth_settings = ['subscription_key', 'token'] # noqa: E501
        return self.api_client.call_api(
            '/api/speechtotext/v2.0/datasets/upload', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 40.019704
| 158
| 0.614599
| 2,847
| 24,372
| 5.038286
| 0.070952
| 0.043503
| 0.023424
| 0.030117
| 0.92401
| 0.899888
| 0.89215
| 0.885806
| 0.87981
| 0.868865
| 0
| 0.015217
| 0.296242
| 24,372
| 608
| 159
| 40.085526
| 0.82107
| 0.355285
| 0
| 0.755352
| 1
| 0
| 0.184975
| 0.044687
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039755
| false
| 0
| 0.021407
| 0
| 0.119266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
beb917e1571d26d6c01f621deea680a74be4abd4
| 4,253
|
py
|
Python
|
nexus-ingest/nexus-xd-python-modules/tests/winddirspeedtouv_test.py
|
dataplumber/nexus
|
f25a89e85eba098da9c6db1ff3d408dae8a6b310
|
[
"Apache-2.0"
] | 23
|
2016-08-09T22:45:14.000Z
|
2020-02-17T08:18:29.000Z
|
nexus-ingest/nexus-xd-python-modules/tests/winddirspeedtouv_test.py
|
lewismc/incubator-sdap-nexus
|
ff98fa346303431542b8391cc2a1bf7561d1bd03
|
[
"Apache-2.0"
] | 6
|
2017-04-27T21:22:17.000Z
|
2021-06-01T21:45:52.000Z
|
nexus-ingest/nexus-xd-python-modules/tests/winddirspeedtouv_test.py
|
dataplumber/nexus
|
f25a89e85eba098da9c6db1ff3d408dae8a6b310
|
[
"Apache-2.0"
] | 5
|
2016-08-31T13:47:29.000Z
|
2017-11-14T21:45:22.000Z
|
"""
Copyright (c) 2016 Jet Propulsion Laboratory,
California Institute of Technology. All rights reserved
"""
import importlib
import unittest
from os import environ, path
import nexusproto.NexusContent_pb2 as nexusproto
import numpy as np
from nexusproto.serialization import from_shaped_array
class TestAscatbUData(unittest.TestCase):
    """Tests the winddirspeedtouv transform configured for the U component
    (U_OR_V=U) against a dumped ASCAT-B nexus tile."""

    def setUp(self):
        # The module under test reads its configuration from the environment
        # at import time, so set the variables before (re)loading it.
        environ['U_OR_V'] = 'U'
        environ['INBOUND_PORT'] = '7890'
        environ['OUTBOUND_PORT'] = '7891'
        self.module = importlib.import_module('nexusxd.winddirspeedtouv')
        # importlib.reload (the Python 2 builtin `reload` no longer exists
        # on Python 3) forces the module to re-read the environment.
        importlib.reload(self.module)

    def tearDown(self):
        del environ['U_OR_V']
        del environ['INBOUND_PORT']
        del environ['OUTBOUND_PORT']

    def test_u_conversion(self):
        test_file = path.join(path.dirname(__file__), 'dumped_nexustiles', 'ascatb_nonempty_nexustile.bin')
        # The dump is a binary protobuf; open in 'rb' so FromString gets bytes.
        with open(test_file, 'rb') as f:
            nexustile_str = f.read()
        results = list(self.module.transform(None, nexustile_str))
        self.assertEqual(1, len(results))
        nexus_tile = nexusproto.NexusTile.FromString(results[0])
        self.assertTrue(nexus_tile.HasField('tile'))
        self.assertTrue(nexus_tile.tile.HasField('swath_tile'))
        # Check data
        tile_data = np.ma.masked_invalid(from_shaped_array(nexus_tile.tile.swath_tile.variable_data))
        self.assertEqual(82, np.ma.count(tile_data))
        # Check meta data: wind_dir and wind_speed are preserved, and the
        # other component (wind_v) is emitted as metadata.
        meta_list = nexus_tile.tile.swath_tile.meta_data
        self.assertEqual(3, len(meta_list))
        wind_dir = next(meta_obj for meta_obj in meta_list if meta_obj.name == 'wind_dir')
        self.assertIsNotNone(wind_dir)
        self.assertEqual(tile_data.shape, np.ma.masked_invalid(from_shaped_array(wind_dir.meta_data)).shape)
        wind_speed = next(meta_obj for meta_obj in meta_list if meta_obj.name == 'wind_speed')
        self.assertIsNotNone(wind_speed)
        self.assertEqual(tile_data.shape, np.ma.masked_invalid(from_shaped_array(wind_speed.meta_data)).shape)
        wind_v = next(meta_obj for meta_obj in meta_list if meta_obj.name == 'wind_v')
        self.assertIsNotNone(wind_v)
        self.assertEqual(tile_data.shape, np.ma.masked_invalid(from_shaped_array(wind_v.meta_data)).shape)
class TestAscatbVData(unittest.TestCase):
    """Tests the winddirspeedtouv transform configured for the V component
    (U_OR_V=V) against a dumped ASCAT-B nexus tile."""

    def setUp(self):
        # The module under test reads its configuration from the environment
        # at import time, so set the variables before (re)loading it.
        environ['U_OR_V'] = 'V'
        environ['INBOUND_PORT'] = '7890'
        environ['OUTBOUND_PORT'] = '7891'
        self.module = importlib.import_module('nexusxd.winddirspeedtouv')
        # importlib.reload (the Python 2 builtin `reload` no longer exists
        # on Python 3) forces the module to re-read the environment.
        importlib.reload(self.module)

    def tearDown(self):
        del environ['U_OR_V']
        del environ['INBOUND_PORT']
        del environ['OUTBOUND_PORT']

    def test_u_conversion(self):
        test_file = path.join(path.dirname(__file__), 'dumped_nexustiles', 'ascatb_nonempty_nexustile.bin')
        # The dump is a binary protobuf; open in 'rb' so FromString gets bytes.
        with open(test_file, 'rb') as f:
            nexustile_str = f.read()
        results = list(self.module.transform(None, nexustile_str))
        self.assertEqual(1, len(results))
        nexus_tile = nexusproto.NexusTile.FromString(results[0])
        self.assertTrue(nexus_tile.HasField('tile'))
        self.assertTrue(nexus_tile.tile.HasField('swath_tile'))
        # Check data
        tile_data = np.ma.masked_invalid(from_shaped_array(nexus_tile.tile.swath_tile.variable_data))
        self.assertEqual(82, np.ma.count(tile_data))
        # Check meta data: wind_dir and wind_speed are preserved, and the
        # other component (wind_u) is emitted as metadata.
        meta_list = nexus_tile.tile.swath_tile.meta_data
        self.assertEqual(3, len(meta_list))
        wind_dir = next(meta_obj for meta_obj in meta_list if meta_obj.name == 'wind_dir')
        self.assertIsNotNone(wind_dir)
        self.assertEqual(tile_data.shape, np.ma.masked_invalid(from_shaped_array(wind_dir.meta_data)).shape)
        wind_speed = next(meta_obj for meta_obj in meta_list if meta_obj.name == 'wind_speed')
        self.assertIsNotNone(wind_speed)
        self.assertEqual(tile_data.shape, np.ma.masked_invalid(from_shaped_array(wind_speed.meta_data)).shape)
        wind_u = next(meta_obj for meta_obj in meta_list if meta_obj.name == 'wind_u')
        self.assertIsNotNone(wind_u)
        self.assertEqual(tile_data.shape, np.ma.masked_invalid(from_shaped_array(wind_u.meta_data)).shape)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 39.018349
| 111
| 0.698331
| 578
| 4,253
| 4.849481
| 0.178201
| 0.044952
| 0.048163
| 0.048519
| 0.861934
| 0.861934
| 0.861934
| 0.861934
| 0.861934
| 0.834106
| 0
| 0.00903
| 0.192805
| 4,253
| 108
| 112
| 39.37963
| 0.807457
| 0.036915
| 0
| 0.756757
| 0
| 0
| 0.090086
| 0.025949
| 0
| 0
| 0
| 0
| 0.297297
| 1
| 0.081081
| false
| 0
| 0.108108
| 0
| 0.216216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe7ccb05824470f29ec794a5257b3e3f248f451b
| 66,281
|
py
|
Python
|
portia_server/portia_orm/tests/test_relationship.py
|
inkicchj/portia
|
606467d278eab2236afcb3d260cb03bf6fb906a0
|
[
"BSD-3-Clause"
] | 6,390
|
2015-01-01T17:05:13.000Z
|
2022-03-31T08:20:12.000Z
|
portia_server/portia_orm/tests/test_relationship.py
|
Arindamdeb2000/portia
|
606467d278eab2236afcb3d260cb03bf6fb906a0
|
[
"BSD-3-Clause"
] | 442
|
2015-01-04T17:32:20.000Z
|
2022-03-15T21:21:23.000Z
|
portia_server/portia_orm/tests/test_relationship.py
|
Arindamdeb2000/portia
|
606467d278eab2236afcb3d260cb03bf6fb906a0
|
[
"BSD-3-Clause"
] | 1,288
|
2015-01-09T05:54:20.000Z
|
2022-03-31T03:21:51.000Z
|
import json
from unittest import mock
from .models import (OneToOneModel1, OneToOneModel2, ParentModel, ChildModel,
ManyToManyModel1, ManyToManyModel2, PolymorphicParentModel,
PolymorphicChildModel1, PolymorphicChildModel2)
from .utils import DataStoreTestCase, mock_storage
class OneToOneRelationshipTests(DataStoreTestCase):
    """Tests for the one-to-one relationship between OneToOneModel1.m2 and
    OneToOneModel2.m1: assignment from either side keeps both sides in
    sync, dump() emits the id-only vs. embedded form, and loading/saving
    goes through the mocked storage.
    """
    def setUp(self):
        super(OneToOneRelationshipTests, self).setUp()
        # Two fixture files: model-1 stores only its partner's id, while
        # model-2 embeds the full partner object.
        self.storage = mock_storage({
            'o2o-model-1.json':
                '{'
                ' "id": "model-1",'
                ' "field": "model-1",'
                ' "m2": "model-2"'
                '}',
            'o2o-model-2.json':
                '{'
                ' "id": "model-2",'
                ' "field": "model-2",'
                ' "m1": {'
                ' "id": "model-1",'
                ' "field": "model-1",'
                ' "m2": "model-2"'
                ' }'
                '}',
        })
    def test_no_relation(self):
        """Unrelated models dump the relation field as None."""
        model1 = OneToOneModel1(id='model-1')
        model2 = OneToOneModel2(id='model-2')
        self.assertEqual(model1.m2, None)
        self.assertEqual(model2.m1, None)
        self.assertEqual(model1.dump(), {
            'id': 'model-1',
            'm2': None,
        })
        self.assertEqual(model2.dump(), {
            'id': 'model-2',
            'm1': None,
        })
    def test_set_relation(self):
        """Assigning one side also populates the reverse side."""
        model1 = OneToOneModel1(id='model-1')
        model2 = OneToOneModel2(id='model-2')
        model2.m1 = model1
        self.assertEqual(model1.m2, model2)
        self.assertEqual(model2.m1, model1)
        self.assertEqual(model1.dump(), {
            'id': 'model-1',
            'm2': 'model-2',
        })
        self.assertEqual(model2.dump(), {
            'id': 'model-2',
            'm1': {
                'id': 'model-1',
                'm2': 'model-2',
            },
        })
    def test_set_reverse_relation(self):
        """Assigning the reverse side also populates the forward side."""
        model1 = OneToOneModel1(id='model-1')
        model2 = OneToOneModel2(id='model-2')
        model1.m2 = model2
        self.assertEqual(model1.m2, model2)
        self.assertEqual(model2.m1, model1)
        self.assertEqual(model1.dump(), {
            'id': 'model-1',
            'm2': 'model-2',
        })
        self.assertEqual(model2.dump(), {
            'id': 'model-2',
            'm1': {
                'id': 'model-1',
                'm2': 'model-2',
            },
        })
    def test_create_with_relation(self):
        """Passing the relation to the constructor wires both sides."""
        model1 = OneToOneModel1(id='model-1')
        model2 = OneToOneModel2(id='model-2', m1=model1)
        self.assertEqual(model1.m2, model2)
        self.assertEqual(model2.m1, model1)
        self.assertEqual(model1.dump(), {
            'id': 'model-1',
            'm2': 'model-2',
        })
        self.assertEqual(model2.dump(), {
            'id': 'model-2',
            'm1': {
                'id': 'model-1',
                'm2': 'model-2',
            },
        })
    def test_create_with_reverse_relation(self):
        """Passing the reverse relation to the constructor wires both sides."""
        model2 = OneToOneModel2(id='model-2')
        model1 = OneToOneModel1(id='model-1', m2=model2)
        self.assertEqual(model1.dump(), {
            'id': 'model-1',
            'm2': 'model-2',
        })
        self.assertEqual(model2.dump(), {
            'id': 'model-2',
            'm1': {
                'id': 'model-1',
                'm2': 'model-2',
            },
        })
    def test_change_relation(self):
        """Re-pointing m1 detaches the old partner's reverse reference."""
        model1 = OneToOneModel1(id='model-1')
        model2 = OneToOneModel2(id='model-2', m1=model1)
        model3 = OneToOneModel1(id='model-3')
        self.assertEqual(model1.m2, model2)
        self.assertEqual(model2.m1, model1)
        self.assertEqual(model3.m2, None)
        model2.m1 = model3
        self.assertEqual(model1.m2, None)
        self.assertEqual(model2.m1, model3)
        self.assertEqual(model3.m2, model2)
        self.assertEqual(model2.dump(), {
            'id': 'model-2',
            'm1': {
                'id': 'model-3',
                'm2': 'model-2',
            },
        })
    def test_change_reverse_relation(self):
        """Re-pointing m2 from the reverse side detaches the old partner."""
        model1 = OneToOneModel1(id='model-1')
        model2 = OneToOneModel2(id='model-2', m1=model1)
        model3 = OneToOneModel1(id='model-3')
        self.assertEqual(model1.m2, model2)
        self.assertEqual(model2.m1, model1)
        self.assertEqual(model3.m2, None)
        model3.m2 = model2
        self.assertEqual(model1.m2, None)
        self.assertEqual(model2.m1, model3)
        self.assertEqual(model3.m2, model2)
        self.assertEqual(model2.dump(), {
            'id': 'model-2',
            'm1': {
                'id': 'model-3',
                'm2': 'model-2',
            },
        })
    def test_load_full(self):
        """Loading the embedding side needs only its own file."""
        model = OneToOneModel2(self.storage, id='model-2')
        self.assertEqual(model.dump(), {
            'id': 'model-2',
            'field': 'model-2',
            'm1': {
                'id': 'model-1',
                'field': 'model-1',
                'm2': 'model-2',
            },
        })
        self.storage.open.assert_called_once_with('o2o-model-2.json')
    def test_load_partial(self):
        """Loading the id-only side opens the partner's file as well."""
        model = OneToOneModel1(self.storage, id='model-1')
        self.assertEqual(model.dump(), {
            'id': 'model-1',
            'field': 'model-1',
            'm2': 'model-2',
        })
        self.assertEqual(self.storage.open.call_count, 2)
        self.storage.open.assert_has_calls([
            mock.call('o2o-model-1.json'),
            mock.call('o2o-model-2.json')])
    def test_save_field(self):
        """Saving one model writes its file; a field change in the partner
        is only persisted (and re-embedded) when the partner is saved."""
        model1 = OneToOneModel1(self.storage, id='model-1')
        model2 = model1.m2
        model1.field = 'changed-field-1'
        model2.field = 'changed-field-2'
        model2.save()
        self.assertEqual(self.storage.save.call_count, 1)
        self.storage.save.assert_has_calls([
            mock.call('o2o-model-2.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['o2o-model-2.json'],
            '{\n'
            ' "field": "changed-field-2", \n'
            ' "id": "model-2", \n'
            ' "m1": {\n'
            ' "field": "model-1", \n'
            ' "id": "model-1", \n'
            ' "m2": "model-2"\n'
            ' }\n'
            '}')
        model1.save()
        # Saving model1 also rewrites model2's file because model2 embeds it.
        self.assertEqual(self.storage.save.call_count, 3)
        self.storage.save.assert_has_calls([
            mock.call('o2o-model-2.json', mock.ANY),
            mock.call('o2o-model-1.json', mock.ANY),
            mock.call('o2o-model-2.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['o2o-model-1.json'],
            '{\n'
            ' "field": "changed-field-1", \n'
            ' "id": "model-1", \n'
            ' "m2": "model-2"\n'
            '}')
        self.assertEqual(
            self.storage.files['o2o-model-2.json'],
            '{\n'
            ' "field": "changed-field-2", \n'
            ' "id": "model-2", \n'
            ' "m1": {\n'
            ' "field": "changed-field-1", \n'
            ' "id": "model-1", \n'
            ' "m2": "model-2"\n'
            ' }\n'
            '}')
    def test_save_id(self):
        """Changing an id propagates to every file that references it."""
        model1 = OneToOneModel1(self.storage, id='model-1')
        model2 = model1.m2
        model1.id = 'changed-id-1'
        model2.id = 'changed-id-2'
        model2.save()
        # model2's new id is referenced from model1's file, so both save.
        self.assertEqual(self.storage.save.call_count, 2)
        self.storage.save.assert_has_calls([
            mock.call('o2o-model-2.json', mock.ANY),
            mock.call('o2o-model-1.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['o2o-model-1.json'],
            '{\n'
            ' "field": "model-1", \n'
            ' "id": "model-1", \n'
            ' "m2": "changed-id-2"\n'
            '}')
        self.assertEqual(
            self.storage.files['o2o-model-2.json'],
            '{\n'
            ' "field": "model-2", \n'
            ' "id": "changed-id-2", \n'
            ' "m1": {\n'
            ' "field": "model-1", \n'
            ' "id": "model-1", \n'
            ' "m2": "changed-id-2"\n'
            ' }\n'
            '}')
        model1.save()
        self.assertEqual(self.storage.save.call_count, 4)
        self.storage.save.assert_has_calls([
            mock.call('o2o-model-2.json', mock.ANY),
            mock.call('o2o-model-1.json', mock.ANY),
            mock.call('o2o-model-1.json', mock.ANY),
            mock.call('o2o-model-2.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['o2o-model-1.json'],
            '{\n'
            ' "field": "model-1", \n'
            ' "id": "changed-id-1", \n'
            ' "m2": "changed-id-2"\n'
            '}')
        self.assertEqual(
            self.storage.files['o2o-model-2.json'],
            '{\n'
            ' "field": "model-2", \n'
            ' "id": "changed-id-2", \n'
            ' "m1": {\n'
            ' "field": "model-1", \n'
            ' "id": "changed-id-1", \n'
            ' "m2": "changed-id-2"\n'
            ' }\n'
            '}')
class OneToManyRelationshipTests(DataStoreTestCase):
    """Tests for the one-to-many relationship between ParentModel.children
    and ChildModel.parent: both sides stay in sync, the children collection
    supports list/dict-style access, and load/save goes through the mocked
    storage (children live in a per-parent file 'parent-id/children.json').
    """
    def setUp(self):
        super(OneToManyRelationshipTests, self).setUp()
        # Fixtures: the parent file embeds partial children; the full child
        # records live in the per-parent children file.
        self.storage = mock_storage({
            'parents.json':
                '{'
                ' "id": "parent-1",'
                ' "field": "parent-1",'
                ' "children": ['
                ' {'
                ' "id": "child-1",'
                ' "parent": "parent-1"'
                ' }'
                ' ]'
                '}',
            'parent-1/children.json':
                '['
                ' {'
                ' "id": "child-1",'
                ' "field": "child-1",'
                ' "parent": "parent-1"'
                ' }'
                ']',
        })
    def test_no_children(self):
        parent = ParentModel(id='parent-1')
        self.assertEqual(len(parent.children), 0)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [],
        })
    def test_set_children(self):
        parent = ParentModel(id='parent-1')
        child = ChildModel(id='child-1')
        parent.children = [child]
        self.assertEqual(child.parent, parent)
        self.assertEqual(len(parent.children), 1)
        self.assertEqual(parent.children[0], child)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'parent': 'parent-1',
                },
            ],
        })
    def test_add_to_children(self):
        parent = ParentModel(id='parent-1')
        child = ChildModel(id='child-1')
        parent.children.add(child)
        self.assertEqual(child.parent, parent)
        self.assertEqual(len(parent.children), 1)
        self.assertEqual(parent.children[0], child)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'parent': 'parent-1',
                },
            ],
        })
    def test_set_parent(self):
        parent = ParentModel(id='parent-1')
        child = ChildModel(id='child-1')
        child.parent = parent
        self.assertEqual(child.parent, parent)
        self.assertEqual(len(parent.children), 1)
        self.assertEqual(parent.children[0], child)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'parent': 'parent-1',
                },
            ],
        })
    def test_create_with_children(self):
        child = ChildModel(id='child-1')
        parent = ParentModel(id='parent-1', children=[child])
        self.assertEqual(child.parent, parent)
        self.assertEqual(len(parent.children), 1)
        self.assertEqual(parent.children[0], child)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'parent': 'parent-1',
                },
            ],
        })
    def test_create_with_parent(self):
        parent = ParentModel(id='parent-1')
        child = ChildModel(id='child-1', parent=parent)
        self.assertEqual(child.parent, parent)
        self.assertEqual(len(parent.children), 1)
        self.assertEqual(parent.children[0], child)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'parent': 'parent-1',
                },
            ],
        })
    def test_change_parent(self):
        """Re-pointing a child's parent moves it between children lists."""
        parent = ParentModel(id='parent-1')
        parent2 = ParentModel(id='parent-2')
        child = ChildModel(id='child-1', parent=parent)
        self.assertEqual(child.parent, parent)
        self.assertEqual(len(parent.children), 1)
        self.assertEqual(len(parent2.children), 0)
        child.parent = parent2
        self.assertEqual(child.parent, parent2)
        self.assertEqual(len(parent.children), 0)
        self.assertEqual(len(parent2.children), 1)
        self.assertEqual(parent2.children[0], child)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [],
        })
        self.assertEqual(parent2.dump(), {
            'id': 'parent-2',
            'children': [
                {
                    'id': 'child-1',
                    'parent': 'parent-2',
                },
            ],
        })
    def test_change_children(self):
        parent = ParentModel(id='parent-1')
        child = ChildModel(id='child-1', parent=parent)
        child2 = ChildModel(id='child-2')
        self.assertEqual(child.parent, parent)
        self.assertEqual(child2.parent, None)
        self.assertEqual(len(parent.children), 1)
        parent.children = [child, child2]
        self.assertEqual(child.parent, parent)
        self.assertEqual(child2.parent, parent)
        self.assertEqual(len(parent.children), 2)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'parent': 'parent-1',
                },
                {
                    'id': 'child-2',
                    'parent': 'parent-1',
                },
            ],
        })
    def test_getitem(self):
        """children supports indexing by position, id string, model and slice."""
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2, child3])
        self.assertIs(parent.children[0], child1)
        self.assertIs(parent.children['child-1'], child1)
        self.assertIs(parent.children[child1], child1)
        with self.assertRaises(IndexError):
            parent.children[1000]
        with self.assertRaises(KeyError):
            parent.children['child-4']
        self.assertEqual(parent.children[1:], [child2, child3])
    def test_get(self):
        """children.get mirrors __getitem__ but returns a default on miss."""
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2, child3])
        self.assertIs(parent.children.get(0), child1)
        self.assertIs(parent.children.get('child-1'), child1)
        self.assertIs(parent.children.get(child1), child1)
        self.assertIs(parent.children.get('child-4'), None)
        sentinel = object()
        self.assertIs(parent.children.get('child-4', default=sentinel), sentinel)
    def test_setitem(self):
        """Item and slice assignment re-parent the new children and detach
        the replaced ones; duplicate ids are rejected."""
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2, child3])
        child1b = ChildModel(id='child-1')
        child1c = ChildModel(id='child-1')
        child1d = ChildModel(id='child-1')
        child4 = ChildModel(id='child-4')
        child5 = ChildModel(id='child-5')
        child6 = ChildModel(id='child-6')
        child7 = ChildModel(id='child-7')
        parent.children[0] = child1b
        self.assertIs(parent.children[0], child1b)
        parent.children['child-1'] = child1c
        self.assertIs(parent.children[0], child1c)
        parent.children[child1] = child1d
        self.assertIs(parent.children[0], child1d)
        self.assertListEqual(parent.children, [child1d, child2, child3])
        parent.children[1:1] = [child4, child5]
        self.assertIs(child4.parent, parent)
        self.assertIs(child5.parent, parent)
        self.assertListEqual(parent.children,
                             [child1d, child4, child5, child2, child3])
        parent.children[:2] = [child6, child7]
        self.assertIs(child6.parent, parent)
        self.assertIs(child7.parent, parent)
        self.assertIs(child1d.parent, None)
        self.assertIs(child4.parent, None)
        self.assertListEqual(parent.children,
                             [child6, child7, child5, child2, child3])
        with self.assertRaises(ValueError):
            parent.children[0:0] = [child2]
    def test_delitem(self):
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        child4 = ChildModel(id='child-4')
        child5 = ChildModel(id='child-5')
        parent = ParentModel(id='parent-1', children=[
            child1, child2, child3, child4, child5])
        del parent.children[0]
        del parent.children['child-3']
        del parent.children[child4]
        self.assertListEqual(parent.children, [child2, child5])
        self.assertIs(child1.parent, None)
        self.assertIs(child3.parent, None)
        self.assertIs(child4.parent, None)
    def test_append(self):
        """append adds a new child but raises on a duplicate id."""
        child1 = ChildModel(id='child-1')
        child1b = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2])
        parent.children.append(child3)
        self.assertListEqual(parent.children, [child1, child2, child3])
        self.assertIs(child3.parent, parent)
        with self.assertRaises(ValueError):
            parent.children.append(child1b)
    def test_add(self):
        """add is like append but silently ignores a duplicate id."""
        child1 = ChildModel(id='child-1')
        child1b = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2])
        parent.children.add(child3)
        self.assertListEqual(parent.children, [child1, child2, child3])
        self.assertIs(child3.parent, parent)
        parent.children.add(child1b)
        self.assertListEqual(parent.children, [child1, child2, child3])
    def test_insert(self):
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child2, child3])
        parent.children.insert(0, child1)
        self.assertListEqual(parent.children, [child1, child2, child3])
        self.assertIs(child1.parent, parent)
    def test_remove(self):
        """remove detaches a child and raises if it is absent."""
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2, child3])
        parent.children.remove(child1)
        self.assertListEqual(parent.children, [child2, child3])
        self.assertIs(child1.parent, None)
        with self.assertRaises(ValueError):
            parent.children.remove(child1)
    def test_discard(self):
        """discard is like remove but is a no-op if the child is absent."""
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2, child3])
        parent.children.discard(child1)
        self.assertListEqual(parent.children, [child2, child3])
        self.assertIs(child1.parent, None)
        parent.children.discard(child1)
        self.assertListEqual(parent.children, [child2, child3])
    def test_pop(self):
        """pop() removes the last child; pop(key) removes by id."""
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2, child3])
        pop1 = parent.children.pop()
        self.assertIs(pop1, child3)
        self.assertListEqual(parent.children, [child1, child2])
        self.assertIs(child3.parent, None)
        pop2 = parent.children.pop('child-1')
        self.assertIs(pop2, child1)
        self.assertListEqual(parent.children, [child2])
        self.assertIs(child1.parent, None)
    def test_clear(self):
        child1 = ChildModel(id='child-1')
        child2 = ChildModel(id='child-2')
        child3 = ChildModel(id='child-3')
        parent = ParentModel(id='parent-1', children=[child1, child2, child3])
        parent.children.clear()
        self.assertListEqual(parent.children, [])
        self.assertIs(child1.parent, None)
        self.assertIs(child2.parent, None)
        self.assertIs(child3.parent, None)
    def test_load_full(self):
        """Loading a parent opens both the parent and the children files."""
        model = ParentModel(self.storage, id='parent-1')
        self.assertEqual(model.dump(), {
            'id': 'parent-1',
            'field': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'field': 'child-1',
                    'parent': 'parent-1',
                },
            ],
        })
        self.assertEqual(self.storage.open.call_count, 2)
        self.storage.open.assert_has_calls([
            mock.call('parents.json'),
            mock.call('parent-1/children.json')])
    def test_load_partial(self):
        """A child loaded via its parent resolves to the same object as the
        parent's children entry."""
        model = ChildModel(self.storage, id='child-1',
                           parent=ParentModel(self.storage, id='parent-1'))
        self.assertEqual(model.dump(), {
            'id': 'child-1',
            'field': 'child-1',
            'parent': 'parent-1',
        })
        self.assertEqual(model, model.parent.children[0])
        self.assertEqual(self.storage.open.call_count, 2)
        self.storage.open.assert_has_calls([
            mock.call('parents.json'),
            mock.call('parent-1/children.json')])
        self.assertEqual(model.parent.dump(), {
            'id': 'parent-1',
            'field': 'parent-1',
            'children': [
                {
                    'id': 'child-1',
                    'field': 'child-1',
                    'parent': 'parent-1',
                },
            ],
        })
    def test_save_field(self):
        """Saving the parent does not persist a pending child field change;
        saving the child rewrites both files."""
        parent = ParentModel(self.storage, id='parent-1')
        child = parent.children[0]
        child.field = 'changed-id-1'
        parent.field = 'changed-id-2'
        parent.save()
        self.assertEqual(self.storage.save.call_count, 1)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['parents.json'],
            '{\n'
            ' "children": [\n'
            ' {\n'
            ' "field": "child-1", \n'
            ' "id": "child-1", \n'
            ' "parent": "parent-1"\n'
            ' }\n'
            ' ], \n'
            ' "field": "changed-id-2", \n'
            ' "id": "parent-1"\n'
            '}')
        child.save()
        # The parent embeds the child, so its file is rewritten too.
        self.assertEqual(self.storage.save.call_count, 3)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY),
            mock.call('parent-1/children.json', mock.ANY),
            mock.call('parents.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['parent-1/children.json'],
            '[\n'
            ' {\n'
            ' "field": "changed-id-1", \n'
            ' "id": "child-1", \n'
            ' "parent": "parent-1"\n'
            ' }\n'
            ']')
        self.assertEqual(
            self.storage.files['parents.json'],
            '{\n'
            ' "children": [\n'
            ' {\n'
            ' "field": "changed-id-1", \n'
            ' "id": "child-1", \n'
            ' "parent": "parent-1"\n'
            ' }\n'
            ' ], \n'
            ' "field": "changed-id-2", \n'
            ' "id": "parent-1"\n'
            '}')
    def test_save_id(self):
        """Renaming the parent id relocates the children file (old path is
        deleted) and updates every reference to the id."""
        parent = ParentModel(self.storage, id='parent-1')
        child = parent.children[0]
        child.id = 'changed-id-1'
        parent.id = 'changed-id-2'
        parent.save()
        self.assertEqual(self.storage.save.call_count, 2)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY),
            mock.call('changed-id-2/children.json', mock.ANY)])
        self.storage.delete.assert_called_once_with('parent-1/children.json')
        self.assertEqual(
            self.storage.files['changed-id-2/children.json'],
            '[\n'
            ' {\n'
            ' "field": "child-1", \n'
            ' "id": "child-1", \n'
            ' "parent": "changed-id-2"\n'
            ' }\n'
            ']')
        self.assertEqual(
            self.storage.files['parents.json'],
            '{\n'
            ' "children": [\n'
            ' {\n'
            ' "field": "child-1", \n'
            ' "id": "child-1", \n'
            ' "parent": "changed-id-2"\n'
            ' }\n'
            ' ], \n'
            ' "field": "parent-1", \n'
            ' "id": "changed-id-2"\n'
            '}')
        child.save()
        self.assertEqual(self.storage.save.call_count, 4)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY),
            mock.call('changed-id-2/children.json', mock.ANY),
            mock.call('changed-id-2/children.json', mock.ANY),
            mock.call('parents.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['changed-id-2/children.json'],
            '[\n'
            ' {\n'
            ' "field": "child-1", \n'
            ' "id": "changed-id-1", \n'
            ' "parent": "changed-id-2"\n'
            ' }\n'
            ']')
        self.assertEqual(
            self.storage.files['parents.json'],
            '{\n'
            ' "children": [\n'
            ' {\n'
            ' "field": "child-1", \n'
            ' "id": "changed-id-1", \n'
            ' "parent": "changed-id-2"\n'
            ' }\n'
            ' ], \n'
            ' "field": "parent-1", \n'
            ' "id": "changed-id-2"\n'
            '}')
class ManyToManyRelationshipTests(DataStoreTestCase):
def setUp(self):
super(ManyToManyRelationshipTests, self).setUp()
self.storage = mock_storage({
'm2m-model-1.json':
'{'
' "id": "model-1",'
' "field": "model-1",'
' "m2": ['
' "model-2"'
' ]'
'}',
'm2m-model-2.json':
'['
' {'
' "id": "model-2",'
' "field": "model-2",'
' "m1": ['
' {'
' "id": "model-1",'
' "field": "model-1",'
' "m2": ['
' "model-2"'
' ]'
' }'
' ]'
' }'
']',
})
def test_no_relation(self):
model1 = ManyToManyModel1(id='model-1')
model2 = ManyToManyModel2(id='model-2')
self.assertEqual(len(model1.m2), 0)
self.assertEqual(len(model2.m1), 0)
self.assertEqual(model1.dump(), {
'id': 'model-1',
'm2': [],
})
self.assertEqual(model2.dump(), {
'id': 'model-2',
'm1': [],
})
def test_set_relation(self):
model1 = ManyToManyModel1(id='model-1')
model2 = ManyToManyModel2(id='model-2')
model2.m1.append(model1)
self.assertEqual(len(model1.m2), 1)
self.assertEqual(model1.m2[0], model2)
self.assertEqual(len(model2.m1), 1)
self.assertEqual(model2.m1[0], model1)
self.assertEqual(model1.dump(), {
'id': 'model-1',
'm2': [
'model-2',
],
})
self.assertEqual(model2.dump(), {
'id': 'model-2',
'm1': [
{
'id': 'model-1',
'm2': [
'model-2',
],
},
],
})
def test_set_reverse_relation(self):
model1 = ManyToManyModel1(id='model-1')
model2 = ManyToManyModel2(id='model-2')
model1.m2.append(model2)
self.assertEqual(len(model1.m2), 1)
self.assertEqual(model1.m2[0], model2)
self.assertEqual(len(model2.m1), 1)
self.assertEqual(model2.m1[0], model1)
self.assertEqual(model1.dump(), {
'id': 'model-1',
'm2': [
'model-2',
],
})
self.assertEqual(model2.dump(), {
'id': 'model-2',
'm1': [
{
'id': 'model-1',
'm2': [
'model-2',
],
},
],
})
def test_create_with_relation(self):
model1 = ManyToManyModel1(id='model-1')
model2 = ManyToManyModel2(id='model-2', m1=[model1])
self.assertEqual(len(model1.m2), 1)
self.assertEqual(model1.m2[0], model2)
self.assertEqual(len(model2.m1), 1)
self.assertEqual(model2.m1[0], model1)
self.assertEqual(model1.dump(), {
'id': 'model-1',
'm2': [
'model-2',
],
})
self.assertEqual(model2.dump(), {
'id': 'model-2',
'm1': [
{
'id': 'model-1',
'm2': [
'model-2',
],
},
],
})
def test_create_with_reverse_relation(self):
model2 = ManyToManyModel2(id='model-2')
model1 = ManyToManyModel1(id='model-1', m2=[model2])
self.assertEqual(len(model1.m2), 1)
self.assertEqual(model1.m2[0], model2)
self.assertEqual(len(model2.m1), 1)
self.assertEqual(model2.m1[0], model1)
self.assertEqual(model1.dump(), {
'id': 'model-1',
'm2': [
'model-2',
],
})
self.assertEqual(model2.dump(), {
'id': 'model-2',
'm1': [
{
'id': 'model-1',
'm2': [
'model-2',
],
},
],
})
def test_change_relation(self):
    """append/remove on the m1 side add and drop links on both sides."""
    model1 = ManyToManyModel1(id='model-1')
    model2 = ManyToManyModel2(id='model-2', m1=[model1])
    model3 = ManyToManyModel1(id='model-3')
    # Initial state: only model1 <-> model2 linked.
    self.assertEqual(len(model1.m2), 1)
    self.assertEqual(model1.m2[0], model2)
    self.assertEqual(len(model2.m1), 1)
    self.assertEqual(model2.m1[0], model1)
    self.assertEqual(len(model3.m2), 0)
    model2.m1.append(model3)
    # model3 now linked too; model2.m1 preserves insertion order.
    self.assertEqual(len(model1.m2), 1)
    self.assertEqual(model1.m2[0], model2)
    self.assertEqual(len(model2.m1), 2)
    self.assertEqual(model2.m1[0], model1)
    self.assertEqual(model2.m1[1], model3)
    self.assertEqual(len(model3.m2), 1)
    self.assertEqual(model3.m2[0], model2)
    self.assertEqual(model1.dump(), {
        'id': 'model-1',
        'm2': [
            'model-2',
        ],
    })
    self.assertEqual(model2.dump(), {
        'id': 'model-2',
        'm1': [
            {
                'id': 'model-1',
                'm2': [
                    'model-2',
                ],
            },
            {
                'id': 'model-3',
                'm2': [
                    'model-2',
                ],
            },
        ],
    })
    self.assertEqual(model3.dump(), {
        'id': 'model-3',
        'm2': [
            'model-2',
        ],
    })
    model2.m1.remove(model1)
    # Removing model1 clears its reverse side; model3 stays linked.
    self.assertEqual(len(model1.m2), 0)
    self.assertEqual(len(model2.m1), 1)
    self.assertEqual(model2.m1[0], model3)
    self.assertEqual(len(model3.m2), 1)
    self.assertEqual(model3.m2[0], model2)
    self.assertEqual(model1.dump(), {
        'id': 'model-1',
        'm2': [],
    })
    self.assertEqual(model2.dump(), {
        'id': 'model-2',
        'm1': [
            {
                'id': 'model-3',
                'm2': [
                    'model-2',
                ],
            },
        ],
    })
    self.assertEqual(model3.dump(), {
        'id': 'model-3',
        'm2': [
            'model-2',
        ],
    })
def test_change_reverse_relation(self):
    """append/clear on the m2 (reverse) side mirror onto the m1 side."""
    model1 = ManyToManyModel1(id='model-1')
    model2 = ManyToManyModel2(id='model-2', m1=[model1])
    model3 = ManyToManyModel1(id='model-3')
    self.assertEqual(len(model1.m2), 1)
    self.assertEqual(model1.m2[0], model2)
    self.assertEqual(len(model2.m1), 1)
    self.assertEqual(model2.m1[0], model1)
    self.assertEqual(len(model3.m2), 0)
    model3.m2.append(model2)
    # Reverse append lands model3 at the end of model2.m1.
    self.assertEqual(len(model1.m2), 1)
    self.assertEqual(model1.m2[0], model2)
    self.assertEqual(len(model2.m1), 2)
    self.assertEqual(model2.m1[0], model1)
    self.assertEqual(model2.m1[1], model3)
    self.assertEqual(len(model3.m2), 1)
    self.assertEqual(model3.m2[0], model2)
    self.assertEqual(model1.dump(), {
        'id': 'model-1',
        'm2': [
            'model-2',
        ],
    })
    self.assertEqual(model2.dump(), {
        'id': 'model-2',
        'm1': [
            {
                'id': 'model-1',
                'm2': [
                    'model-2',
                ],
            },
            {
                'id': 'model-3',
                'm2': [
                    'model-2',
                ],
            },
        ],
    })
    self.assertEqual(model3.dump(), {
        'id': 'model-3',
        'm2': [
            'model-2',
        ],
    })
    model1.m2.clear()
    # clear() on the reverse side only unlinks model1, not model3.
    self.assertEqual(len(model1.m2), 0)
    self.assertEqual(len(model2.m1), 1)
    self.assertEqual(model2.m1[0], model3)
    self.assertEqual(len(model3.m2), 1)
    self.assertEqual(model3.m2[0], model2)
    self.assertEqual(model1.dump(), {
        'id': 'model-1',
        'm2': [],
    })
    self.assertEqual(model2.dump(), {
        'id': 'model-2',
        'm1': [
            {
                'id': 'model-3',
                'm2': [
                    'model-2',
                ],
            },
        ],
    })
    self.assertEqual(model3.dump(), {
        'id': 'model-3',
        'm2': [
            'model-2',
        ],
    })
def test_load_full(self):
    """Loading model2 from storage pulls in model1 as well (two reads)."""
    model = ManyToManyModel2(self.storage, id='model-2')
    self.assertEqual(model.dump(), {
        'id': 'model-2',
        'field': 'model-2',
        'm1': [
            {
                'id': 'model-1',
                'field': 'model-1',
                'm2': [
                    'model-2',
                ],
            },
        ],
    })
    # Both backing files are opened: the model's own, then the related one.
    self.assertEqual(self.storage.open.call_count, 2)
    self.storage.open.assert_has_calls([
        mock.call('m2m-model-2.json'),
        mock.call('m2m-model-1.json')])
def test_load_partial(self):
    """Loading model1 dumps only ids for m2, yet still reads both files."""
    model = ManyToManyModel1(self.storage, id='model-1')
    self.assertEqual(model.dump(), {
        'id': 'model-1',
        'field': 'model-1',
        'm2': [
            'model-2',
        ],
    })
    # The related file is opened too, even though only the id is dumped.
    self.assertEqual(self.storage.open.call_count, 2)
    self.storage.open.assert_has_calls([
        mock.call('m2m-model-1.json'),
        mock.call('m2m-model-2.json')])
def test_save_field(self):
    """Saving a model writes its file; the related model's changed field is
    persisted only when that model itself is saved.

    NOTE(review): trailing ", " before "\n" in the expected JSON matches the
    separator behavior of json.dumps(indent=...) on older Pythons.
    """
    model1 = ManyToManyModel1(self.storage, id='model-1')
    model2 = model1.m2[0]
    model1.field = 'changed-field-1'
    model2.field = 'changed-field-2'
    model2.save()
    # Saving model2 writes only model2's file; the embedded copy of model1
    # still carries the old field value.
    self.storage.save.assert_called_once_with('m2m-model-2.json', mock.ANY)
    self.assertEqual(
        self.storage.files['m2m-model-2.json'],
        '[\n'
        '    {\n'
        '        "field": "changed-field-2", \n'
        '        "id": "model-2", \n'
        '        "m1": [\n'
        '            {\n'
        '                "field": "model-1", \n'
        '                "id": "model-1", \n'
        '                "m2": [\n'
        '                    "model-2"\n'
        '                ]\n'
        '            }\n'
        '        ]\n'
        '    }\n'
        ']')
    model1.save()
    # Saving model1 also rewrites model2's file so the embedded copy is
    # refreshed (hence three saves in total).
    self.assertEqual(self.storage.save.call_count, 3)
    self.storage.save.assert_has_calls([
        mock.call('m2m-model-2.json', mock.ANY),
        mock.call('m2m-model-1.json', mock.ANY),
        mock.call('m2m-model-2.json', mock.ANY)])
    self.assertEqual(
        self.storage.files['m2m-model-1.json'],
        '{\n'
        '    "field": "changed-field-1", \n'
        '    "id": "model-1", \n'
        '    "m2": [\n'
        '        "model-2"\n'
        '    ]\n'
        '}')
    self.assertEqual(
        self.storage.files['m2m-model-2.json'],
        '[\n'
        '    {\n'
        '        "field": "changed-field-2", \n'
        '        "id": "model-2", \n'
        '        "m1": [\n'
        '            {\n'
        '                "field": "changed-field-1", \n'
        '                "id": "model-1", \n'
        '                "m2": [\n'
        '                    "model-2"\n'
        '                ]\n'
        '            }\n'
        '        ]\n'
        '    }\n'
        ']')
def test_save_id(self):
    """Changing an id cascades: saving a model also rewrites files that
    reference it by id."""
    model1 = ManyToManyModel1(self.storage, id='model-1')
    model2 = model1.m2[0]
    model1.id = 'changed-id-1'
    model2.id = 'changed-id-2'
    model2.save()
    # model2's new id must also be written into model1's file (two saves).
    self.assertEqual(self.storage.save.call_count, 2)
    self.storage.save.assert_has_calls([
        mock.call('m2m-model-2.json', mock.ANY),
        mock.call('m2m-model-1.json', mock.ANY)])
    self.assertEqual(
        self.storage.files['m2m-model-1.json'],
        '{\n'
        '    "field": "model-1", \n'
        '    "id": "model-1", \n'
        '    "m2": [\n'
        '        "changed-id-2"\n'
        '    ]\n'
        '}')
    self.assertEqual(
        self.storage.files['m2m-model-2.json'],
        '[\n'
        '    {\n'
        '        "field": "model-2", \n'
        '        "id": "changed-id-2", \n'
        '        "m1": [\n'
        '            {\n'
        '                "field": "model-1", \n'
        '                "id": "model-1", \n'
        '                "m2": [\n'
        '                    "changed-id-2"\n'
        '                ]\n'
        '            }\n'
        '        ]\n'
        '    }\n'
        ']')
    model1.save()
    # Saving model1 writes its own file and again rewrites model2's file
    # with model1's new id (four saves in total).
    self.assertEqual(self.storage.save.call_count, 4)
    self.storage.save.assert_has_calls([
        mock.call('m2m-model-2.json', mock.ANY),
        mock.call('m2m-model-1.json', mock.ANY),
        mock.call('m2m-model-1.json', mock.ANY),
        mock.call('m2m-model-2.json', mock.ANY)])
    self.assertEqual(
        self.storage.files['m2m-model-1.json'],
        '{\n'
        '    "field": "model-1", \n'
        '    "id": "changed-id-1", \n'
        '    "m2": [\n'
        '        "changed-id-2"\n'
        '    ]\n'
        '}')
    self.assertEqual(
        self.storage.files['m2m-model-2.json'],
        '[\n'
        '    {\n'
        '        "field": "model-2", \n'
        '        "id": "changed-id-2", \n'
        '        "m1": [\n'
        '            {\n'
        '                "field": "model-1", \n'
        '                "id": "changed-id-1", \n'
        '                "m2": [\n'
        '                    "changed-id-2"\n'
        '                ]\n'
        '            }\n'
        '        ]\n'
        '    }\n'
        ']')
class PolymorphicRelationshipTests(DataStoreTestCase):
    """Parent/child relationship tests where the children are polymorphic.

    Two child classes serialize their type under different keys:
    PolymorphicChildModel1 uses 'type', PolymorphicChildModel2 uses
    '_type_' (as shown by the dump() expectations throughout).
    """

    def setUp(self):
        """Provide a mock storage with one parent and two typed children."""
        super(PolymorphicRelationshipTests, self).setUp()
        self.storage = mock_storage({
            'parents.json':
                '{'
                '    "id": "parent-1",'
                '    "field": "parent-1",'
                '    "children": ['
                '        {'
                '            "type": "PolymorphicChildModel1",'
                '            "id": "child-1"'
                '        },'
                '        {'
                '            "_type_": "PolymorphicChildModel2",'
                '            "id": "child-2"'
                '        }'
                '    ]'
                '}',
            'children.json':
                '['
                '    {'
                '        "type": "PolymorphicChildModel1",'
                '        "id": "child-1",'
                '        "field1": "child-1",'
                '        "parent": "parent-1"'
                '    },'
                '    {'
                '        "_type_": "PolymorphicChildModel2",'
                '        "id": "child-2",'
                '        "field2": "child-2",'
                '        "parent": "parent-1"'
                '    }'
                ']',
        })

    def test_no_children(self):
        """A fresh parent has an empty children collection."""
        parent = PolymorphicParentModel(id='parent-1')
        self.assertEqual(len(parent.children), 0)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [],
        })

    def test_set_children(self):
        """Assigning a list to parent.children sets each child's parent."""
        parent = PolymorphicParentModel(id='parent-1')
        child1 = PolymorphicChildModel1(id='child-1', field1='field-1')
        child2 = PolymorphicChildModel2(id='child-2', field2='field-2')
        parent.children = [child1, child2]
        self.assertEqual(child1.parent, parent)
        self.assertEqual(child2.parent, parent)
        self.assertEqual(len(parent.children), 2)
        self.assertEqual(parent.children[0], child1)
        self.assertEqual(parent.children[1], child2)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
        })

    def test_add_to_children(self):
        """add() preserves insertion order (child2 first here)."""
        parent = PolymorphicParentModel(id='parent-1')
        child1 = PolymorphicChildModel1(id='child-1', field1='field-1')
        child2 = PolymorphicChildModel2(id='child-2', field2='field-2')
        parent.children.add(child2)
        parent.children.add(child1)
        self.assertEqual(child1.parent, parent)
        self.assertEqual(child2.parent, parent)
        self.assertEqual(len(parent.children), 2)
        self.assertEqual(parent.children[0], child2)
        self.assertEqual(parent.children[1], child1)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
            ],
        })

    def test_set_parent(self):
        """Setting child.parent appends the child to parent.children."""
        parent = PolymorphicParentModel(id='parent-1')
        child1 = PolymorphicChildModel1(id='child-1', field1='field-1')
        child2 = PolymorphicChildModel2(id='child-2', field2='field-2')
        child1.parent = parent
        child2.parent = parent
        self.assertEqual(child1.parent, parent)
        self.assertEqual(child2.parent, parent)
        self.assertEqual(len(parent.children), 2)
        self.assertEqual(parent.children[0], child1)
        self.assertEqual(parent.children[1], child2)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
        })

    def test_create_with_children(self):
        """children=[...] at construction time links both sides."""
        child1 = PolymorphicChildModel1(id='child-1', field1='field-1')
        child2 = PolymorphicChildModel2(id='child-2', field2='field-2')
        parent = PolymorphicParentModel(id='parent-1',
                                        children=[child1, child2])
        self.assertEqual(child1.parent, parent)
        self.assertEqual(child2.parent, parent)
        self.assertEqual(len(parent.children), 2)
        self.assertEqual(parent.children[0], child1)
        self.assertEqual(parent.children[1], child2)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
        })

    def test_create_with_parent(self):
        """parent=... at child construction time links both sides."""
        parent = PolymorphicParentModel(id='parent-1')
        child1 = PolymorphicChildModel1(id='child-1', field1='field-1',
                                        parent=parent)
        child2 = PolymorphicChildModel2(id='child-2', field2='field-2',
                                        parent=parent)
        self.assertEqual(child1.parent, parent)
        self.assertEqual(child2.parent, parent)
        self.assertEqual(len(parent.children), 2)
        self.assertEqual(parent.children[0], child1)
        self.assertEqual(parent.children[1], child2)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
        })

    def test_change_parent(self):
        """Re-assigning child.parent moves the child between parents."""
        parent1 = PolymorphicParentModel(id='parent-1')
        parent2 = PolymorphicParentModel(id='parent-2')
        child1 = PolymorphicChildModel1(id='child-1', field1='field-1',
                                        parent=parent1)
        child2 = PolymorphicChildModel2(id='child-2', field2='field-2',
                                        parent=parent1)
        self.assertEqual(child1.parent, parent1)
        self.assertEqual(child2.parent, parent1)
        self.assertEqual(len(parent1.children), 2)
        self.assertEqual(len(parent2.children), 0)
        child2.parent = parent2
        # child2 is removed from parent1 and appended to parent2.
        self.assertEqual(child1.parent, parent1)
        self.assertEqual(child2.parent, parent2)
        self.assertEqual(len(parent1.children), 1)
        self.assertEqual(len(parent2.children), 1)
        self.assertEqual(parent1.children[0], child1)
        self.assertEqual(parent2.children[0], child2)
        self.assertEqual(parent1.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
            ],
        })
        self.assertEqual(parent2.dump(), {
            'id': 'parent-2',
            'children': [
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
        })
        child1.parent = parent2
        # Moving child1 too empties parent1; order in parent2 reflects
        # arrival order (child2 first).
        self.assertEqual(child1.parent, parent2)
        self.assertEqual(child2.parent, parent2)
        self.assertEqual(len(parent1.children), 0)
        self.assertEqual(len(parent2.children), 2)
        self.assertEqual(parent2.children[0], child2)
        self.assertEqual(parent2.children[1], child1)
        self.assertEqual(parent1.dump(), {
            'id': 'parent-1',
            'children': [],
        })
        self.assertEqual(parent2.dump(), {
            'id': 'parent-2',
            'children': [
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
            ],
        })

    def test_change_children(self):
        """Re-assigning the whole children list keeps existing members."""
        parent = PolymorphicParentModel(id='parent-1')
        child1 = PolymorphicChildModel1(id='child-1', field1='field-1',
                                        parent=parent)
        child2 = PolymorphicChildModel2(id='child-2', field2='field-2')
        self.assertEqual(child1.parent, parent)
        self.assertEqual(child2.parent, None)
        self.assertEqual(len(parent.children), 1)
        parent.children = [child1, child2]
        self.assertEqual(child1.parent, parent)
        self.assertEqual(child2.parent, parent)
        self.assertEqual(len(parent.children), 2)
        self.assertEqual(parent.dump(), {
            'id': 'parent-1',
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
        })

    def test_getitem(self):
        """children[...] accepts an index, an id string, a model, or a slice."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel2(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2, child3])
        self.assertIs(parent.children[0], child1)
        self.assertIs(parent.children['child-1'], child1)
        self.assertIs(parent.children[child1], child1)
        self.assertIs(parent.children[1], child2)
        self.assertIs(parent.children['child-2'], child2)
        self.assertIs(parent.children[child2], child2)
        # Bad integer index -> IndexError; unknown id -> KeyError.
        with self.assertRaises(IndexError):
            parent.children[1000]
        with self.assertRaises(KeyError):
            parent.children['child-4']
        self.assertEqual(parent.children[2:], [child3])

    def test_get(self):
        """get() mirrors __getitem__ but returns a default instead of raising."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel2(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2, child3])
        self.assertIs(parent.children.get(0), child1)
        self.assertIs(parent.children.get('child-1'), child1)
        self.assertIs(parent.children.get(child1), child1)
        self.assertIs(parent.children.get(1), child2)
        self.assertIs(parent.children.get('child-2'), child2)
        self.assertIs(parent.children.get(child2), child2)
        self.assertIs(parent.children.get('child-4'), None)
        sentinel = object()
        self.assertIs(parent.children.get('child-4', default=sentinel), sentinel)

    def test_setitem(self):
        """Item/slice assignment re-parents new children and un-parents
        replaced ones; duplicate ids raise ValueError."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel2(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2, child3])
        child1b = PolymorphicChildModel1(id='child-1')
        child1c = PolymorphicChildModel1(id='child-1')
        child1d = PolymorphicChildModel1(id='child-1')
        child4 = PolymorphicChildModel2(id='child-4')
        child5 = PolymorphicChildModel1(id='child-5')
        child6 = PolymorphicChildModel2(id='child-6')
        child7 = PolymorphicChildModel1(id='child-7')
        # Replacement works by index, by id string, and by model.
        parent.children[0] = child1b
        self.assertIs(parent.children[0], child1b)
        parent.children['child-1'] = child1c
        self.assertIs(parent.children[0], child1c)
        parent.children[child1] = child1d
        self.assertIs(parent.children[0], child1d)
        self.assertListEqual(parent.children, [child1d, child2, child3])
        parent.children[1:1] = [child4, child5]
        self.assertIs(child4.parent, parent)
        self.assertIs(child5.parent, parent)
        self.assertListEqual(parent.children,
                             [child1d, child4, child5, child2, child3])
        parent.children[:2] = [child6, child7]
        self.assertIs(child6.parent, parent)
        self.assertIs(child7.parent, parent)
        # Children dropped by the slice assignment lose their parent.
        self.assertIs(child1d.parent, None)
        self.assertIs(child4.parent, None)
        self.assertListEqual(parent.children,
                             [child6, child7, child5, child2, child3])
        with self.assertRaises(ValueError):
            parent.children[0:0] = [child2]

    def test_delitem(self):
        """del accepts index, id string, or model; deleted children lose
        their parent."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel2(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        child4 = PolymorphicChildModel2(id='child-4')
        child5 = PolymorphicChildModel1(id='child-5')
        parent = PolymorphicParentModel(id='parent-1', children=[
            child1, child2, child3, child4, child5])
        del parent.children[0]
        del parent.children['child-3']
        del parent.children[child4]
        self.assertListEqual(parent.children, [child2, child5])
        self.assertIs(child1.parent, None)
        self.assertIs(child3.parent, None)
        self.assertIs(child4.parent, None)

    def test_append(self):
        """append() links the child; a duplicate id raises ValueError."""
        child1 = PolymorphicChildModel1(id='child-1')
        child1b = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel1(id='child-2')
        child3 = PolymorphicChildModel2(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2])
        parent.children.append(child3)
        self.assertListEqual(parent.children, [child1, child2, child3])
        self.assertIs(child3.parent, parent)
        with self.assertRaises(ValueError):
            parent.children.append(child1b)

    def test_add(self):
        """add() is like append() but silently ignores a duplicate id."""
        child1 = PolymorphicChildModel1(id='child-1')
        child1b = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel1(id='child-2')
        child3 = PolymorphicChildModel2(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2])
        parent.children.add(child3)
        self.assertListEqual(parent.children, [child1, child2, child3])
        self.assertIs(child3.parent, parent)
        parent.children.add(child1b)
        self.assertListEqual(parent.children, [child1, child2, child3])

    def test_insert(self):
        """insert() places the child at the given index and links it."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel1(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child2, child3])
        parent.children.insert(0, child1)
        self.assertListEqual(parent.children, [child1, child2, child3])
        self.assertIs(child1.parent, parent)

    def test_remove(self):
        """remove() unlinks; removing an absent child raises ValueError."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel1(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2, child3])
        parent.children.remove(child1)
        self.assertListEqual(parent.children, [child2, child3])
        self.assertIs(child1.parent, None)
        with self.assertRaises(ValueError):
            parent.children.remove(child1)

    def test_discard(self):
        """discard() is like remove() but a no-op when the child is absent."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel1(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2, child3])
        parent.children.discard(child1)
        self.assertListEqual(parent.children, [child2, child3])
        self.assertIs(child1.parent, None)
        parent.children.discard(child1)
        self.assertListEqual(parent.children, [child2, child3])

    def test_pop(self):
        """pop() takes the last child by default, or a specific id."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel1(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2, child3])
        pop1 = parent.children.pop()
        self.assertIs(pop1, child3)
        self.assertListEqual(parent.children, [child1, child2])
        self.assertIs(child3.parent, None)
        pop2 = parent.children.pop('child-1')
        self.assertIs(pop2, child1)
        self.assertListEqual(parent.children, [child2])
        self.assertIs(child1.parent, None)

    def test_clear(self):
        """clear() empties the collection and un-parents every child."""
        child1 = PolymorphicChildModel1(id='child-1')
        child2 = PolymorphicChildModel1(id='child-2')
        child3 = PolymorphicChildModel1(id='child-3')
        parent = PolymorphicParentModel(
            id='parent-1', children=[child1, child2, child3])
        parent.children.clear()
        self.assertListEqual(parent.children, [])
        self.assertIs(child1.parent, None)
        self.assertIs(child2.parent, None)
        self.assertIs(child3.parent, None)

    def test_load_full(self):
        """Loading the parent also reads the children file (two opens)."""
        model = PolymorphicParentModel(self.storage, id='parent-1')
        self.assertEqual(model.dump(), {
            'id': 'parent-1',
            'field': 'parent-1',
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
        })
        self.assertEqual(self.storage.open.call_count, 2)
        self.storage.open.assert_has_calls([
            mock.call('parents.json'),
            mock.call('children.json')])

    def test_load_partial(self):
        """Loading a child resolves it to the same object the parent holds."""
        model = PolymorphicChildModel1(
            self.storage, id='child-1', parent=PolymorphicParentModel(
                self.storage, id='parent-1'))
        self.assertEqual(model.dump(), {
            'type': 'PolymorphicChildModel1',
            'id': 'child-1',
            'field1': 'child-1',
            'parent': 'parent-1',
        })
        # The loaded child is identical to the parent's first child.
        self.assertEqual(model, model.parent.children[0])
        self.assertEqual(self.storage.open.call_count, 2)
        self.storage.open.assert_has_calls([
            mock.call('parents.json'),
            mock.call('children.json')])
        self.assertEqual(model.parent.dump(), {
            'children': [
                {
                    'type': 'PolymorphicChildModel1',
                    'id': 'child-1',
                },
                {
                    '_type_': 'PolymorphicChildModel2',
                    'id': 'child-2',
                },
            ],
            'field': 'parent-1',
            'id': 'parent-1',
        })

    def test_save_field(self):
        """Field changes only touch the file of the model that is saved."""
        parent = PolymorphicParentModel(self.storage, id='parent-1')
        child = parent.children[0]
        child.field1 = 'changed-id-1'
        parent.field = 'changed-id-2'
        parent.save()
        # Only the parents file is written; the child's change is pending.
        self.assertEqual(self.storage.save.call_count, 1)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['parents.json'],
            '{\n'
            '    "children": [\n'
            '        {\n'
            '            "id": "child-1", \n'
            '            "type": "PolymorphicChildModel1"\n'
            '        }, \n'
            '        {\n'
            '            "_type_": "PolymorphicChildModel2", \n'
            '            "id": "child-2"\n'
            '        }\n'
            '    ], \n'
            '    "field": "changed-id-2", \n'
            '    "id": "parent-1"\n'
            '}')
        child.save()
        self.assertEqual(self.storage.save.call_count, 2)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY),
            mock.call('children.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['children.json'],
            '[\n'
            '    {\n'
            '        "field1": "changed-id-1", \n'
            '        "id": "child-1", \n'
            '        "parent": "parent-1", \n'
            '        "type": "PolymorphicChildModel1"\n'
            '    }, \n'
            '    {\n'
            '        "_type_": "PolymorphicChildModel2", \n'
            '        "field2": "child-2", \n'
            '        "id": "child-2", \n'
            '        "parent": "parent-1"\n'
            '    }\n'
            ']')

    def test_save_id(self):
        """Id changes cascade into every file that references the model."""
        parent = PolymorphicParentModel(self.storage, id='parent-1')
        child = parent.children[0]
        child.id = 'changed-id-1'
        parent.id = 'changed-id-2'
        parent.save()
        # Saving the parent also rewrites children.json because the
        # children reference the parent by id.
        self.assertEqual(self.storage.save.call_count, 2)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY),
            mock.call('children.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['parents.json'],
            '{\n'
            '    "children": [\n'
            '        {\n'
            '            "id": "child-1", \n'
            '            "type": "PolymorphicChildModel1"\n'
            '        }, \n'
            '        {\n'
            '            "_type_": "PolymorphicChildModel2", \n'
            '            "id": "child-2"\n'
            '        }\n'
            '    ], \n'
            '    "field": "parent-1", \n'
            '    "id": "changed-id-2"\n'
            '}')
        self.assertEqual(
            self.storage.files['children.json'],
            '[\n'
            '    {\n'
            '        "field1": "child-1", \n'
            '        "id": "child-1", \n'
            '        "parent": "changed-id-2", \n'
            '        "type": "PolymorphicChildModel1"\n'
            '    }, \n'
            '    {\n'
            '        "_type_": "PolymorphicChildModel2", \n'
            '        "field2": "child-2", \n'
            '        "id": "child-2", \n'
            '        "parent": "changed-id-2"\n'
            '    }\n'
            ']')
        child.save()
        # Saving the child rewrites children.json and parents.json (the
        # parent embeds the child's id), for four saves in total.
        self.assertEqual(self.storage.save.call_count, 4)
        self.storage.save.assert_has_calls([
            mock.call('parents.json', mock.ANY),
            mock.call('children.json', mock.ANY),
            mock.call('children.json', mock.ANY),
            mock.call('parents.json', mock.ANY)])
        self.assertEqual(
            self.storage.files['children.json'],
            '[\n'
            '    {\n'
            '        "field1": "child-1", \n'
            '        "id": "changed-id-1", \n'
            '        "parent": "changed-id-2", \n'
            '        "type": "PolymorphicChildModel1"\n'
            '    }, \n'
            '    {\n'
            '        "_type_": "PolymorphicChildModel2", \n'
            '        "field2": "child-2", \n'
            '        "id": "child-2", \n'
            '        "parent": "changed-id-2"\n'
            '    }\n'
            ']')
        self.assertEqual(
            self.storage.files['parents.json'],
            '{\n'
            '    "children": [\n'
            '        {\n'
            '            "id": "changed-id-1", \n'
            '            "type": "PolymorphicChildModel1"\n'
            '        }, \n'
            '        {\n'
            '            "_type_": "PolymorphicChildModel2", \n'
            '            "id": "child-2"\n'
            '        }\n'
            '    ], \n'
            '    "field": "parent-1", \n'
            '    "id": "changed-id-2"\n'
            '}')
| 33.696492
| 81
| 0.470663
| 6,142
| 66,281
| 5.039075
| 0.019212
| 0.128918
| 0.021195
| 0.041163
| 0.961002
| 0.943619
| 0.922229
| 0.906365
| 0.890469
| 0.873732
| 0
| 0.044866
| 0.376548
| 66,281
| 1,966
| 82
| 33.713632
| 0.704112
| 0
| 0
| 0.877082
| 0
| 0
| 0.186977
| 0.017049
| 0
| 0
| 0
| 0
| 0.229179
| 1
| 0.041356
| false
| 0
| 0.002298
| 0
| 0.045951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fea2f3dc0db0e62114792019806357c11f09021c
| 5,070
|
py
|
Python
|
worldloader/example_tutorial_world/worlddata/migrations/0001_initial.py
|
luyijun/evennia_worldloader
|
79f3618a12265101ef5ff8c9182780874cfeb534
|
[
"BSD-3-Clause"
] | 1
|
2016-04-03T02:26:21.000Z
|
2016-04-03T02:26:21.000Z
|
worldloader/example_tutorial_world/worlddata/migrations/0001_initial.py
|
luyijun/evennia_worldloader
|
79f3618a12265101ef5ff8c9182780874cfeb534
|
[
"BSD-3-Clause"
] | null | null | null |
worldloader/example_tutorial_world/worlddata/migrations/0001_initial.py
|
luyijun/evennia_worldloader
|
79f3618a12265101ef5ff8c9182780874cfeb534
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the example tutorial world's data tables.

    NOTE(review): this looks like an auto-generated Django migration
    (makemigrations output); the repeated field lists across the five
    CreateModel operations are left verbatim on purpose — migrations
    should not be hand-refactored once applied.
    """

    # First migration of the app: no dependencies.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='personal_objects',
            fields=[
                # 'key' doubles as the primary key for every table below.
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'Personal Object List',
                'verbose_name_plural': 'Personal Object List',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='world_details',
            fields=[
                # Details only need key/name/desc/location.
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Detail List',
                'verbose_name_plural': 'World Detail List',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='world_exits',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Exit List',
                'verbose_name_plural': 'World Exit List',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='world_objects',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Object List',
                'verbose_name_plural': 'World Object List',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='world_rooms',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Room List',
                'verbose_name_plural': 'World Room List',
            },
            bases=(models.Model,),
        ),
    ]
| 45.267857
| 93
| 0.535897
| 471
| 5,070
| 5.622081
| 0.118896
| 0.198263
| 0.237915
| 0.317221
| 0.907855
| 0.856118
| 0.856118
| 0.856118
| 0.818353
| 0.776057
| 0
| 0.030725
| 0.319527
| 5,070
| 111
| 94
| 45.675676
| 0.736812
| 0.004142
| 0
| 0.742857
| 0
| 0
| 0.139885
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019048
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
22e3d8334724b10fba7f4af0202c37ffa0869da8
| 1,338
|
py
|
Python
|
site/stats/models.py
|
marcus-crane/site
|
33287005d19287f2851837e38bca974b4e6682bd
|
[
"MIT"
] | 2
|
2018-04-10T05:12:46.000Z
|
2018-05-06T20:12:31.000Z
|
site/stats/models.py
|
marcus-crane/site
|
33287005d19287f2851837e38bca974b4e6682bd
|
[
"MIT"
] | 8
|
2017-10-27T09:00:58.000Z
|
2017-11-19T09:58:17.000Z
|
site/stats/models.py
|
marcus-crane/site
|
33287005d19287f2851837e38bca974b4e6682bd
|
[
"MIT"
] | null | null | null |
from django.db import models
class MediaItem(models.Model):
    """Abstract base with the three fields every stats model shares.

    Being abstract, it creates no table of its own; inherited fields are
    placed first in each concrete model, matching the original order
    (name, image, link, then the model-specific fields).
    """

    name = models.CharField(max_length=200)
    image = models.URLField()
    link = models.URLField()

    class Meta:
        abstract = True


class Book(MediaItem):
    """A book, displayed as "<name> by <author>"."""

    author = models.CharField(max_length=200)

    def __str__(self):
        return "{0} by {1}".format(self.name, self.author)


class Game(MediaItem):
    """A game, displayed as "<name> (<year>)"."""

    year = models.IntegerField()
    platform = models.CharField(max_length=200)

    def __str__(self):
        return "{0} ({1})".format(self.name, self.year)


class Movie(MediaItem):
    """A movie, displayed as "<name> (<year>)"."""

    year = models.IntegerField()

    def __str__(self):
        return "{0} ({1})".format(self.name, self.year)


class Song(MediaItem):
    """A song, displayed as "<name> - <artist>"."""

    artist = models.CharField(max_length=200)

    def __str__(self):
        return "{0} - {1}".format(self.name, self.artist)


class Show(MediaItem):
    """A TV show, displayed as "<name> - <series>"."""

    series = models.CharField(max_length=200)

    def __str__(self):
        return "{0} - {1}".format(self.name, self.series)
| 28.468085
| 58
| 0.657698
| 171
| 1,338
| 4.976608
| 0.19883
| 0.164512
| 0.190364
| 0.253819
| 0.875441
| 0.853114
| 0.853114
| 0.853114
| 0.853114
| 0.853114
| 0
| 0.034419
| 0.196562
| 1,338
| 47
| 59
| 28.468085
| 0.757209
| 0
| 0
| 0.648649
| 0
| 0
| 0.034354
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.135135
| false
| 0
| 0.027027
| 0.135135
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
a3cda7cdb7a9fe44648dce7db093770227dbe00e
| 4,313
|
py
|
Python
|
test/pyaz/appservice/ase/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/appservice/ase/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/appservice/ase/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
import json, subprocess
from ... pyaz_utils import get_cli_name, get_params
def list(resource_group=None):
    """List app service environments via the Azure CLI.

    Returns the parsed JSON output on success; raises Exception with the
    CLI's stderr on failure.

    NOTE(review): this function shadows the builtin `list`; renaming would
    break existing callers, so the name is kept.
    """
    params = get_params(locals())
    # SECURITY: the command runs through a shell (shell=True); get_params
    # must be trusted to quote/escape its values.
    command = "az appservice ase list " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fix: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
def list_addresses(name, resource_group=None):
    """List the VIP addresses of an app service environment via the Azure CLI.

    Returns the parsed JSON output on success; raises Exception with the
    CLI's stderr on failure.
    """
    params = get_params(locals())
    # SECURITY: the command runs through a shell (shell=True); get_params
    # must be trusted to quote/escape its values.
    command = "az appservice ase list-addresses " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fix: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
def list_plans(name, resource_group=None):
    '''
    Run ``az appservice ase list-plans`` and return the parsed JSON output.

    Args:
        name: Name of the app service environment.
        resource_group: Optional resource group name.

    Raises:
        Exception: carrying the CLI's stderr text when the command
            produced no stdout.
    '''
    params = get_params(locals())
    command = "az appservice ase list-plans " + params
    print(command)
    # NOTE(security): shell=True with a concatenated command string is only
    # safe if get_params() quotes its values -- verify upstream.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: removed unreachable print(stdout) that followed this return.
        return json.loads(stdout)
    else:
        # Fixed: removed unreachable print(stderr) that followed this raise.
        raise Exception(stderr)
def show(name, resource_group=None):
    '''
    Run ``az appservice ase show`` and return the parsed JSON output.

    Args:
        name: Name of the app service environment.
        resource_group: Optional resource group name.

    Raises:
        Exception: carrying the CLI's stderr text when the command
            produced no stdout.
    '''
    params = get_params(locals())
    command = "az appservice ase show " + params
    print(command)
    # NOTE(security): shell=True with a concatenated command string is only
    # safe if get_params() quotes its values -- verify upstream.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: removed unreachable print(stdout) that followed this return.
        return json.loads(stdout)
    else:
        # Fixed: removed unreachable print(stderr) that followed this raise.
        raise Exception(stderr)
def create(resource_group, name, subnet, kind=None, vnet_name=None, ignore_route_table=None, ignore_network_security_group=None, virtual_ip_type=None, front_end_scale_factor=None, front_end_sku=None, force_route_table=None, force_network_security_group=None, ignore_subnet_size_validation=None, location=None, os_preference=None, zone_redundant=None, no_wait=None):
    '''
    Run ``az appservice ase create`` and return the parsed JSON output.

    Args:
        resource_group: Resource group to create the environment in.
        name: Name of the new app service environment.
        subnet: Subnet for the environment.
        All remaining keyword arguments map 1:1 onto the corresponding
        ``az appservice ase create`` CLI flags and default to None
        (flag omitted).

    Raises:
        Exception: carrying the CLI's stderr text when the command
            produced no stdout.
    '''
    params = get_params(locals())
    command = "az appservice ase create " + params
    print(command)
    # NOTE(security): shell=True with a concatenated command string is only
    # safe if get_params() quotes its values -- verify upstream.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: removed unreachable print(stdout) that followed this return.
        return json.loads(stdout)
    else:
        # Fixed: removed unreachable print(stderr) that followed this raise.
        raise Exception(stderr)
def update(name, resource_group=None, front_end_scale_factor=None, front_end_sku=None, no_wait=None):
    '''
    Run ``az appservice ase update`` and return the parsed JSON output.

    Args:
        name: Name of the app service environment.
        resource_group: Optional resource group name.
        front_end_scale_factor: Optional new scale factor (CLI flag).
        front_end_sku: Optional new front-end SKU (CLI flag).
        no_wait: Do not wait for the operation to finish (CLI flag).

    Raises:
        Exception: carrying the CLI's stderr text when the command
            produced no stdout.
    '''
    params = get_params(locals())
    command = "az appservice ase update " + params
    print(command)
    # NOTE(security): shell=True with a concatenated command string is only
    # safe if get_params() quotes its values -- verify upstream.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: removed unreachable print(stdout) that followed this return.
        return json.loads(stdout)
    else:
        # Fixed: removed unreachable print(stderr) that followed this raise.
        raise Exception(stderr)
def delete(name, resource_group=None, yes=None, no_wait=None):
    '''
    Run ``az appservice ase delete`` and return the parsed JSON output.

    Args:
        name: Name of the app service environment.
        resource_group: Optional resource group name.
        yes: Skip the confirmation prompt (CLI flag).
        no_wait: Do not wait for the operation to finish (CLI flag).

    Raises:
        Exception: carrying the CLI's stderr text when the command
            produced no stdout.
    '''
    params = get_params(locals())
    command = "az appservice ase delete " + params
    print(command)
    # NOTE(security): shell=True with a concatenated command string is only
    # safe if get_params() quotes its values -- verify upstream.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: removed unreachable print(stdout) that followed this return.
        return json.loads(stdout)
    else:
        # Fixed: removed unreachable print(stderr) that followed this raise.
        raise Exception(stderr)
def create_inbound_services(resource_group, name, subnet, vnet_name=None, skip_dns=None):
    '''
    Run ``az appservice ase create-inbound-services`` and return the
    parsed JSON output.

    Args:
        resource_group: Resource group of the environment.
        name: Name of the app service environment.
        subnet: Subnet for the inbound services.
        vnet_name: Optional virtual network name (CLI flag).
        skip_dns: Skip DNS configuration (CLI flag).

    Raises:
        Exception: carrying the CLI's stderr text when the command
            produced no stdout.
    '''
    params = get_params(locals())
    command = "az appservice ase create-inbound-services " + params
    print(command)
    # NOTE(security): shell=True with a concatenated command string is only
    # safe if get_params() quotes its values -- verify upstream.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: removed unreachable print(stdout) that followed this return.
        return json.loads(stdout)
    else:
        # Fixed: removed unreachable print(stderr) that followed this raise.
        raise Exception(stderr)
| 37.181034
| 365
| 0.678646
| 542
| 4,313
| 5.295203
| 0.136531
| 0.078049
| 0.055749
| 0.052962
| 0.838676
| 0.838676
| 0.838676
| 0.838676
| 0.838676
| 0.838676
| 0
| 0.004682
| 0.207744
| 4,313
| 115
| 366
| 37.504348
| 0.835236
| 0
| 0
| 0.830189
| 0
| 0
| 0.070716
| 0.005333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075472
| false
| 0
| 0.018868
| 0
| 0.169811
| 0.226415
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3d0324d02a20522ac08b797e2ebb8644a68dd3e
| 154
|
py
|
Python
|
discord/ext/commands/cooldowns.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/ext/commands/cooldowns.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/ext/commands/cooldowns.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
# Compatibility shim: re-exports disnake's cooldowns module so code importing
# the ``discord.ext.commands.cooldowns`` path keeps working against disnake.
from disnake.ext.commands.cooldowns import *
from disnake.ext.commands.cooldowns import __dict__ as __original_dict__
# ``import *`` skips underscore-prefixed names (and anything not in __all__),
# so also copy the source module's full namespace into this module.
# At module level, locals() is the module's global namespace.
locals().update(__original_dict__)
| 30.8
| 72
| 0.844156
| 20
| 154
| 5.8
| 0.55
| 0.189655
| 0.241379
| 0.37931
| 0.637931
| 0.637931
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077922
| 154
| 4
| 73
| 38.5
| 0.816901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a3f4f8580c58f953243df0ec10f5d6ece1335b18
| 44,849
|
py
|
Python
|
devilry/devilry_admin/tests/assignment/students/test_create_groups.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 29
|
2015-01-18T22:56:23.000Z
|
2020-11-10T21:28:27.000Z
|
devilry/devilry_admin/tests/assignment/students/test_create_groups.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 786
|
2015-01-06T16:10:18.000Z
|
2022-03-16T11:10:50.000Z
|
devilry/devilry_admin/tests/assignment/students/test_create_groups.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 15
|
2015-04-06T06:18:43.000Z
|
2021-02-24T12:28:30.000Z
|
import datetime
import htmls
import mock
from django import forms
from django.contrib import messages
from django.http import Http404
from django.test import TestCase
from django.utils import timezone
from cradmin_legacy import cradmin_testhelpers
from model_bakery import baker
from devilry.apps.core.models import AssignmentGroup, Candidate
from devilry.apps.core.baker_recipes import ACTIVE_PERIOD_START
from devilry.devilry_admin.views.assignment.students import create_groups
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql
from devilry.devilry_group import devilry_group_baker_factories
from devilry.devilry_group.models import FeedbackSet
class TestChooseMethod(TestCase, cradmin_testhelpers.TestCaseMixin):
    """Tests for the create_groups.ChooseMethod view."""
    viewclass = create_groups.ChooseMethod

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def test_title(self):
        assignment = baker.make('core.Assignment',
                                short_name='testassignment',
                                parentnode__short_name='testperiod',
                                parentnode__parentnode__short_name='testsubject')
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment)
        self.assertIn(
            'Add students to testsubject.testperiod.testassignment',
            response.selector.one('title').alltext_normalized)

    def test_h1(self):
        assignment = baker.make('core.Assignment')
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment)
        self.assertEqual(
            'Add students',
            response.selector.one('h1').alltext_normalized)

    def test_choices_sanity(self):
        assignment = baker.make('core.Assignment')
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment)
        self.assertEqual(
            3,
            response.selector.count('.cradmin-legacy-listbuilder-itemvalue a'))

    def __mock_reverse_appurl(self):
        # Build a fake reverse_appurl that encodes its arguments into the URL,
        # making generated hrefs easy to assert on.
        def reverse_appurl(viewname, args, kwargs):
            return '/{}/args={},kwargs={}'.format(viewname, args, kwargs)
        return reverse_appurl

    def test_choice_relatedstudents_url(self):
        assignment = baker.make('core.Assignment')
        app_mock = mock.MagicMock()
        app_mock.reverse_appurl = self.__mock_reverse_appurl()
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            cradmin_app=app_mock
        )
        self.assertEqual(
            "/confirm/args=(),kwargs={'selected_students': 'relatedstudents'}",
            response.selector.one(
                '#devilry_admin_create_groups_choosemethod_relatedstudents_link')['href'])

    def test_choice_relatedstudents_label(self):
        assignment = baker.make('core.Assignment',
                                parentnode__parentnode__short_name='testsubject')
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment)
        self.assertEqual(
            'All students',
            response.selector.one(
                '#devilry_admin_create_groups_choosemethod_relatedstudents_link').alltext_normalized)

    def test_choice_manual_select_value(self):
        assignment = baker.make('core.Assignment')
        app_mock = mock.MagicMock()
        app_mock.reverse_appurl = self.__mock_reverse_appurl()
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            cradmin_app=app_mock
        )
        self.assertEqual(
            "/manual-select/args=(),kwargs={}",
            response.selector.one(
                '#devilry_admin_create_groups_choosemethod_manualselect_link')['href'])

    def test_choice_manual_select_label(self):
        assignment = baker.make('core.Assignment')
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment)
        self.assertEqual(
            'Select manually',
            response.selector.one(
                '#devilry_admin_create_groups_choosemethod_manualselect_link').alltext_normalized)

    def test_choices_does_not_include_current_assignment(self):
        period = baker.make('core.Period')
        other_assignment = baker.make('core.Assignment', parentnode=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment)
        self.assertEqual(
            5,
            response.selector.count('.cradmin-legacy-listbuilder-itemvalue a'))
        self.assertFalse(
            response.selector.exists('#devilry_admin_create_groups_choosemethod_assignment_{}'.format(
                assignment.pk)))
        self.assertTrue(
            response.selector.exists('#devilry_admin_create_groups_choosemethod_assignment_{}'.format(
                other_assignment.pk)))

    def test_other_assignment_rending(self):
        period = baker.make('core.Period')
        other_assignment = baker.make('core.Assignment', parentnode=period,
                                      short_name='otherassignment')
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment)
        self.assertEqual(
            'All students',
            response.selector.one('#devilry_admin_create_groups_choosemethod_assignment_{}_all_link'.format(
                other_assignment.pk)).alltext_normalized)
        self.assertEqual(
            'Students with passing grade',
            response.selector.one('#devilry_admin_create_groups_choosemethod_assignment_{}_passing_link'.format(
                other_assignment.pk)).alltext_normalized)

    def test_other_assignments_ordering(self):
        # Newest publishing_time should render first; assignment4 is the
        # current role and must not be listed at all.
        period = baker.make_recipe('devilry.apps.core.period_active')
        assignment1 = baker.make('core.Assignment', parentnode=period,
                                 publishing_time=ACTIVE_PERIOD_START + datetime.timedelta(days=1))
        assignment2 = baker.make('core.Assignment', parentnode=period,
                                 publishing_time=ACTIVE_PERIOD_START + datetime.timedelta(days=2))
        assignment3 = baker.make('core.Assignment', parentnode=period,
                                 publishing_time=ACTIVE_PERIOD_START + datetime.timedelta(days=3))
        assignment4 = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(cradmin_role=assignment4)
        dom_ids = [
            element['id']
            for element in response.selector.list('.devilry-admin-create-groups-choosemethod-assignment')]
        self.assertEqual(
            [
                'devilry_admin_create_groups_choosemethod_assignment_{}'.format(assignment3.id),
                'devilry_admin_create_groups_choosemethod_assignment_{}'.format(assignment2.id),
                'devilry_admin_create_groups_choosemethod_assignment_{}'.format(assignment1.id),
            ],
            dom_ids
        )
class TestConfirmView(TestCase, cradmin_testhelpers.TestCaseMixin):
    """Tests for the create_groups.ConfirmView view."""
    viewclass = create_groups.ConfirmView

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def test_title(self):
        assignment = baker.make('core.Assignment',
                                short_name='testassignment',
                                parentnode__short_name='testperiod',
                                parentnode__parentnode__short_name='testsubject')
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_RELATEDSTUDENTS})
        self.assertIn(
            'Confirm that you want to add the following students to '
            'testsubject.testperiod.testassignment',
            response.selector.one('title').alltext_normalized)

    def test_h1(self):
        assignment = baker.make('core.Assignment', long_name='Assignment One')
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_RELATEDSTUDENTS})
        self.assertEqual(
            'Confirm that you want to add the following students to Assignment One',
            response.selector.one('h1').alltext_normalized)

    def test_get_subheader_selected_students_relateadstudents(self):
        assignment = baker.make('core.Assignment',
                                parentnode__parentnode__short_name='testsubject',
                                parentnode__short_name='testperiod')
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_RELATEDSTUDENTS})
        self.assertEqual(
            'All students on testsubject.testperiod',
            response.selector.one(
                '#devilry_admin_create_groups_confirm_selected_student_label').alltext_normalized)

    def test_get_subheader_selected_students_all_on_assignment(self):
        period = baker.make('core.Period')
        other_assignment = baker.make('core.Assignment',
                                      long_name='Assignment One',
                                      parentnode=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            requestkwargs={'data': {'assignment': other_assignment.id}},
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_ALL_ON_ASSIGNMENT})
        self.assertEqual(
            'All students on Assignment One',
            response.selector.one(
                '#devilry_admin_create_groups_confirm_selected_student_label').alltext_normalized)

    def test_get_subheader_selected_students_passing_grade_on_assignment(self):
        period = baker.make('core.Period')
        other_assignment = baker.make('core.Assignment',
                                      long_name='Assignment One',
                                      parentnode=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            requestkwargs={'data': {'assignment': other_assignment.id}},
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_PASSING_GRADE_ON_ASSIGNMENT})
        self.assertEqual(
            'Students with passing grade on Assignment One',
            response.selector.one(
                '#devilry_admin_create_groups_confirm_selected_student_label').alltext_normalized)

    def test_get_render_submitbutton(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent', period=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_RELATEDSTUDENTS})
        self.assertEqual(
            'Add students',
            response.selector.one(
                '#devilry_admin_create_groups_confirm_form button[name="add_students"]').alltext_normalized
        )

    def test_get_render_form_selected_items_selected_students_relatedstudents(self):
        period = baker.make('core.Period')
        student1 = baker.make('core.RelatedStudent', period=period)
        student2 = baker.make('core.RelatedStudent', period=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_RELATEDSTUDENTS})
        selected_ids = [
            element['value']
            for element in response.selector.list(
                '#devilry_admin_create_groups_confirm_form input[name=selected_items]')]
        self.assertEqual(
            {str(student1.id), str(student2.id)},
            set(selected_ids))

    def test_get_render_form_selected_items_selected_students_all_on_assignment(self):
        period = baker.make('core.Period')
        student1 = baker.make('core.RelatedStudent', period=period)
        student2 = baker.make('core.RelatedStudent', period=period)
        baker.make('core.RelatedStudent', period=period)
        other_assignment = baker.make('core.Assignment', parentnode=period)
        baker.make('core.Candidate',
                   relatedstudent=student1,
                   assignment_group__parentnode=other_assignment)
        baker.make('core.Candidate',
                   relatedstudent=student2,
                   assignment_group__parentnode=other_assignment)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            requestkwargs={
                'data': {'assignment': other_assignment.id}
            },
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_ALL_ON_ASSIGNMENT})
        selected_ids = [
            element['value']
            for element in response.selector.list(
                '#devilry_admin_create_groups_confirm_form input[name=selected_items]')]
        self.assertEqual(
            {str(student1.id), str(student2.id)},
            set(selected_ids))

    def test_get_render_form_selected_items_selected_students_passing_grade_on_assignment(self):
        period = baker.make('core.Period')
        student1 = baker.make('core.RelatedStudent', period=period)
        student2 = baker.make('core.RelatedStudent', period=period)
        baker.make('core.RelatedStudent', period=period)
        other_assignment = baker.make('core.Assignment', parentnode=period,
                                      passing_grade_min_points=1)
        candidate1 = baker.make('core.Candidate',
                                relatedstudent=student1,
                                assignment_group__parentnode=other_assignment)
        devilry_group_baker_factories.feedbackset_first_attempt_published(
            group=candidate1.assignment_group,
            grading_points=1)
        baker.make('core.Candidate',
                   relatedstudent=student2,
                   assignment_group__parentnode=other_assignment)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            requestkwargs={
                'data': {'assignment': other_assignment.id}
            },
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_PASSING_GRADE_ON_ASSIGNMENT})
        selected_ids = [
            element['value']
            for element in response.selector.list(
                '#devilry_admin_create_groups_confirm_form input[name=selected_items]')]
        self.assertEqual(
            {str(student1.id)},
            set(selected_ids))

    def test_get_no_relatedstudents_matching_query(self):
        period = baker.make('core.Period')
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_RELATEDSTUDENTS})
        self.assertEqual(
            'No students matching your selection found.',
            response.selector.one(
                '.devilry-admin-create-groups-confirm-no-students').alltext_normalized)

    def test_get_selected_students_relateadstudents(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent',
                   user__fullname='Match User',
                   period=period)
        baker.make('core.RelatedStudent',
                   user__fullname='Other User',
                   period=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_RELATEDSTUDENTS})
        self.assertEqual(
            2,
            response.selector.count('.devilry-admin-listbuilder-relatedstudent-readonlyitemvalue'))

    def test_get_selected_students_all_on_assignment_invalid_assignment_id(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent', period=period)
        other_assignment = baker.make('core.Assignment')  # Not in period!
        assignment = baker.make('core.Assignment', parentnode=period)
        with self.assertRaisesMessage(Http404, 'Invalid assignment_id'):
            self.mock_getrequest(
                cradmin_role=assignment,
                viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_ALL_ON_ASSIGNMENT},
                requestkwargs={
                    'data': {'assignment': other_assignment.id}
                })

    def test_get_selected_students_all_on_assignment(self):
        period = baker.make('core.Period')
        student1 = baker.make('core.RelatedStudent',
                              period=period)
        student2 = baker.make('core.RelatedStudent',
                              period=period)
        student3 = baker.make('core.RelatedStudent',
                              user__fullname='User that is not on the other assignment',
                              period=period)
        other_assignment = baker.make('core.Assignment', parentnode=period)
        baker.make('core.Candidate',
                   relatedstudent=student1,
                   assignment_group__parentnode=other_assignment)
        baker.make('core.Candidate',
                   relatedstudent=student2,
                   assignment_group__parentnode=other_assignment)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_ALL_ON_ASSIGNMENT},
            requestkwargs={
                'data': {'assignment': other_assignment.id}
            })
        self.assertEqual(
            2,
            response.selector.count('.devilry-admin-listbuilder-relatedstudent-readonlyitemvalue'))
        self.assertNotContains(response.response, student3.user.fullname)

    def test_get_selected_students_passing_grade_on_assignment_invalid_assignment_id(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent', period=period)
        other_assignment = baker.make('core.Assignment')  # Not in period!
        assignment = baker.make('core.Assignment', parentnode=period)
        with self.assertRaisesMessage(Http404, 'Invalid assignment_id'):
            self.mock_getrequest(
                cradmin_role=assignment,
                viewkwargs={
                    'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_PASSING_GRADE_ON_ASSIGNMENT},
                requestkwargs={
                    'data': {'assignment': other_assignment.id}
                })

    def test_get_selected_students_passing_grade_on_assignment(self):
        period = baker.make('core.Period')
        other_assignment = baker.make('core.Assignment',
                                      parentnode=period,
                                      passing_grade_min_points=1)
        student1 = baker.make('core.RelatedStudent',
                              period=period)
        candidate1 = baker.make('core.Candidate',
                                relatedstudent=student1,
                                assignment_group__parentnode=other_assignment)
        devilry_group_baker_factories.feedbackset_first_attempt_published(
            group=candidate1.assignment_group,
            grading_points=1)
        student2 = baker.make('core.RelatedStudent',
                              user__fullname='User that is not candidate',
                              period=period)
        student3 = baker.make('core.RelatedStudent',
                              user__fullname='User that did not pass',
                              period=period)
        candidate3 = baker.make('core.Candidate',
                                relatedstudent=student3,
                                assignment_group__parentnode=other_assignment)
        devilry_group_baker_factories.feedbackset_first_attempt_published(
            group=candidate3.assignment_group,
            grading_points=0)
        student4 = baker.make('core.RelatedStudent',
                              user__fullname='User that is not on the other assignment',
                              period=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        response = self.mock_http200_getrequest_htmls(
            cradmin_role=assignment,
            viewkwargs={'selected_students': create_groups.ConfirmView.SELECTED_STUDENTS_PASSING_GRADE_ON_ASSIGNMENT},
            requestkwargs={
                'data': {'assignment': other_assignment.id}
            })
        self.assertEqual(
            1,
            response.selector.count('.devilry-admin-listbuilder-relatedstudent-readonlyitemvalue'))
        self.assertNotIn(student2.user.fullname,
                         response.response.content.decode())
        self.assertNotIn(student3.user.fullname,
                         response.response.content.decode())
        self.assertNotIn(student4.user.fullname,
                         response.response.content.decode())

    def test_post_ok_creates_groups(self):
        period = baker.make('core.Period')
        student1 = baker.make('core.RelatedStudent',
                              period=period)
        student2 = baker.make('core.RelatedStudent',
                              period=period)
        student3 = baker.make('core.RelatedStudent',
                              period=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        self.assertEqual(0, AssignmentGroup.objects.count())
        self.mock_http302_postrequest(
            cradmin_role=assignment,
            requestkwargs={
                'data': {
                    'selected_items': [
                        student1.id,
                        student2.id,
                        student3.id,
                    ]
                }
            }
        )
        # Note: We only need a sanity tests here - the real tests are
        # in the tests for AssignmentGroup.objects.bulk_create_groups()
        self.assertEqual(3, AssignmentGroup.objects.count())
        self.assertEqual(3, Candidate.objects.count())
        self.assertEqual(3, FeedbackSet.objects.count())
        first_group = AssignmentGroup.objects.first()
        self.assertEqual(1, first_group.candidates.count())
        self.assertEqual(1, first_group.feedbackset_set.count())

    def test_post_ok_redirect(self):
        period = baker.make('core.Period')
        student = baker.make('core.RelatedStudent',
                             period=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        self.assertEqual(0, AssignmentGroup.objects.count())
        cradmin_instance_mock = mock.MagicMock()
        self.mock_http302_postrequest(
            cradmin_role=assignment,
            cradmin_instance=cradmin_instance_mock,
            requestkwargs={
                'data': {
                    'selected_items': [
                        student.id,
                    ]
                }
            }
        )
        cradmin_instance_mock.appindex_url.assert_called_once_with('studentoverview')

    def test_post_relatedstudent_already_on_assignment(self):
        period = baker.make('core.Period')
        student = baker.make('core.RelatedStudent',
                             user__shortname='userb@example.com',
                             period=period)
        assignment = baker.make('core.Assignment', parentnode=period)
        baker.make('core.Candidate',
                   relatedstudent=student,
                   assignment_group__parentnode=assignment)
        self.assertEqual(1, AssignmentGroup.objects.count())
        messages_mock = mock.MagicMock()
        app_mock = mock.MagicMock()
        app_mock.reverse_appindexurl.return_value = '/appindex'
        response = self.mock_http302_postrequest(
            cradmin_role=assignment,
            messagesmock=messages_mock,
            cradmin_app=app_mock,
            requestkwargs={
                'data': {
                    'selected_items': [
                        student.id
                    ]
                }
            },
        )
        app_mock.reverse_appindexurl.assert_called_once_with()
        self.assertEqual('/appindex', response.response['Location'])
        self.assertEqual(1, AssignmentGroup.objects.count())
        # NOTE(review): this asserts ManualSelectStudentsView.form_invalid_message
        # while the sibling test below uses ConfirmView.form_invalid_message --
        # confirm this cross-reference is intentional.
        messages_mock.add.assert_called_once_with(
            messages.ERROR,
            create_groups.ManualSelectStudentsView.form_invalid_message,
            '')

    def test_post_relatedstudent_not_relatedstudent_on_period(self):
        period = baker.make('core.Period')
        student = baker.make('core.RelatedStudent',
                             user__shortname='userb@example.com')
        assignment = baker.make('core.Assignment', parentnode=period)
        self.assertEqual(0, AssignmentGroup.objects.count())
        messages_mock = mock.MagicMock()
        app_mock = mock.MagicMock()
        app_mock.reverse_appindexurl.return_value = '/appindex'
        response = self.mock_http302_postrequest(
            cradmin_role=assignment,
            messagesmock=messages_mock,
            cradmin_app=app_mock,
            requestkwargs={
                'data': {
                    'selected_items': [
                        student.id
                    ]
                }
            },
        )
        app_mock.reverse_appindexurl.assert_called_once_with()
        self.assertEqual('/appindex', response.response['Location'])
        self.assertEqual(0, AssignmentGroup.objects.count())
        messages_mock.add.assert_called_once_with(
            messages.ERROR,
            create_groups.ConfirmView.form_invalid_message,
            '')
class TestRelatedStudentMultiselectTarget(TestCase):
    """Tests for the RelatedStudentMultiselectTarget renderable."""

    def test_with_items_title(self):
        target = create_groups.RelatedStudentMultiselectTarget(form=forms.Form())
        selector = htmls.S(target.render(request=mock.MagicMock()))
        self.assertEqual(
            'Add students',
            selector.one('button[type="submit"]').alltext_normalized)
class TestManualSelectStudentsView(TestCase, cradmin_testhelpers.TestCaseMixin):
viewclass = create_groups.ManualSelectStudentsView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_title(self):
testassignment = baker.make('core.Assignment',
short_name='testassignment',
parentnode__short_name='testperiod',
parentnode__parentnode__short_name='testsubject')
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=testassignment)
self.assertIn(
'Select the students you want to add to testsubject.testperiod.testassignment',
mockresponse.selector.one('title').alltext_normalized)
def test_h1(self):
testassignment = baker.make('core.Assignment', long_name='Assignment One')
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=testassignment)
self.assertEqual(
'Select the students you want to add to Assignment One',
mockresponse.selector.one('h1').alltext_normalized)
def test_no_relatedstudents(self):
testperiod = baker.make('core.Period')
testassignment = baker.make('core.Assignment', parentnode=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=testassignment)
self.assertEqual(
'No students found.',
mockresponse.selector.one(
'.devilry-admin-create-groups-manual-select-no-relatedstudents-message').alltext_normalized)
def test_relatedstudent_not_in_assignment_period_excluded(self):
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent')
testassignment = baker.make('core.Assignment', parentnode=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=testassignment)
self.assertEqual(
0,
mockresponse.selector.count('.cradmin-legacy-listbuilder-itemvalue'))
def test_relatedstudent_in_assignment_period_included(self):
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent',
period=testperiod)
testassignment = baker.make('core.Assignment', parentnode=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=testassignment)
self.assertEqual(
1,
mockresponse.selector.count('.cradmin-legacy-listbuilder-itemvalue'))
def test_relatedstudent_with_candidate_on_assignment_not_included(self):
testperiod = baker.make('core.Period')
relatedstudent = baker.make('core.RelatedStudent',
period=testperiod)
testassignment = baker.make('core.Assignment', parentnode=testperiod)
baker.make('core.Candidate',
relatedstudent=relatedstudent,
assignment_group__parentnode=testassignment)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=testassignment)
self.assertEqual(
0,
mockresponse.selector.count('.cradmin-legacy-listbuilder-itemvalue'))
def test_render_relatedstudent_sanity(self):
# This is tested in detail in the tests for
# devilry.devilry_admin.cradminextensions.multiselect2.multiselect2_relatedstudent.ItemValue
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent',
user__shortname='testuser',
user__fullname='Test User',
period=testperiod)
testassignment = baker.make('core.Assignment', parentnode=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=testassignment)
self.assertEqual(
'Test User(testuser)',
mockresponse.selector.one(
'.cradmin-legacy-listbuilder-itemvalue-titledescription-title').alltext_normalized)
def test_render_search(self):
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent',
user__shortname='testuser',
user__fullname='Match User',
period=testperiod)
baker.make('core.RelatedStudent',
user__fullname='Other User',
period=testperiod)
testassignment = baker.make('core.Assignment', parentnode=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testassignment,
viewkwargs={'filters_string': 'search-match'}
)
self.assertEqual(
1,
mockresponse.selector.count('.cradmin-legacy-listbuilder-itemvalue'))
self.assertEqual(
'Match User(testuser)',
mockresponse.selector.one(
'.cradmin-legacy-listbuilder-itemvalue-titledescription-title').alltext_normalized)
def test_render_orderby_default(self):
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent',
user__shortname='userb@example.com',
period=testperiod)
baker.make('core.RelatedStudent',
user__shortname='x',
user__fullname='UserA',
period=testperiod)
baker.make('core.RelatedStudent',
user__shortname='userc',
period=testperiod)
testassignment = baker.make('core.Assignment', parentnode=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testassignment)
titles = [element.alltext_normalized
for element in mockresponse.selector.list(
'.cradmin-legacy-listbuilder-itemvalue-titledescription-title')]
self.assertEqual(
['UserA(x)', 'userb@example.com', 'userc'],
titles)
def test_render_orderby_name_descending(self):
    period = baker.make('core.Period')
    baker.make('core.RelatedStudent',
               user__shortname='userb@example.com',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='x',
               user__fullname='UserA',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='userc',
               period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    mockresponse = self.mock_http200_getrequest_htmls(
        cradmin_role=assignment,
        viewkwargs={'filters_string': 'orderby-name_descending'})
    title_elements = mockresponse.selector.list(
        '.cradmin-legacy-listbuilder-itemvalue-titledescription-title')
    # Same dataset as the default-ordering test, listed in reverse name order.
    self.assertEqual(
        ['userc', 'userb@example.com', 'UserA(x)'],
        [element.alltext_normalized for element in title_elements])
def test_render_orderby_lastname_ascending(self):
    period = baker.make('core.Period')
    baker.make('core.RelatedStudent',
               user__shortname='userb@example.com',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='x',
               user__fullname='User Aaa',
               user__lastname='Aaa',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='y',
               user__fullname='User ccc',
               user__lastname='ccc',
               period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    mockresponse = self.mock_http200_getrequest_htmls(
        cradmin_role=assignment,
        viewkwargs={'filters_string': 'orderby-lastname_ascending'})
    title_elements = mockresponse.selector.list(
        '.cradmin-legacy-listbuilder-itemvalue-titledescription-title')
    # The user without a lastname sorts first when ordering by lastname ascending.
    self.assertEqual(
        ['userb@example.com', 'User Aaa(x)', 'User ccc(y)'],
        [element.alltext_normalized for element in title_elements])
def test_render_orderby_lastname_descending(self):
    period = baker.make('core.Period')
    baker.make('core.RelatedStudent',
               user__shortname='userb@example.com',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='x',
               user__fullname='User Aaa',
               user__lastname='Aaa',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='y',
               user__fullname='User ccc',
               user__lastname='ccc',
               period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    mockresponse = self.mock_http200_getrequest_htmls(
        cradmin_role=assignment,
        viewkwargs={'filters_string': 'orderby-lastname_descending'})
    title_elements = mockresponse.selector.list(
        '.cradmin-legacy-listbuilder-itemvalue-titledescription-title')
    # Reverse of the lastname_ascending test: the user without a lastname sorts last.
    self.assertEqual(
        ['User ccc(y)', 'User Aaa(x)', 'userb@example.com'],
        [element.alltext_normalized for element in title_elements])
def test_render_orderby_shortname_ascending(self):
    period = baker.make('core.Period')
    baker.make('core.RelatedStudent',
               user__shortname='userb@example.com',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='usera@example.com',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='userc@example.com',
               period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    mockresponse = self.mock_http200_getrequest_htmls(
        cradmin_role=assignment,
        viewkwargs={'filters_string': 'orderby-shortname_ascending'})
    title_elements = mockresponse.selector.list(
        '.cradmin-legacy-listbuilder-itemvalue-titledescription-title')
    self.assertEqual(
        ['usera@example.com', 'userb@example.com', 'userc@example.com'],
        [element.alltext_normalized for element in title_elements])
def test_render_orderby_shortname_descending(self):
    period = baker.make('core.Period')
    baker.make('core.RelatedStudent',
               user__shortname='userb@example.com',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='usera@example.com',
               period=period)
    baker.make('core.RelatedStudent',
               user__shortname='userc@example.com',
               period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    mockresponse = self.mock_http200_getrequest_htmls(
        cradmin_role=assignment,
        viewkwargs={'filters_string': 'orderby-shortname_descending'})
    title_elements = mockresponse.selector.list(
        '.cradmin-legacy-listbuilder-itemvalue-titledescription-title')
    self.assertEqual(
        ['userc@example.com', 'userb@example.com', 'usera@example.com'],
        [element.alltext_normalized for element in title_elements])
def test_post_ok_creates_groups(self):
    period = baker.make('core.Period')
    student1 = baker.make('core.RelatedStudent', period=period)
    student2 = baker.make('core.RelatedStudent', period=period)
    student3 = baker.make('core.RelatedStudent', period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    self.assertEqual(0, AssignmentGroup.objects.count())
    postdata = {
        'selected_items': [student1.id, student2.id, student3.id],
    }
    self.mock_http302_postrequest(
        cradmin_role=assignment,
        requestkwargs={'data': postdata})
    # Sanity checks only - the real coverage is in the tests for
    # AssignmentGroup.objects.bulk_create_groups()
    self.assertEqual(3, AssignmentGroup.objects.count())
    self.assertEqual(3, Candidate.objects.count())
    self.assertEqual(3, FeedbackSet.objects.count())
    first_group = AssignmentGroup.objects.first()
    self.assertEqual(1, first_group.candidates.count())
    self.assertEqual(1, first_group.feedbackset_set.count())
def test_post_ok_redirect(self):
    period = baker.make('core.Period')
    student = baker.make('core.RelatedStudent', period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    self.assertEqual(0, AssignmentGroup.objects.count())
    mock_cradmin_instance = mock.MagicMock()
    self.mock_http302_postrequest(
        cradmin_role=assignment,
        cradmin_instance=mock_cradmin_instance,
        requestkwargs={'data': {'selected_items': [student.id]}})
    # A successful POST redirects to the 'studentoverview' app index.
    mock_cradmin_instance.appindex_url.assert_called_once_with('studentoverview')
def test_post_relatedstudent_already_on_assignment(self):
    period = baker.make('core.Period')
    student = baker.make('core.RelatedStudent',
                         user__shortname='userb@example.com',
                         period=period)
    assignment = baker.make('core.Assignment', parentnode=period)
    # Put the student on the assignment up front so the POST selection is invalid.
    baker.make('core.Candidate',
               relatedstudent=student,
               assignment_group__parentnode=assignment)
    self.assertEqual(1, AssignmentGroup.objects.count())
    messagesmock = mock.MagicMock()
    mockapp = mock.MagicMock()
    mockapp.reverse_appurl.return_value = '/manual-select'
    mockresponse = self.mock_http302_postrequest(
        cradmin_role=assignment,
        messagesmock=messagesmock,
        cradmin_app=mockapp,
        requestkwargs={'data': {'selected_items': [student.id]}})
    # The invalid selection redirects back to manual select ...
    mockapp.reverse_appurl.assert_called_once_with('manual-select')
    self.assertEqual('/manual-select', mockresponse.response['Location'])
    # ... does not create any extra group ...
    self.assertEqual(1, AssignmentGroup.objects.count())
    # ... and adds the form_invalid error message.
    messagesmock.add.assert_called_once_with(
        messages.ERROR,
        create_groups.ManualSelectStudentsView.form_invalid_message,
        '')
def test_post_relatedstudent_not_relatedstudent_on_period(self):
    period = baker.make('core.Period')
    # Deliberately NOT registered on ``period`` - selecting this student must fail.
    student = baker.make('core.RelatedStudent',
                         user__shortname='userb@example.com')
    assignment = baker.make('core.Assignment', parentnode=period)
    self.assertEqual(0, AssignmentGroup.objects.count())
    messagesmock = mock.MagicMock()
    mockapp = mock.MagicMock()
    mockapp.reverse_appurl.return_value = '/manual-select'
    mockresponse = self.mock_http302_postrequest(
        cradmin_role=assignment,
        messagesmock=messagesmock,
        cradmin_app=mockapp,
        requestkwargs={'data': {'selected_items': [student.id]}})
    # The invalid selection redirects back to manual select ...
    mockapp.reverse_appurl.assert_called_once_with('manual-select')
    self.assertEqual('/manual-select', mockresponse.response['Location'])
    # ... creates no groups ...
    self.assertEqual(0, AssignmentGroup.objects.count())
    # ... and adds the form_invalid error message.
    messagesmock.add.assert_called_once_with(
        messages.ERROR,
        create_groups.ManualSelectStudentsView.form_invalid_message,
        '')
| 48.695983
| 118
| 0.634953
| 3,918
| 44,849
| 7.018121
| 0.067126
| 0.053024
| 0.076117
| 0.050187
| 0.90959
| 0.897953
| 0.884424
| 0.871186
| 0.84191
| 0.832164
| 0
| 0.007612
| 0.276506
| 44,849
| 920
| 119
| 48.748913
| 0.839805
| 0.009231
| 0
| 0.777515
| 0
| 0
| 0.167042
| 0.057489
| 0
| 0
| 0
| 0
| 0.099408
| 1
| 0.062722
| false
| 0.016568
| 0.018935
| 0.001183
| 0.092308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4345b0010140737ffb7ee5a44a753cc21c61dc72
| 110
|
py
|
Python
|
__init__.py
|
tianzhuqiao/auibarpopup
|
cdd2a320adf088051f3803eb11432994902dfc18
|
[
"MIT"
] | 2
|
2018-05-28T23:59:02.000Z
|
2019-11-17T01:55:16.000Z
|
__init__.py
|
tianzhuqiao/auibarpopup
|
cdd2a320adf088051f3803eb11432994902dfc18
|
[
"MIT"
] | 1
|
2018-07-14T16:55:16.000Z
|
2018-07-14T16:55:16.000Z
|
__init__.py
|
tianzhuqiao/auibarpopup
|
cdd2a320adf088051f3803eb11432994902dfc18
|
[
"MIT"
] | null | null | null |
import sys

# Re-export the auibarpopup module contents. Python 3 needs the explicit
# relative form; Python 2 used the implicit (absolute-looking) form.
if sys.version_info.major >= 3:
    from .auibarpopup import *
else:
    from auibarpopup import *
| 18.333333
| 31
| 0.709091
| 15
| 110
| 5.133333
| 0.666667
| 0.38961
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011494
| 0.209091
| 110
| 5
| 32
| 22
| 0.873563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
434751babe58f6d513760a7700468b05dd769202
| 20
|
py
|
Python
|
testxt.py
|
nightinwhite/key_target
|
465be12aee4673823582ed82ad935b5d8ad60990
|
[
"Apache-2.0"
] | null | null | null |
testxt.py
|
nightinwhite/key_target
|
465be12aee4673823582ed82ad935b5d8ad60990
|
[
"Apache-2.0"
] | null | null | null |
testxt.py
|
nightinwhite/key_target
|
465be12aee4673823582ed82ad935b5d8ad60990
|
[
"Apache-2.0"
] | null | null | null |
# Print 7/2 rounded half-up via the add-0.5-then-truncate idiom (prints 4).
print(int(7 / 2 + 0.5))
| 20
| 20
| 0.6
| 6
| 20
| 2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0.05
| 20
| 1
| 20
| 20
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
4a36ddea56acc0250fe9067d8afe2977d6ebfb9d
| 61,019
|
py
|
Python
|
sdk/python/pulumi_google_native/compute/beta/image.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/compute/beta/image.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/compute/beta/image.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ImageArgs', 'Image']
@pulumi.input_type
class ImageArgs:
def __init__(__self__, *,
archive_size_bytes: Optional[pulumi.Input[str]] = None,
deprecated: Optional[pulumi.Input['DeprecationStatusArgs']] = None,
description: Optional[pulumi.Input[str]] = None,
disk_size_gb: Optional[pulumi.Input[str]] = None,
family: Optional[pulumi.Input[str]] = None,
force_create: Optional[pulumi.Input[str]] = None,
guest_os_features: Optional[pulumi.Input[Sequence[pulumi.Input['GuestOsFeatureArgs']]]] = None,
image_encryption_key: Optional[pulumi.Input['CustomerEncryptionKeyArgs']] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
license_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
licenses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
locked: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
raw_disk: Optional[pulumi.Input['ImageRawDiskArgs']] = None,
request_id: Optional[pulumi.Input[str]] = None,
rollout_override: Optional[pulumi.Input['RolloutPolicyArgs']] = None,
shielded_instance_initial_state: Optional[pulumi.Input['InitialStateConfigArgs']] = None,
source_disk: Optional[pulumi.Input[str]] = None,
source_disk_encryption_key: Optional[pulumi.Input['CustomerEncryptionKeyArgs']] = None,
source_image: Optional[pulumi.Input[str]] = None,
source_image_encryption_key: Optional[pulumi.Input['CustomerEncryptionKeyArgs']] = None,
source_snapshot: Optional[pulumi.Input[str]] = None,
source_snapshot_encryption_key: Optional[pulumi.Input['CustomerEncryptionKeyArgs']] = None,
source_type: Optional[pulumi.Input['ImageSourceType']] = None,
storage_locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
user_licenses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Image resource.
:param pulumi.Input[str] archive_size_bytes: Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
:param pulumi.Input['DeprecationStatusArgs'] deprecated: The deprecation status associated with this image.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property when you create the resource.
:param pulumi.Input[str] disk_size_gb: Size of the image when restored onto a persistent disk (in GB).
:param pulumi.Input[str] family: The name of the image family to which this image belongs. You can create disks by specifying an image family instead of a specific image name. The image family always returns its latest image that is not deprecated. The name of the image family must comply with RFC1035.
:param pulumi.Input[Sequence[pulumi.Input['GuestOsFeatureArgs']]] guest_os_features: A list of features to enable on the guest operating system. Applicable only for bootable images. To see a list of available options, see the guestOSfeatures[].type parameter.
:param pulumi.Input['CustomerEncryptionKeyArgs'] image_encryption_key: Encrypts the image using a customer-supplied encryption key. After you encrypt an image with a customer-supplied key, you must provide the same key if you use the image later (e.g. to create a disk from the image). Customer-supplied encryption keys do not protect access to metadata of the disk. If you do not provide an encryption key when creating the image, then the disk will be encrypted using an automatically generated key and you do not need to provide a key to use the image later.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this image. These can be later modified by the setLabels method.
:param pulumi.Input[Sequence[pulumi.Input[str]]] license_codes: Integer license codes indicating which licenses are attached to this image.
:param pulumi.Input[Sequence[pulumi.Input[str]]] licenses: Any applicable license URI.
:param pulumi.Input[bool] locked: A flag for marketplace VM disk created from the image, which is designed for marketplace VM disk to prevent the proprietary data on the disk from being accessed unwantedly. The flag will be inherited by the disk created from the image. The disk with locked flag set to true will be prohibited from performing the operations below: - R/W or R/O disk attach - Disk detach, if disk is created via create-on-create - Create images - Create snapshots - Create disk clone (create disk from the current disk) The image with the locked field set to true will be prohibited from performing the operations below: - Create images from the current image - Update the locked field for the current image The instance with at least one disk with locked flag set to true will be prohibited from performing the operations below: - Secondary disk attach - Create instant snapshot - Create machine images - Create instance template - Delete the instance with --keep-disk parameter set to true
:param pulumi.Input[str] name: Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
:param pulumi.Input['ImageRawDiskArgs'] raw_disk: The parameters of the raw disk image.
:param pulumi.Input['RolloutPolicyArgs'] rollout_override: A rollout policy to apply to this image. When specified, the rollout policy overrides per-zone references to the image via the associated image family. The rollout policy restricts the zones where this image is accessible when using a zonal image family reference. When the rollout policy does not include the user specified zone, or if the zone is rolled out, this image is accessible. The rollout policy for this image is read-only, except for allowlisted users. This field might not be configured. To view the latest non-deprecated image in a specific zone, use the imageFamilyViews.get method.
:param pulumi.Input['InitialStateConfigArgs'] shielded_instance_initial_state: Set the secure boot keys of shielded instance.
:param pulumi.Input[str] source_disk: URL of the source disk used to create this image. For example, the following are valid values: - https://www.googleapis.com/compute/v1/projects/project/zones/zone /disks/disk - projects/project/zones/zone/disks/disk - zones/zone/disks/disk In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
:param pulumi.Input['CustomerEncryptionKeyArgs'] source_disk_encryption_key: The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key.
:param pulumi.Input[str] source_image: URL of the source image used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ images/image_name - projects/project_id/global/images/image_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
:param pulumi.Input['CustomerEncryptionKeyArgs'] source_image_encryption_key: The customer-supplied encryption key of the source image. Required if the source image is protected by a customer-supplied encryption key.
:param pulumi.Input[str] source_snapshot: URL of the source snapshot used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ snapshots/snapshot_name - projects/project_id/global/snapshots/snapshot_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
:param pulumi.Input['CustomerEncryptionKeyArgs'] source_snapshot_encryption_key: The customer-supplied encryption key of the source snapshot. Required if the source snapshot is protected by a customer-supplied encryption key.
:param pulumi.Input['ImageSourceType'] source_type: The type of the image used to create this disk. The default and only value is RAW
:param pulumi.Input[Sequence[pulumi.Input[str]]] storage_locations: Cloud Storage bucket storage location of the image (regional or multi-regional).
:param pulumi.Input[Sequence[pulumi.Input[str]]] user_licenses: A list of publicly visible user-licenses. Unlike regular licenses, user provided licenses can be modified after the disk is created. This includes a list of URLs to the license resource. For example, to provide a debian license: https://www.googleapis.com/compute/v1/projects/debian-cloud/global/licenses/debian-9-stretch
"""
if archive_size_bytes is not None:
pulumi.set(__self__, "archive_size_bytes", archive_size_bytes)
if deprecated is not None:
pulumi.set(__self__, "deprecated", deprecated)
if description is not None:
pulumi.set(__self__, "description", description)
if disk_size_gb is not None:
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
if family is not None:
pulumi.set(__self__, "family", family)
if force_create is not None:
pulumi.set(__self__, "force_create", force_create)
if guest_os_features is not None:
pulumi.set(__self__, "guest_os_features", guest_os_features)
if image_encryption_key is not None:
pulumi.set(__self__, "image_encryption_key", image_encryption_key)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if license_codes is not None:
pulumi.set(__self__, "license_codes", license_codes)
if licenses is not None:
pulumi.set(__self__, "licenses", licenses)
if locked is not None:
pulumi.set(__self__, "locked", locked)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if raw_disk is not None:
pulumi.set(__self__, "raw_disk", raw_disk)
if request_id is not None:
pulumi.set(__self__, "request_id", request_id)
if rollout_override is not None:
pulumi.set(__self__, "rollout_override", rollout_override)
if shielded_instance_initial_state is not None:
pulumi.set(__self__, "shielded_instance_initial_state", shielded_instance_initial_state)
if source_disk is not None:
pulumi.set(__self__, "source_disk", source_disk)
if source_disk_encryption_key is not None:
pulumi.set(__self__, "source_disk_encryption_key", source_disk_encryption_key)
if source_image is not None:
pulumi.set(__self__, "source_image", source_image)
if source_image_encryption_key is not None:
pulumi.set(__self__, "source_image_encryption_key", source_image_encryption_key)
if source_snapshot is not None:
pulumi.set(__self__, "source_snapshot", source_snapshot)
if source_snapshot_encryption_key is not None:
pulumi.set(__self__, "source_snapshot_encryption_key", source_snapshot_encryption_key)
if source_type is not None:
pulumi.set(__self__, "source_type", source_type)
if storage_locations is not None:
pulumi.set(__self__, "storage_locations", storage_locations)
if user_licenses is not None:
pulumi.set(__self__, "user_licenses", user_licenses)
@property
@pulumi.getter(name="archiveSizeBytes")
def archive_size_bytes(self) -> Optional[pulumi.Input[str]]:
"""
Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
"""
return pulumi.get(self, "archive_size_bytes")
@archive_size_bytes.setter
def archive_size_bytes(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "archive_size_bytes", value)
@property
@pulumi.getter
def deprecated(self) -> Optional[pulumi.Input['DeprecationStatusArgs']]:
"""
The deprecation status associated with this image.
"""
return pulumi.get(self, "deprecated")
@deprecated.setter
def deprecated(self, value: Optional[pulumi.Input['DeprecationStatusArgs']]):
pulumi.set(self, "deprecated", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of this resource. Provide this property when you create the resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="diskSizeGb")
def disk_size_gb(self) -> Optional[pulumi.Input[str]]:
"""
Size of the image when restored onto a persistent disk (in GB).
"""
return pulumi.get(self, "disk_size_gb")
@disk_size_gb.setter
def disk_size_gb(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "disk_size_gb", value)
@property
@pulumi.getter
def family(self) -> Optional[pulumi.Input[str]]:
"""
The name of the image family to which this image belongs. You can create disks by specifying an image family instead of a specific image name. The image family always returns its latest image that is not deprecated. The name of the image family must comply with RFC1035.
"""
return pulumi.get(self, "family")
@family.setter
def family(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "family", value)
@property
@pulumi.getter(name="forceCreate")
def force_create(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "force_create")
@force_create.setter
def force_create(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "force_create", value)
@property
@pulumi.getter(name="guestOsFeatures")
def guest_os_features(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GuestOsFeatureArgs']]]]:
"""
A list of features to enable on the guest operating system. Applicable only for bootable images. To see a list of available options, see the guestOSfeatures[].type parameter.
"""
return pulumi.get(self, "guest_os_features")
@guest_os_features.setter
def guest_os_features(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GuestOsFeatureArgs']]]]):
pulumi.set(self, "guest_os_features", value)
@property
@pulumi.getter(name="imageEncryptionKey")
def image_encryption_key(self) -> Optional[pulumi.Input['CustomerEncryptionKeyArgs']]:
"""
Encrypts the image using a customer-supplied encryption key. After you encrypt an image with a customer-supplied key, you must provide the same key if you use the image later (e.g. to create a disk from the image). Customer-supplied encryption keys do not protect access to metadata of the disk. If you do not provide an encryption key when creating the image, then the disk will be encrypted using an automatically generated key and you do not need to provide a key to use the image later.
"""
return pulumi.get(self, "image_encryption_key")
@image_encryption_key.setter
def image_encryption_key(self, value: Optional[pulumi.Input['CustomerEncryptionKeyArgs']]):
pulumi.set(self, "image_encryption_key", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Labels to apply to this image. These can be later modified by the setLabels method.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="licenseCodes")
def license_codes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Integer license codes indicating which licenses are attached to this image.
"""
return pulumi.get(self, "license_codes")
@license_codes.setter
def license_codes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "license_codes", value)
@property
@pulumi.getter
def licenses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Any applicable license URI.
"""
return pulumi.get(self, "licenses")
@licenses.setter
def licenses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "licenses", value)
@property
@pulumi.getter
def locked(self) -> Optional[pulumi.Input[bool]]:
"""
A flag for marketplace VM disk created from the image, which is designed for marketplace VM disk to prevent the proprietary data on the disk from being accessed unwantedly. The flag will be inherited by the disk created from the image. The disk with locked flag set to true will be prohibited from performing the operations below: - R/W or R/O disk attach - Disk detach, if disk is created via create-on-create - Create images - Create snapshots - Create disk clone (create disk from the current disk) The image with the locked field set to true will be prohibited from performing the operations below: - Create images from the current image - Update the locked field for the current image The instance with at least one disk with locked flag set to true will be prohibited from performing the operations below: - Secondary disk attach - Create instant snapshot - Create machine images - Create instance template - Delete the instance with --keep-disk parameter set to true
"""
return pulumi.get(self, "locked")
@locked.setter
def locked(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "locked", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="rawDisk")
def raw_disk(self) -> Optional[pulumi.Input['ImageRawDiskArgs']]:
"""
The parameters of the raw disk image.
"""
return pulumi.get(self, "raw_disk")
@raw_disk.setter
def raw_disk(self, value: Optional[pulumi.Input['ImageRawDiskArgs']]):
pulumi.set(self, "raw_disk", value)
@property
@pulumi.getter(name="requestId")
def request_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "request_id")
@request_id.setter
def request_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "request_id", value)
@property
@pulumi.getter(name="rolloutOverride")
def rollout_override(self) -> Optional[pulumi.Input['RolloutPolicyArgs']]:
"""
A rollout policy to apply to this image. When specified, the rollout policy overrides per-zone references to the image via the associated image family. The rollout policy restricts the zones where this image is accessible when using a zonal image family reference. When the rollout policy does not include the user specified zone, or if the zone is rolled out, this image is accessible. The rollout policy for this image is read-only, except for allowlisted users. This field might not be configured. To view the latest non-deprecated image in a specific zone, use the imageFamilyViews.get method.
"""
return pulumi.get(self, "rollout_override")
@rollout_override.setter
def rollout_override(self, value: Optional[pulumi.Input['RolloutPolicyArgs']]):
pulumi.set(self, "rollout_override", value)
    # --- Generated Pulumi input accessors: each getter/setter pair simply
    # delegates to pulumi.get/pulumi.set under the snake_case key. ---

    @property
    @pulumi.getter(name="shieldedInstanceInitialState")
    def shielded_instance_initial_state(self) -> Optional[pulumi.Input['InitialStateConfigArgs']]:
        """
        Set the secure boot keys of shielded instance.
        """
        return pulumi.get(self, "shielded_instance_initial_state")

    @shielded_instance_initial_state.setter
    def shielded_instance_initial_state(self, value: Optional[pulumi.Input['InitialStateConfigArgs']]):
        pulumi.set(self, "shielded_instance_initial_state", value)

    @property
    @pulumi.getter(name="sourceDisk")
    def source_disk(self) -> Optional[pulumi.Input[str]]:
        """
        URL of the source disk used to create this image. For example, the following are valid values: - https://www.googleapis.com/compute/v1/projects/project/zones/zone /disks/disk - projects/project/zones/zone/disks/disk - zones/zone/disks/disk In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
        """
        return pulumi.get(self, "source_disk")

    @source_disk.setter
    def source_disk(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_disk", value)

    @property
    @pulumi.getter(name="sourceDiskEncryptionKey")
    def source_disk_encryption_key(self) -> Optional[pulumi.Input['CustomerEncryptionKeyArgs']]:
        """
        The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key.
        """
        return pulumi.get(self, "source_disk_encryption_key")

    @source_disk_encryption_key.setter
    def source_disk_encryption_key(self, value: Optional[pulumi.Input['CustomerEncryptionKeyArgs']]):
        pulumi.set(self, "source_disk_encryption_key", value)

    @property
    @pulumi.getter(name="sourceImage")
    def source_image(self) -> Optional[pulumi.Input[str]]:
        """
        URL of the source image used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ images/image_name - projects/project_id/global/images/image_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
        """
        return pulumi.get(self, "source_image")

    @source_image.setter
    def source_image(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_image", value)

    @property
    @pulumi.getter(name="sourceImageEncryptionKey")
    def source_image_encryption_key(self) -> Optional[pulumi.Input['CustomerEncryptionKeyArgs']]:
        """
        The customer-supplied encryption key of the source image. Required if the source image is protected by a customer-supplied encryption key.
        """
        return pulumi.get(self, "source_image_encryption_key")

    @source_image_encryption_key.setter
    def source_image_encryption_key(self, value: Optional[pulumi.Input['CustomerEncryptionKeyArgs']]):
        pulumi.set(self, "source_image_encryption_key", value)
    # --- Generated Pulumi input accessors (continued). ---

    @property
    @pulumi.getter(name="sourceSnapshot")
    def source_snapshot(self) -> Optional[pulumi.Input[str]]:
        """
        URL of the source snapshot used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ snapshots/snapshot_name - projects/project_id/global/snapshots/snapshot_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
        """
        return pulumi.get(self, "source_snapshot")

    @source_snapshot.setter
    def source_snapshot(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_snapshot", value)

    @property
    @pulumi.getter(name="sourceSnapshotEncryptionKey")
    def source_snapshot_encryption_key(self) -> Optional[pulumi.Input['CustomerEncryptionKeyArgs']]:
        """
        The customer-supplied encryption key of the source snapshot. Required if the source snapshot is protected by a customer-supplied encryption key.
        """
        return pulumi.get(self, "source_snapshot_encryption_key")

    @source_snapshot_encryption_key.setter
    def source_snapshot_encryption_key(self, value: Optional[pulumi.Input['CustomerEncryptionKeyArgs']]):
        pulumi.set(self, "source_snapshot_encryption_key", value)

    @property
    @pulumi.getter(name="sourceType")
    def source_type(self) -> Optional[pulumi.Input['ImageSourceType']]:
        """
        The type of the image used to create this disk. The default and only value is RAW
        """
        return pulumi.get(self, "source_type")

    @source_type.setter
    def source_type(self, value: Optional[pulumi.Input['ImageSourceType']]):
        pulumi.set(self, "source_type", value)

    @property
    @pulumi.getter(name="storageLocations")
    def storage_locations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Cloud Storage bucket storage location of the image (regional or multi-regional).
        """
        return pulumi.get(self, "storage_locations")

    @storage_locations.setter
    def storage_locations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "storage_locations", value)

    @property
    @pulumi.getter(name="userLicenses")
    def user_licenses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of publicly visible user-licenses. Unlike regular licenses, user provided licenses can be modified after the disk is created. This includes a list of URLs to the license resource. For example, to provide a debian license: https://www.googleapis.com/compute/v1/projects/debian-cloud/global/licenses/debian-9-stretch
        """
        return pulumi.get(self, "user_licenses")

    @user_licenses.setter
    def user_licenses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "user_licenses", value)
class Image(pulumi.CustomResource):
    """
    A Google Compute Engine image resource (google-native:compute/beta:Image).

    Generated resource class: creation goes through ``_internal_init``; the two
    ``@overload`` stubs below only describe the two supported calling styles
    (flat keyword args, or a single ``ImageArgs`` bundle).
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 archive_size_bytes: Optional[pulumi.Input[str]] = None,
                 deprecated: Optional[pulumi.Input[pulumi.InputType['DeprecationStatusArgs']]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 disk_size_gb: Optional[pulumi.Input[str]] = None,
                 family: Optional[pulumi.Input[str]] = None,
                 force_create: Optional[pulumi.Input[str]] = None,
                 guest_os_features: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GuestOsFeatureArgs']]]]] = None,
                 image_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 license_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 licenses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 locked: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 raw_disk: Optional[pulumi.Input[pulumi.InputType['ImageRawDiskArgs']]] = None,
                 request_id: Optional[pulumi.Input[str]] = None,
                 rollout_override: Optional[pulumi.Input[pulumi.InputType['RolloutPolicyArgs']]] = None,
                 shielded_instance_initial_state: Optional[pulumi.Input[pulumi.InputType['InitialStateConfigArgs']]] = None,
                 source_disk: Optional[pulumi.Input[str]] = None,
                 source_disk_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                 source_image: Optional[pulumi.Input[str]] = None,
                 source_image_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                 source_snapshot: Optional[pulumi.Input[str]] = None,
                 source_snapshot_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                 source_type: Optional[pulumi.Input['ImageSourceType']] = None,
                 storage_locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 user_licenses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Creates an image in the specified project using the data included in the request.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] archive_size_bytes: Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
        :param pulumi.Input[pulumi.InputType['DeprecationStatusArgs']] deprecated: The deprecation status associated with this image.
        :param pulumi.Input[str] description: An optional description of this resource. Provide this property when you create the resource.
        :param pulumi.Input[str] disk_size_gb: Size of the image when restored onto a persistent disk (in GB).
        :param pulumi.Input[str] family: The name of the image family to which this image belongs. You can create disks by specifying an image family instead of a specific image name. The image family always returns its latest image that is not deprecated. The name of the image family must comply with RFC1035.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GuestOsFeatureArgs']]]] guest_os_features: A list of features to enable on the guest operating system. Applicable only for bootable images. To see a list of available options, see the guestOSfeatures[].type parameter.
        :param pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']] image_encryption_key: Encrypts the image using a customer-supplied encryption key. After you encrypt an image with a customer-supplied key, you must provide the same key if you use the image later (e.g. to create a disk from the image). Customer-supplied encryption keys do not protect access to metadata of the disk. If you do not provide an encryption key when creating the image, then the disk will be encrypted using an automatically generated key and you do not need to provide a key to use the image later.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this image. These can be later modified by the setLabels method.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] license_codes: Integer license codes indicating which licenses are attached to this image.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] licenses: Any applicable license URI.
        :param pulumi.Input[bool] locked: A flag for marketplace VM disk created from the image, which is designed for marketplace VM disk to prevent the proprietary data on the disk from being accessed unwantedly. The flag will be inherited by the disk created from the image. The disk with locked flag set to true will be prohibited from performing the operations below: - R/W or R/O disk attach - Disk detach, if disk is created via create-on-create - Create images - Create snapshots - Create disk clone (create disk from the current disk) The image with the locked field set to true will be prohibited from performing the operations below: - Create images from the current image - Update the locked field for the current image The instance with at least one disk with locked flag set to true will be prohibited from performing the operations below: - Secondary disk attach - Create instant snapshot - Create machine images - Create instance template - Delete the instance with --keep-disk parameter set to true
        :param pulumi.Input[str] name: Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
        :param pulumi.Input[pulumi.InputType['ImageRawDiskArgs']] raw_disk: The parameters of the raw disk image.
        :param pulumi.Input[pulumi.InputType['RolloutPolicyArgs']] rollout_override: A rollout policy to apply to this image. When specified, the rollout policy overrides per-zone references to the image via the associated image family. The rollout policy restricts the zones where this image is accessible when using a zonal image family reference. When the rollout policy does not include the user specified zone, or if the zone is rolled out, this image is accessible. The rollout policy for this image is read-only, except for allowlisted users. This field might not be configured. To view the latest non-deprecated image in a specific zone, use the imageFamilyViews.get method.
        :param pulumi.Input[pulumi.InputType['InitialStateConfigArgs']] shielded_instance_initial_state: Set the secure boot keys of shielded instance.
        :param pulumi.Input[str] source_disk: URL of the source disk used to create this image. For example, the following are valid values: - https://www.googleapis.com/compute/v1/projects/project/zones/zone /disks/disk - projects/project/zones/zone/disks/disk - zones/zone/disks/disk In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
        :param pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']] source_disk_encryption_key: The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key.
        :param pulumi.Input[str] source_image: URL of the source image used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ images/image_name - projects/project_id/global/images/image_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
        :param pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']] source_image_encryption_key: The customer-supplied encryption key of the source image. Required if the source image is protected by a customer-supplied encryption key.
        :param pulumi.Input[str] source_snapshot: URL of the source snapshot used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ snapshots/snapshot_name - projects/project_id/global/snapshots/snapshot_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
        :param pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']] source_snapshot_encryption_key: The customer-supplied encryption key of the source snapshot. Required if the source snapshot is protected by a customer-supplied encryption key.
        :param pulumi.Input['ImageSourceType'] source_type: The type of the image used to create this disk. The default and only value is RAW
        :param pulumi.Input[Sequence[pulumi.Input[str]]] storage_locations: Cloud Storage bucket storage location of the image (regional or multi-regional).
        :param pulumi.Input[Sequence[pulumi.Input[str]]] user_licenses: A list of publicly visible user-licenses. Unlike regular licenses, user provided licenses can be modified after the disk is created. This includes a list of URLs to the license resource. For example, to provide a debian license: https://www.googleapis.com/compute/v1/projects/debian-cloud/global/licenses/debian-9-stretch
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[ImageArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates an image in the specified project using the data included in the request.

        :param str resource_name: The name of the resource.
        :param ImageArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ImageArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       archive_size_bytes: Optional[pulumi.Input[str]] = None,
                       deprecated: Optional[pulumi.Input[pulumi.InputType['DeprecationStatusArgs']]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       disk_size_gb: Optional[pulumi.Input[str]] = None,
                       family: Optional[pulumi.Input[str]] = None,
                       force_create: Optional[pulumi.Input[str]] = None,
                       guest_os_features: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GuestOsFeatureArgs']]]]] = None,
                       image_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                       labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       license_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       licenses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       locked: Optional[pulumi.Input[bool]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       project: Optional[pulumi.Input[str]] = None,
                       raw_disk: Optional[pulumi.Input[pulumi.InputType['ImageRawDiskArgs']]] = None,
                       request_id: Optional[pulumi.Input[str]] = None,
                       rollout_override: Optional[pulumi.Input[pulumi.InputType['RolloutPolicyArgs']]] = None,
                       shielded_instance_initial_state: Optional[pulumi.Input[pulumi.InputType['InitialStateConfigArgs']]] = None,
                       source_disk: Optional[pulumi.Input[str]] = None,
                       source_disk_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                       source_image: Optional[pulumi.Input[str]] = None,
                       source_image_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                       source_snapshot: Optional[pulumi.Input[str]] = None,
                       source_snapshot_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                       source_type: Optional[pulumi.Input['ImageSourceType']] = None,
                       storage_locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       user_licenses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       __props__=None):
        # Actual constructor body shared by both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No opts.id => we are creating a new resource, so build the full
            # property bag from the keyword arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ImageArgs.__new__(ImageArgs)

            __props__.__dict__["archive_size_bytes"] = archive_size_bytes
            __props__.__dict__["deprecated"] = deprecated
            __props__.__dict__["description"] = description
            __props__.__dict__["disk_size_gb"] = disk_size_gb
            __props__.__dict__["family"] = family
            __props__.__dict__["force_create"] = force_create
            __props__.__dict__["guest_os_features"] = guest_os_features
            __props__.__dict__["image_encryption_key"] = image_encryption_key
            __props__.__dict__["labels"] = labels
            __props__.__dict__["license_codes"] = license_codes
            __props__.__dict__["licenses"] = licenses
            __props__.__dict__["locked"] = locked
            __props__.__dict__["name"] = name
            __props__.__dict__["project"] = project
            __props__.__dict__["raw_disk"] = raw_disk
            __props__.__dict__["request_id"] = request_id
            __props__.__dict__["rollout_override"] = rollout_override
            __props__.__dict__["shielded_instance_initial_state"] = shielded_instance_initial_state
            __props__.__dict__["source_disk"] = source_disk
            __props__.__dict__["source_disk_encryption_key"] = source_disk_encryption_key
            __props__.__dict__["source_image"] = source_image
            __props__.__dict__["source_image_encryption_key"] = source_image_encryption_key
            __props__.__dict__["source_snapshot"] = source_snapshot
            __props__.__dict__["source_snapshot_encryption_key"] = source_snapshot_encryption_key
            __props__.__dict__["source_type"] = source_type
            __props__.__dict__["storage_locations"] = storage_locations
            __props__.__dict__["user_licenses"] = user_licenses
            # Non-input properties (not constructor parameters); initialized to
            # None here and filled in by the provider.
            __props__.__dict__["creation_timestamp"] = None
            __props__.__dict__["kind"] = None
            __props__.__dict__["label_fingerprint"] = None
            __props__.__dict__["satisfies_pzs"] = None
            __props__.__dict__["self_link"] = None
            __props__.__dict__["source_disk_id"] = None
            __props__.__dict__["source_image_id"] = None
            __props__.__dict__["source_snapshot_id"] = None
            __props__.__dict__["status"] = None
        super(Image, __self__).__init__(
            'google-native:compute/beta:Image',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Image':
        """
        Get an existing Image resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Every property starts as None; passing __props__ together with
        # opts.id makes the base constructor read the live state instead of
        # creating a new resource.
        __props__ = ImageArgs.__new__(ImageArgs)

        __props__.__dict__["archive_size_bytes"] = None
        __props__.__dict__["creation_timestamp"] = None
        __props__.__dict__["deprecated"] = None
        __props__.__dict__["description"] = None
        __props__.__dict__["disk_size_gb"] = None
        __props__.__dict__["family"] = None
        __props__.__dict__["guest_os_features"] = None
        __props__.__dict__["image_encryption_key"] = None
        __props__.__dict__["kind"] = None
        __props__.__dict__["label_fingerprint"] = None
        __props__.__dict__["labels"] = None
        __props__.__dict__["license_codes"] = None
        __props__.__dict__["licenses"] = None
        __props__.__dict__["locked"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["raw_disk"] = None
        __props__.__dict__["rollout_override"] = None
        __props__.__dict__["satisfies_pzs"] = None
        __props__.__dict__["self_link"] = None
        __props__.__dict__["shielded_instance_initial_state"] = None
        __props__.__dict__["source_disk"] = None
        __props__.__dict__["source_disk_encryption_key"] = None
        __props__.__dict__["source_disk_id"] = None
        __props__.__dict__["source_image"] = None
        __props__.__dict__["source_image_encryption_key"] = None
        __props__.__dict__["source_image_id"] = None
        __props__.__dict__["source_snapshot"] = None
        __props__.__dict__["source_snapshot_encryption_key"] = None
        __props__.__dict__["source_snapshot_id"] = None
        __props__.__dict__["source_type"] = None
        __props__.__dict__["status"] = None
        __props__.__dict__["storage_locations"] = None
        __props__.__dict__["user_licenses"] = None
        return Image(resource_name, opts=opts, __props__=__props__)
    # --- Generated output property accessors: each returns the resolved
    # pulumi.Output stored under the snake_case key. ---

    @property
    @pulumi.getter(name="archiveSizeBytes")
    def archive_size_bytes(self) -> pulumi.Output[str]:
        """
        Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
        """
        return pulumi.get(self, "archive_size_bytes")

    @property
    @pulumi.getter(name="creationTimestamp")
    def creation_timestamp(self) -> pulumi.Output[str]:
        """
        Creation timestamp in RFC3339 text format.
        """
        return pulumi.get(self, "creation_timestamp")

    @property
    @pulumi.getter
    def deprecated(self) -> pulumi.Output['outputs.DeprecationStatusResponse']:
        """
        The deprecation status associated with this image.
        """
        return pulumi.get(self, "deprecated")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        An optional description of this resource. Provide this property when you create the resource.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="diskSizeGb")
    def disk_size_gb(self) -> pulumi.Output[str]:
        """
        Size of the image when restored onto a persistent disk (in GB).
        """
        return pulumi.get(self, "disk_size_gb")

    @property
    @pulumi.getter
    def family(self) -> pulumi.Output[str]:
        """
        The name of the image family to which this image belongs. You can create disks by specifying an image family instead of a specific image name. The image family always returns its latest image that is not deprecated. The name of the image family must comply with RFC1035.
        """
        return pulumi.get(self, "family")

    @property
    @pulumi.getter(name="guestOsFeatures")
    def guest_os_features(self) -> pulumi.Output[Sequence['outputs.GuestOsFeatureResponse']]:
        """
        A list of features to enable on the guest operating system. Applicable only for bootable images. To see a list of available options, see the guestOSfeatures[].type parameter.
        """
        return pulumi.get(self, "guest_os_features")

    @property
    @pulumi.getter(name="imageEncryptionKey")
    def image_encryption_key(self) -> pulumi.Output['outputs.CustomerEncryptionKeyResponse']:
        """
        Encrypts the image using a customer-supplied encryption key. After you encrypt an image with a customer-supplied key, you must provide the same key if you use the image later (e.g. to create a disk from the image). Customer-supplied encryption keys do not protect access to metadata of the disk. If you do not provide an encryption key when creating the image, then the disk will be encrypted using an automatically generated key and you do not need to provide a key to use the image later.
        """
        return pulumi.get(self, "image_encryption_key")

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Output[str]:
        """
        Type of the resource. Always compute#image for images.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="labelFingerprint")
    def label_fingerprint(self) -> pulumi.Output[str]:
        """
        A fingerprint for the labels being applied to this image, which is essentially a hash of the labels used for optimistic locking. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels, otherwise the request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve an image.
        """
        return pulumi.get(self, "label_fingerprint")

    @property
    @pulumi.getter
    def labels(self) -> pulumi.Output[Mapping[str, str]]:
        """
        Labels to apply to this image. These can be later modified by the setLabels method.
        """
        return pulumi.get(self, "labels")
    # --- Generated output property accessors (continued). ---

    @property
    @pulumi.getter(name="licenseCodes")
    def license_codes(self) -> pulumi.Output[Sequence[str]]:
        """
        Integer license codes indicating which licenses are attached to this image.
        """
        return pulumi.get(self, "license_codes")

    @property
    @pulumi.getter
    def licenses(self) -> pulumi.Output[Sequence[str]]:
        """
        Any applicable license URI.
        """
        return pulumi.get(self, "licenses")

    @property
    @pulumi.getter
    def locked(self) -> pulumi.Output[bool]:
        """
        A flag for marketplace VM disk created from the image, which is designed for marketplace VM disk to prevent the proprietary data on the disk from being accessed unwantedly. The flag will be inherited by the disk created from the image. The disk with locked flag set to true will be prohibited from performing the operations below: - R/W or R/O disk attach - Disk detach, if disk is created via create-on-create - Create images - Create snapshots - Create disk clone (create disk from the current disk) The image with the locked field set to true will be prohibited from performing the operations below: - Create images from the current image - Update the locked field for the current image The instance with at least one disk with locked flag set to true will be prohibited from performing the operations below: - Secondary disk attach - Create instant snapshot - Create machine images - Create instance template - Delete the instance with --keep-disk parameter set to true
        """
        return pulumi.get(self, "locked")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="rawDisk")
    def raw_disk(self) -> pulumi.Output['outputs.ImageRawDiskResponse']:
        """
        The parameters of the raw disk image.
        """
        return pulumi.get(self, "raw_disk")

    @property
    @pulumi.getter(name="rolloutOverride")
    def rollout_override(self) -> pulumi.Output['outputs.RolloutPolicyResponse']:
        """
        A rollout policy to apply to this image. When specified, the rollout policy overrides per-zone references to the image via the associated image family. The rollout policy restricts the zones where this image is accessible when using a zonal image family reference. When the rollout policy does not include the user specified zone, or if the zone is rolled out, this image is accessible. The rollout policy for this image is read-only, except for allowlisted users. This field might not be configured. To view the latest non-deprecated image in a specific zone, use the imageFamilyViews.get method.
        """
        return pulumi.get(self, "rollout_override")

    @property
    @pulumi.getter(name="satisfiesPzs")
    def satisfies_pzs(self) -> pulumi.Output[bool]:
        """
        Reserved for future use.
        """
        return pulumi.get(self, "satisfies_pzs")

    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> pulumi.Output[str]:
        """
        Server-defined URL for the resource.
        """
        return pulumi.get(self, "self_link")

    @property
    @pulumi.getter(name="shieldedInstanceInitialState")
    def shielded_instance_initial_state(self) -> pulumi.Output['outputs.InitialStateConfigResponse']:
        """
        Set the secure boot keys of shielded instance.
        """
        return pulumi.get(self, "shielded_instance_initial_state")

    @property
    @pulumi.getter(name="sourceDisk")
    def source_disk(self) -> pulumi.Output[str]:
        """
        URL of the source disk used to create this image. For example, the following are valid values: - https://www.googleapis.com/compute/v1/projects/project/zones/zone /disks/disk - projects/project/zones/zone/disks/disk - zones/zone/disks/disk In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
        """
        return pulumi.get(self, "source_disk")
@property
@pulumi.getter(name="sourceDiskEncryptionKey")
def source_disk_encryption_key(self) -> pulumi.Output['outputs.CustomerEncryptionKeyResponse']:
"""
The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key.
"""
return pulumi.get(self, "source_disk_encryption_key")
@property
@pulumi.getter(name="sourceDiskId")
def source_disk_id(self) -> pulumi.Output[str]:
"""
The ID value of the disk used to create this image. This value may be used to determine whether the image was taken from the current or a previous instance of a given disk name.
"""
return pulumi.get(self, "source_disk_id")
@property
@pulumi.getter(name="sourceImage")
def source_image(self) -> pulumi.Output[str]:
"""
URL of the source image used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ images/image_name - projects/project_id/global/images/image_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
"""
return pulumi.get(self, "source_image")
@property
@pulumi.getter(name="sourceImageEncryptionKey")
def source_image_encryption_key(self) -> pulumi.Output['outputs.CustomerEncryptionKeyResponse']:
"""
The customer-supplied encryption key of the source image. Required if the source image is protected by a customer-supplied encryption key.
"""
return pulumi.get(self, "source_image_encryption_key")
@property
@pulumi.getter(name="sourceImageId")
def source_image_id(self) -> pulumi.Output[str]:
"""
The ID value of the image used to create this image. This value may be used to determine whether the image was taken from the current or a previous instance of a given image name.
"""
return pulumi.get(self, "source_image_id")
@property
@pulumi.getter(name="sourceSnapshot")
def source_snapshot(self) -> pulumi.Output[str]:
"""
URL of the source snapshot used to create this image. The following are valid formats for the URL: - https://www.googleapis.com/compute/v1/projects/project_id/global/ snapshots/snapshot_name - projects/project_id/global/snapshots/snapshot_name In order to create an image, you must provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL
"""
return pulumi.get(self, "source_snapshot")
@property
@pulumi.getter(name="sourceSnapshotEncryptionKey")
def source_snapshot_encryption_key(self) -> pulumi.Output['outputs.CustomerEncryptionKeyResponse']:
"""
The customer-supplied encryption key of the source snapshot. Required if the source snapshot is protected by a customer-supplied encryption key.
"""
return pulumi.get(self, "source_snapshot_encryption_key")
@property
@pulumi.getter(name="sourceSnapshotId")
def source_snapshot_id(self) -> pulumi.Output[str]:
"""
The ID value of the snapshot used to create this image. This value may be used to determine whether the snapshot was taken from the current or a previous instance of a given snapshot name.
"""
return pulumi.get(self, "source_snapshot_id")
@property
@pulumi.getter(name="sourceType")
def source_type(self) -> pulumi.Output[str]:
"""
The type of the image used to create this disk. The default and only value is RAW
"""
return pulumi.get(self, "source_type")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
"""
The status of the image. An image can be used to create other resources, such as instances, only after the image has been successfully created and the status is set to READY. Possible values are FAILED, PENDING, or READY.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="storageLocations")
def storage_locations(self) -> pulumi.Output[Sequence[str]]:
    """
    Cloud Storage bucket storage location of the image (regional or multi-regional).
    """
    # Output property resolved lazily by the Pulumi engine; keyed by the snake_case attribute name.
    return pulumi.get(self, "storage_locations")
@property
@pulumi.getter(name="userLicenses")
def user_licenses(self) -> pulumi.Output[Sequence[str]]:
    """
    A list of publicly visible user-licenses. Unlike regular licenses, user provided licenses can be modified after the disk is created. This includes a list of URLs to the license resource. For example, to provide a debian license: https://www.googleapis.com/compute/v1/projects/debian-cloud/global/licenses/debian-9-stretch
    """
    # Output property resolved lazily by the Pulumi engine; keyed by the snake_case attribute name.
    return pulumi.get(self, "user_licenses")
| 65.824164
| 1,016
| 0.703404
| 7,903
| 61,019
| 5.25193
| 0.054283
| 0.060425
| 0.061798
| 0.027466
| 0.881632
| 0.846215
| 0.807377
| 0.790657
| 0.769527
| 0.713078
| 0
| 0.002068
| 0.207542
| 61,019
| 926
| 1,017
| 65.895248
| 0.856292
| 0.459726
| 0
| 0.45189
| 1
| 0
| 0.152746
| 0.062275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159794
| false
| 0.001718
| 0.013746
| 0.005155
| 0.281787
| 0.008591
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a4c360b0c3fea943bf380ae8e14b9e0fc574a7b
| 28,508
|
py
|
Python
|
tests/unit/dataactbroker/test_generation_helper.py
|
dael-victoria-reyes/data-act-broker-backend
|
f83c7cad29cac24d95f45a262710dc1564de7dc1
|
[
"CC0-1.0"
] | 1
|
2019-06-22T21:53:16.000Z
|
2019-06-22T21:53:16.000Z
|
tests/unit/dataactbroker/test_generation_helper.py
|
dael-victoria-reyes/data-act-broker-backend
|
f83c7cad29cac24d95f45a262710dc1564de7dc1
|
[
"CC0-1.0"
] | null | null | null |
tests/unit/dataactbroker/test_generation_helper.py
|
dael-victoria-reyes/data-act-broker-backend
|
f83c7cad29cac24d95f45a262710dc1564de7dc1
|
[
"CC0-1.0"
] | null | null | null |
import pytest
from datetime import datetime, date
from unittest.mock import Mock
from dataactbroker.helpers import generation_helper
from dataactbroker.helpers.generation_helper import (
check_file_generation, check_generation_prereqs, copy_file_generation_to_job, start_d_generation,
retrieve_cached_file_generation)
from dataactcore.config import CONFIG_BROKER
from dataactcore.models.lookups import JOB_STATUS_DICT, JOB_TYPE_DICT, FILE_TYPE_DICT
from dataactcore.models.jobModels import FileGeneration
from dataactcore.utils.responseException import ResponseException
from tests.unit.dataactcore.factories.job import JobFactory, SubmissionFactory, FileGenerationFactory
@pytest.mark.usefixtures("job_constants")
def test_start_d_generation_submission_cached(database, monkeypatch):
    """ Cached D files must update the upload Job with the FileGeneration data. """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    # Submission whose agency/date range matches the cached FileGeneration below
    submission = SubmissionFactory(
        submission_id=1000, reporting_start_date='2017-01-01', reporting_end_date='2017-01-31', frec_code='1234',
        cgac_code=None, is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    file_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D2',
        agency_code='1234', agency_type='awarding', is_cached_file=True, file_path=file_path)
    up_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], file_type_id=FILE_TYPE_DICT['award'], error_message=None,
        job_type_id=JOB_TYPE_DICT['file_upload'], filename=None, original_filename=None,
        submission_id=submission.submission_id)
    val_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award'],
        job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None, original_filename=None,
        submission_id=submission.submission_id)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The upload job should be populated from the cached generation and finish immediately.
    # (The original test repeated this whole assertion block verbatim, plus a redundant
    # `!= waiting` check implied by `== finished`; the duplicates are removed.)
    assert up_job.file_generation_id == file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename == original_filename
    assert up_job.filename == gen_file_path_from_submission(up_job.submission_id, original_filename)
    assert up_job.job_status_id == JOB_STATUS_DICT['finished']
@pytest.mark.usefixtures("job_constants")
def test_start_d_generation_submission_change_request(database, monkeypatch):
    """ In-submission generations that change their requested start or end dates must actually generate files based on
        the new dates.
    """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(
        submission_id=1000, reporting_start_date='2017-01-01', reporting_end_date='2017-01-31', cgac_code='123',
        frec_code=None, is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    # Cached generation for the ORIGINAL date range; the request below changes the end date
    file_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D1',
        agency_code='123', agency_type='awarding', is_cached_file=True, file_path=file_path, file_generation_id=1000)
    up_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, job_type_id=JOB_TYPE_DICT['file_upload'],
        file_type_id=FILE_TYPE_DICT['award_procurement'], filename=None, submission_id=submission.submission_id,
        file_generation_id=file_gen.file_generation_id, original_filename=original_filename)
    val_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award_procurement'],
        job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None, submission_id=submission.submission_id,
        original_filename=original_filename)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/30/2017', 'funding')

    # A fresh generation must be used: new FileGeneration, the requested dates, and a new file.
    # (The original duplicated the date asserts and ended with tautological self-comparisons
    # such as `up_job.filename == up_job.filename`, which can never fail; those are removed.)
    assert up_job.file_generation_id != file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 30)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)
@pytest.mark.usefixtures("job_constants")
def test_start_d_generation_submission_new(database, monkeypatch):
    """ A new file generation must update the upload Job and create a new FileGeneration object. """
    sess = database.session
    original_filename = 'D2_test_gen.csv'

    submission = SubmissionFactory(
        submission_id=1000, reporting_start_date='2017-01-01', reporting_end_date='2017-01-31', cgac_code='123',
        frec_code=None, is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    up_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award'],
        job_type_id=JOB_TYPE_DICT['file_upload'], filename=None, submission_id=submission.submission_id,
        original_filename=original_filename, file_generation_id=None)
    val_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award'],
        job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None, submission_id=submission.submission_id,
        original_filename=original_filename)
    sess.add_all([submission, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The job must now point at a brand-new generation with a newly generated filename.
    # (Duplicate date asserts and tautological self-comparisons from the original removed.)
    assert up_job.file_generation_id is not None
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)

    # A matching FileGeneration row must have been created for this request
    file_gen = sess.query(FileGeneration).filter_by(file_generation_id=up_job.file_generation_id).one_or_none()
    assert file_gen is not None
    assert file_gen.request_date == datetime.now().date()
    assert file_gen.start_date == date(2017, 1, 1)
    assert file_gen.end_date == date(2017, 1, 31)
    assert file_gen.file_type == 'D2'
    assert file_gen.file_path != gen_file_path_from_submission('None', original_filename)
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation(database):
    """ Should successfully return the correct cached FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    cached_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D2',
        agency_code='123', agency_type='awarding', is_cached_file=True)
    session.add_all([upload_job, cached_gen])
    session.commit()

    # Dates, agency, agency type, and file type all line up with the cached generation
    assert retrieve_cached_file_generation(upload_job, 'awarding', '123') == cached_gen
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation_none(database):
    """ Should return no FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    session.add(upload_job)
    session.commit()

    # No FileGeneration rows exist at all, so nothing can be retrieved
    assert retrieve_cached_file_generation(upload_job, 'awarding', '123') is None
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation_end_date_diff(database):
    """ Should return no FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    # Cached generation ends a day earlier than the job's requested end date
    cached_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-30', file_type='D2',
        agency_code='123', agency_type='awarding', is_cached_file=True)
    session.add_all([upload_job, cached_gen])
    session.commit()

    assert retrieve_cached_file_generation(upload_job, 'awarding', '123') is None
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation_start_date_diff(database):
    """ Should return no FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    # Cached generation starts a day later than the job's requested start date
    cached_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-02', end_date='2017-01-31', file_type='D2',
        agency_code='123', agency_type='awarding', is_cached_file=True)
    session.add_all([upload_job, cached_gen])
    session.commit()

    assert retrieve_cached_file_generation(upload_job, 'awarding', '123') is None
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation_agency_code_diff(database):
    """ Should return no FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    # Cached generation belongs to agency '124', but we look up '123'
    cached_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D2',
        agency_code='124', agency_type='awarding', is_cached_file=True)
    session.add_all([upload_job, cached_gen])
    session.commit()

    assert retrieve_cached_file_generation(upload_job, 'awarding', '123') is None
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation_agency_type_diff(database):
    """ Should return no FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    # Cached generation is 'awarding', but the lookup asks for 'funding'
    cached_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D2',
        agency_code='123', agency_type='awarding', is_cached_file=True)
    session.add_all([upload_job, cached_gen])
    session.commit()

    assert retrieve_cached_file_generation(upload_job, 'funding', '123') is None
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation_file_type_diff(database):
    """ Should return no FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    # Job is a D2 ('award') upload, but the cached generation is a D1
    cached_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D1',
        agency_code='123', agency_type='awarding', is_cached_file=True)
    session.add_all([upload_job, cached_gen])
    session.commit()

    assert retrieve_cached_file_generation(upload_job, 'awarding', '123') is None
@pytest.mark.usefixtures("job_constants")
def test_retrieve_cached_file_generation_not_cached(database):
    """ Should return no FileGeneration """
    session = database.session
    upload_job = JobFactory(
        start_date='2017-01-01', end_date='2017-01-31', job_status_id=JOB_STATUS_DICT['waiting'],
        error_message=None, file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
        filename=None, original_filename=None, file_generation_id=None)
    # Matches on every field but is flagged as not cached
    cached_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D2',
        agency_code='123', agency_type='awarding', is_cached_file=False)
    session.add_all([upload_job, cached_gen])
    session.commit()

    assert retrieve_cached_file_generation(upload_job, 'awarding', '123') is None
@pytest.mark.usefixtures("job_constants")
def test_check_detached_d_file_generation(database):
    """ Job statuses should return the correct status and error message to the user """
    session = database.session

    # Detached D2 generation waiting to be picked up by the Validator
    upload_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], job_type_id=JOB_TYPE_DICT['file_upload'],
                            file_type_id=FILE_TYPE_DICT['award'], error_message='', filename='job_id/file.csv',
                            original_filename='file.csv')
    session.add(upload_job)
    session.commit()
    assert check_file_generation(upload_job.job_id)['status'] == 'waiting'

    # Detached D2 generation running in the Validator
    upload_job.job_status_id = JOB_STATUS_DICT['running']
    session.commit()
    assert check_file_generation(upload_job.job_id)['status'] == 'waiting'

    # Detached D2 generation completed by the Validator
    upload_job.job_status_id = JOB_STATUS_DICT['finished']
    session.commit()
    resp = check_file_generation(upload_job.job_id)
    assert resp['status'] == 'finished'
    assert resp['message'] == ''

    # Detached D2 generation with an unknown error
    upload_job.job_status_id = JOB_STATUS_DICT['failed']
    session.commit()
    resp = check_file_generation(upload_job.job_id)
    assert resp['status'] == 'failed'
    assert resp['message'] == 'Upload job failed without error message'

    # Detached D2 generation with a known error
    upload_job.error_message = 'FABS upload error message'
    session.commit()
    resp = check_file_generation(upload_job.job_id)
    assert resp['status'] == 'failed'
    assert resp['message'] == 'FABS upload error message'
@pytest.mark.usefixtures("job_constants")
def test_check_submission_d_file_generation(database):
    """ Job statuses should return the correct status and error message to the user.

        Walks an in-submission D1 upload job and its companion validation job through every status
        combination and checks the message surfaced by check_file_generation at each step.
    """
    sess = database.session
    sub = SubmissionFactory()
    sess.add(sub)

    # D1 generation waiting to be picked up by the Validator
    job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['award_procurement'], submission=sub, error_message='',
                     filename='job_id/file.csv', original_filename='file.csv')
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         file_type_id=FILE_TYPE_DICT['award_procurement'], submission=sub, error_message='',
                         number_of_errors=0)
    sess.add_all([job, val_job])
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation running in the Validator
    job.job_status_id = JOB_STATUS_DICT['running']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation with an unknown error
    job.job_status_id = JOB_STATUS_DICT['failed']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Upload job failed without error message'

    # D1 generation with a known error
    job.error_message = 'D1 upload error message'
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'D1 upload error message'

    # D1 generation completed by the Validator; validation waiting to be picked up
    job.error_message = ''
    job.job_status_id = JOB_STATUS_DICT['finished']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation completed; validation running in the Validator
    val_job.job_status_id = JOB_STATUS_DICT['running']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation completed; validation completed by the Validator
    val_job.job_status_id = JOB_STATUS_DICT['finished']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'finished'

    # D1 generation completed; validation found row-level errors
    val_job.number_of_errors = 10
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Validation completed but row-level errors were found'

    # D1 generation completed; validation with an unknown error
    job.error_message = ''
    val_job.error_message = ''
    val_job.job_status_id = JOB_STATUS_DICT['failed']
    val_job.number_of_errors = 0
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Validation job had an internal error'

    # D1 generation completed; validation with a known error
    # (the original cleared val_job.error_message and immediately overwrote it -- dead store removed)
    job.error_message = ''
    val_job.error_message = 'D1 upload error message'
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'D1 upload error message'

    # D1 generation completed; validation marked invalid (file-level errors)
    job.error_message = ''
    val_job.error_message = ''
    val_job.job_status_id = JOB_STATUS_DICT['invalid']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Generated file had file-level errors'
@pytest.mark.usefixtures("job_constants")
def test_copy_file_generation_to_job(monkeypatch, database):
    """ Copying a cached FileGeneration onto an upload job should finish the job and copy the file metadata
        (filename, original filename, zeroed error/warning counts, and the file_generation_id link).
    """
    sess = database.session
    original_filename = 'new_filename.csv'
    file_path = gen_file_path_from_submission('None', original_filename)
    job = JobFactory(job_status_id=JOB_STATUS_DICT['running'], job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['award'])
    file_gen = FileGenerationFactory(file_type='D1', file_path=file_path)
    sess.add_all([job, file_gen])
    sess.commit()
    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    copy_file_generation_to_job(job, file_gen, True)
    # Re-load both rows so the assertions see what was actually persisted
    sess.refresh(job)
    sess.refresh(file_gen)
    assert job.job_status.name == 'finished'
    assert job.filename == gen_file_path_from_submission(job.submission_id, original_filename)
    assert job.original_filename == original_filename
    assert job.number_of_errors == 0
    assert job.number_of_warnings == 0
    assert job.file_generation_id == file_gen.file_generation_id
@pytest.mark.usefixtures("job_constants")
def test_check_generation_prereqs_ef_valid(database):
    """ Tests a set of conditions that passes the prerequisite checks to allow E/F files to be generated. Show that
        warnings do not prevent generation.
    """
    session = database.session
    submission = SubmissionFactory(submission_id=1, d2_submission=False)
    # Finished cross-file job with warnings only -- warnings must not block generation
    cross_file_job = JobFactory(
        submission_id=submission.submission_id, job_type_id=JOB_TYPE_DICT['validation'], file_type_id=None,
        job_status_id=JOB_STATUS_DICT['finished'], number_of_errors=0, number_of_warnings=1, error_message=None)
    session.add_all([submission, cross_file_job])
    session.commit()

    assert check_generation_prereqs(submission.submission_id, 'E') is True
@pytest.mark.usefixtures("job_constants")
def test_check_generation_prereqs_ef_not_finished(database):
    """ Tests a set of conditions that has cross-file still waiting, fail the generation check for E/F files. """
    session = database.session
    submission = SubmissionFactory(submission_id=1, d2_submission=False)
    # Cross-file validation has not run yet, so E/F generation must be blocked
    cross_file_job = JobFactory(
        submission_id=submission.submission_id, job_type_id=JOB_TYPE_DICT['validation'], file_type_id=None,
        job_status_id=JOB_STATUS_DICT['waiting'], number_of_errors=0, number_of_warnings=0, error_message=None)
    session.add_all([submission, cross_file_job])
    session.commit()

    assert check_generation_prereqs(submission.submission_id, 'E') is False
@pytest.mark.usefixtures("job_constants")
def test_check_generation_prereqs_ef_has_errors(database):
    """ Tests a set of conditions that has an error in cross-file, fail the generation check for E/F files. """
    session = database.session
    submission = SubmissionFactory(submission_id=1, d2_submission=False)
    # Cross-file validation finished but produced an error, blocking E/F generation
    cross_file_job = JobFactory(
        submission_id=submission.submission_id, job_type_id=JOB_TYPE_DICT['validation'], file_type_id=None,
        job_status_id=JOB_STATUS_DICT['finished'], number_of_errors=1, number_of_warnings=0, error_message=None)
    session.add_all([submission, cross_file_job])
    session.commit()

    assert check_generation_prereqs(submission.submission_id, 'E') is False
@pytest.mark.usefixtures("job_constants")
def test_check_generation_prereqs_d_valid(database):
    """ Tests a set of conditions that passes the prerequisite checks to allow D files to be generated. Show that
        warnings do not prevent generation.
    """
    session = database.session
    submission = SubmissionFactory(submission_id=1, d2_submission=False)
    # A, B, and C validation jobs all finished and error-free; C has a warning, which is allowed
    validation_jobs = [
        JobFactory(submission_id=submission.submission_id, job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                   file_type_id=FILE_TYPE_DICT[file_type], job_status_id=JOB_STATUS_DICT['finished'],
                   number_of_errors=0, number_of_warnings=warnings, error_message=None)
        for file_type, warnings in [('appropriations', 0), ('program_activity', 0), ('award_financial', 1)]]
    session.add_all([submission] + validation_jobs)
    session.commit()

    assert check_generation_prereqs(submission.submission_id, 'D1') is True
@pytest.mark.usefixtures("job_constants")
def test_check_generation_prereqs_d_not_finished(database):
    """ Tests a set of conditions that has one of the A,B,C files incomplete, prevent D file generation. """
    session = database.session
    submission = SubmissionFactory(submission_id=1, d2_submission=False)
    # B (program_activity) is still waiting, so D generation must be blocked
    validation_jobs = [
        JobFactory(submission_id=submission.submission_id, job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                   file_type_id=FILE_TYPE_DICT[file_type], job_status_id=JOB_STATUS_DICT[status],
                   number_of_errors=0, number_of_warnings=0, error_message=None)
        for file_type, status in [('appropriations', 'finished'), ('program_activity', 'waiting'),
                                  ('award_financial', 'finished')]]
    session.add_all([submission] + validation_jobs)
    session.commit()

    assert check_generation_prereqs(submission.submission_id, 'D1') is False
@pytest.mark.usefixtures("job_constants")
def test_check_generation_prereqs_d_has_errors(database):
    """ Tests a set of conditions that has an error in one of the A,B,C files, prevent D file generation. """
    session = database.session
    submission = SubmissionFactory(submission_id=1, d2_submission=False)
    # A (appropriations) finished with an error, so D generation must be blocked
    validation_jobs = [
        JobFactory(submission_id=submission.submission_id, job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                   file_type_id=FILE_TYPE_DICT[file_type], job_status_id=JOB_STATUS_DICT['finished'],
                   number_of_errors=errors, number_of_warnings=0, error_message=None)
        for file_type, errors in [('appropriations', 1), ('program_activity', 0), ('award_financial', 0)]]
    session.add_all([submission] + validation_jobs)
    session.commit()

    assert check_generation_prereqs(submission.submission_id, 'D1') is False
@pytest.mark.usefixtures("job_constants")
def test_check_generation_prereqs_bad_type(database):
    """ Tests that check_generation_prereqs raises an error if an invalid type is provided. """
    session = database.session
    submission = SubmissionFactory()
    session.add(submission)
    session.commit()

    # 'A' is not a generatable file type, so the helper must raise
    with pytest.raises(ResponseException):
        check_generation_prereqs(submission.submission_id, 'A')
def gen_file_path_from_submission(submission, original_filename):
    """ Build the path where a generated file is expected to live.

        When CONFIG_BROKER['local'] is set, files sit in the configured broker_files directory;
        otherwise the path is prefixed with '<submission>/'.
    """
    if CONFIG_BROKER['local']:
        prefix = CONFIG_BROKER['broker_files']
    else:
        prefix = '{}/'.format(submission)
    return '{}{}'.format(prefix, original_filename)
| 49.151724
| 119
| 0.735829
| 3,945
| 28,508
| 4.963752
| 0.052471
| 0.039526
| 0.028547
| 0.030028
| 0.904759
| 0.876775
| 0.870902
| 0.85931
| 0.843223
| 0.839087
| 0
| 0.026893
| 0.157394
| 28,508
| 579
| 120
| 49.236615
| 0.78831
| 0.082608
| 0
| 0.736239
| 0
| 0
| 0.109862
| 0.010509
| 0
| 0
| 0
| 0
| 0.183486
| 1
| 0.050459
| false
| 0
| 0.022936
| 0
| 0.075688
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a4fcf42a0bca2bb97b20a8c17c29d1313528b2b
| 2,216
|
py
|
Python
|
tests/test_tta.py
|
anshulrai/pytorch-toolbelt
|
933d59eb5d0916170b3d467f68af615064dbf7a1
|
[
"MIT"
] | null | null | null |
tests/test_tta.py
|
anshulrai/pytorch-toolbelt
|
933d59eb5d0916170b3d467f68af615064dbf7a1
|
[
"MIT"
] | null | null | null |
tests/test_tta.py
|
anshulrai/pytorch-toolbelt
|
933d59eb5d0916170b3d467f68af615064dbf7a1
|
[
"MIT"
] | null | null | null |
import torch
import numpy as np
from pytorch_toolbelt.inference import tta
from pytorch_toolbelt.utils.torch_utils import to_numpy
from torch import nn
class NoOp(nn.Module):
    """Identity module: forward() hands back its input unchanged."""

    def __init__(self):
        super(NoOp, self).__init__()

    def forward(self, input):
        return input
class SumAll(nn.Module):
    """Reduces a 4-D batch to one scalar per sample by summing over dims 1, 2, and 3."""

    def __init__(self):
        super(SumAll, self).__init__()

    def forward(self, input):
        return torch.sum(input, dim=[1, 2, 3])
def test_d4_image2mask():
    """D4 TTA with an identity model must reproduce the input mask."""
    x = torch.rand((4, 3, 224, 224))
    prediction = tta.d4_image2mask(NoOp(), x)
    np.testing.assert_allclose(to_numpy(prediction), to_numpy(x), atol=1e-6, rtol=1e-6)
def test_fliplr_image2mask():
    """Horizontal-flip TTA with an identity model must reproduce the input mask."""
    x = torch.rand((4, 3, 224, 224))
    prediction = tta.fliplr_image2mask(NoOp(), x)
    np.testing.assert_allclose(to_numpy(prediction), to_numpy(x), atol=1e-6, rtol=1e-6)
def test_d4_image2label():
    """D4 TTA of a sum model: every transform preserves the sum, so the average equals it."""
    x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
    prediction = tta.d4_image2label(SumAll(), x)
    assert int(prediction) == int(x.sum())
def test_fliplr_image2label():
    """Flip TTA of a sum model: the flip preserves the sum, so the average equals it."""
    x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
    prediction = tta.fliplr_image2label(SumAll(), x)
    assert int(prediction) == int(x.sum())
def test_fivecrop_image2label():
    """Five-crop TTA averages the four 2x2 corner crops plus the center crop."""
    x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
    prediction = tta.fivecrop_image2label(SumAll(), x, (2, 2))
    crop_sums = [(1 + 2 + 5 + 6), (3 + 4 + 7 + 8), (9 + 0 + 3 + 4), (1 + 2 + 5 + 6), (6 + 7 + 0 + 1)]
    assert int(prediction) == sum(crop_sums) / 5
def test_tencrop_image2label():
    """Ten-crop TTA averages the five crops and their mirrored copies (same sums, doubled)."""
    x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
    prediction = tta.tencrop_image2label(SumAll(), x, (2, 2))
    crop_sums = [(1 + 2 + 5 + 6), (3 + 4 + 7 + 8), (9 + 0 + 3 + 4), (1 + 2 + 5 + 6), (6 + 7 + 0 + 1)]
    assert int(prediction) == (2 * sum(crop_sums)) / 10
| 28.410256
| 116
| 0.59296
| 347
| 2,216
| 3.659942
| 0.170029
| 0.020472
| 0.02126
| 0.025197
| 0.801575
| 0.801575
| 0.729134
| 0.729134
| 0.729134
| 0.729134
| 0
| 0.094329
| 0.220217
| 2,216
| 77
| 117
| 28.779221
| 0.640625
| 0
| 0
| 0.530612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 1
| 0.204082
| false
| 0
| 0.102041
| 0.040816
| 0.387755
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a5784af8927fec43b45953267c43d2c05b4dbcd
| 121
|
py
|
Python
|
deephyper/search/nas/baselines/bench/__init__.py
|
jtchilders/deephyper
|
06f9653599757a69fa5720820f4de3a1f154b081
|
[
"BSD-3-Clause"
] | 2
|
2020-08-26T09:15:27.000Z
|
2020-08-26T09:19:13.000Z
|
deephyper/search/nas/baselines/bench/__init__.py
|
jtchilders/deephyper
|
06f9653599757a69fa5720820f4de3a1f154b081
|
[
"BSD-3-Clause"
] | null | null | null |
deephyper/search/nas/baselines/bench/__init__.py
|
jtchilders/deephyper
|
06f9653599757a69fa5720820f4de3a1f154b081
|
[
"BSD-3-Clause"
] | null | null | null |
from deephyper.search.nas.baselines.bench.benchmarks import *
from deephyper.search.nas.baselines.bench.monitor import *
| 40.333333
| 61
| 0.834711
| 16
| 121
| 6.3125
| 0.5625
| 0.257426
| 0.376238
| 0.435644
| 0.712871
| 0.712871
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066116
| 121
| 2
| 62
| 60.5
| 0.893805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
4a8f4bec7f81880e53c4eaf5010ffe1ec4fbb0e2
| 90
|
py
|
Python
|
common.py
|
Mause/media
|
8f2d8c0e4dc620eb3f7d2b9095c4b564ff4a82a6
|
[
"MIT"
] | 2
|
2021-07-01T08:36:59.000Z
|
2021-09-06T12:28:35.000Z
|
common.py
|
Mause/media
|
8f2d8c0e4dc620eb3f7d2b9095c4b564ff4a82a6
|
[
"MIT"
] | 78
|
2020-10-06T15:56:42.000Z
|
2021-11-03T23:23:18.000Z
|
common.py
|
Mause/media
|
8f2d8c0e4dc620eb3f7d2b9095c4b564ff4a82a6
|
[
"MIT"
] | 1
|
2020-11-02T00:52:20.000Z
|
2020-11-02T00:52:20.000Z
|
from alembic import op
def get_driver():
    """Return the SQLAlchemy driver name of the active Alembic migration connection."""
    bind = op.get_bind()
    return bind.engine.url.drivername
| 15
| 46
| 0.744444
| 14
| 90
| 4.642857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 90
| 5
| 47
| 18
| 0.855263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
43c62ff45a776b2882c21179738b5b99a1ce15b5
| 44,617
|
py
|
Python
|
potion/algorithms/safe.py
|
T3p/potion
|
c6349111e3f2d2a32e85dc29e052f36cf36edcdc
|
[
"MIT"
] | null | null | null |
potion/algorithms/safe.py
|
T3p/potion
|
c6349111e3f2d2a32e85dc29e052f36cf36edcdc
|
[
"MIT"
] | null | null | null |
potion/algorithms/safe.py
|
T3p/potion
|
c6349111e3f2d2a32e85dc29e052f36cf36edcdc
|
[
"MIT"
] | 1
|
2019-09-08T15:11:55.000Z
|
2019-09-08T15:11:55.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Policy Gradient algorithms with monotonic improvement guarantees
"""
from potion.simulation.trajectory_generators import generate_batch
from potion.common.misc_utils import (performance, avg_horizon, mean_sum_info,
clip, seed_all_agent, returns, separator)
from potion.estimation.gradients import gpomdp_estimator, reinforce_estimator
from potion.common.logger import Logger
from potion.estimation.jackknife import jackknife
import scipy.stats as sts
import torch
import time
import math
def spg(env, policy, horizon, lip_const, err_bound, *,
        fail_prob = 0.05,
        mini_batchsize = 10,
        max_batchsize = 10000,
        iterations = float('inf'),
        max_samples = 1e6,
        disc = 0.9,
        fast = False,
        action_filter = None,
        estimator = 'gpomdp',
        baseline = 'peters',
        logger = Logger(name='SPG'),
        shallow = True,
        seed = None,
        save_params = 1000,
        log_params = True,
        log_grad = False,
        parallel = False,
        render = False,
        oracle = None,
        verbose = 1):
    """
    Safe PG algorithm with adaptive batch size from
    "Smoothing Policies and Safe Policy Gradients", Papini et al., 2019

    env: environment
    policy: the one to improve
    horizon: maximum task horizon
    lip_const: Lipschitz constant of the gradient (upper bound)
    err_bound: statistical upper bound on the PG estimation error, as a
        function of the per-round failure probability and the batch size
    fail_prob: overall probability of failure
    mini_batchsize: trajectories collected per data-collection round
    max_batchsize: maximum number of trajectories to estimate policy gradient
    iterations: maximum number of learning iterations
    max_samples: maximum number of total collected trajectories
    disc: discount factor
    fast: if True, target the minimum safe batch size instead of the larger
        optimal one, and take the correspondingly larger step
    action_filter: function to apply to the agent's action before feeding it to
        the environment, not considered in gradient estimation. By default,
        the action is clipped to satisfy environmental boundaries
    estimator: either 'reinforce' or 'gpomdp' (default). The latter typically
        suffers from less variance
    baseline: control variate to be used in the gradient estimator. Either
        'avg' (average reward), 'peters' (variance-minimizing, default) or
        'zero' (no baseline)
    logger: for human-readable logs (standard output, csv, tensorboard...)
        NOTE: a mutable default argument — the same Logger instance is shared
        across calls that rely on the default
    shallow: whether to employ pre-computed score functions (only available for
        shallow policies)
    seed: random seed (None for random behavior)
    save_params: how often (every x iterations) to save the policy
        parameters to disk. Final parameters are always saved for
        x>0. If False, they are never saved.
    log_params: whether to include policy parameters in the human-readable logs
    log_grad: whether to include gradients in the human-readable logs
    parallel: number of parallel jobs for simulation. If 0 or False,
        sequential simulation is performed.
    render: how often (every x iterations) to render the agent's behavior
        on a sample trajectory. If False, no rendering happens
    oracle: optional function mapping the (numpy) parameter vector to the true
        performance, for evaluation only
    verbose: level of verbosity on standard output
    """
    #Defaults
    if action_filter is None:
        action_filter = clip(env)
    #Seed agent
    if seed is not None:
        seed_all_agent(seed)
    #Prepare logger
    algo_info = {'Algorithm': 'SPG',
                 'Estimator': estimator,
                 'Baseline': baseline,
                 'Env': str(env),
                 'Horizon': horizon,
                 'Discount': disc,
                 'Confidence': 1 - fail_prob,
                 'Seed': seed,
                 'MiniBatchSize': mini_batchsize,
                 'MaxBatchSize': max_batchsize,
                 'LipschitzConstant': lip_const,
                 'ErrorBound': err_bound,
                 }
    logger.write_info({**algo_info, **policy.info()})
    #FIX: 'StepSize' was listed twice (harmless through dict.fromkeys, but noisy)
    log_keys = ['Perf',
                'UPerf',
                'AvgHorizon',
                'StepSize',
                'GradNorm',
                'Time',
                'BatchSize',
                'TotSamples']
    if oracle is not None:
        log_keys += ['Oracle']
    if log_params:
        log_keys += ['param%d' % i for i in range(policy.num_params())]
    if log_grad:
        log_keys += ['grad%d' % i for i in range(policy.num_params())]
    log_row = dict.fromkeys(log_keys)
    logger.open(log_row.keys())
    #Initializations
    it = 1
    tot_samples = 0
    _estimator = (reinforce_estimator if estimator == 'reinforce'
                  else gpomdp_estimator)
    #Learning loop
    while it < iterations and tot_samples < max_samples:
        start = time.time()
        if verbose:
            print('\n* Iteration %d *' % it)
        params = policy.get_flat()
        #Render the agent's behavior
        if render and it % render == 0:
            generate_batch(env, policy, horizon,
                           episodes=1,
                           action_filter=action_filter,
                           render=True)
        #Collect trajectories in mini-batches until the optimal safe batch
        #size is matched (or the per-update sample budget is exhausted)
        batch = []
        batchsize = 0
        delta = fail_prob / (it * (it + 1))  #union bound over iterations
        do = True
        i = 0
        while do or batchsize + mini_batchsize <= max_batchsize:
            do = False
            i = i + 1
            batch += generate_batch(env, policy, horizon,
                                    episodes=mini_batchsize,
                                    action_filter=action_filter,
                                    n_jobs=parallel)
            batchsize = len(batch)
            #Estimate policy gradient on all data collected so far
            grad = _estimator(batch, disc, policy,
                              baselinekind=baseline,
                              shallow=shallow,
                              result='mean')
            #Batch sizes needed for a safe / optimal update
            #(union bound over collection rounds)
            delta_i = delta / (i * (i + 1))
            min_safe_batchsize = torch.ceil(err_bound(delta_i, batchsize) /
                                            torch.norm(grad)**2).item()
            if not fast:
                optimal_batchsize = torch.ceil(4 * err_bound(delta_i, batchsize) /
                                               torch.norm(grad)**2).item()
            else:
                optimal_batchsize = min_safe_batchsize
            if verbose:
                print('Collected %d / %d trajectories' % (batchsize,
                                                          optimal_batchsize))
            #Collecting more data for the same update?
            if batchsize >= optimal_batchsize:
                break
        if verbose:
            print('Optimal batch size: %d' % optimal_batchsize)
        #Update long-term quantities
        tot_samples += batchsize
        #Log
        log_row['Perf'] = performance(batch, disc)
        log_row['UPerf'] = performance(batch, disc=1.)
        log_row['AvgHorizon'] = avg_horizon(batch)
        log_row['GradNorm'] = torch.norm(grad).item()
        log_row['BatchSize'] = batchsize
        log_row['TotSamples'] = tot_samples
        if oracle is not None:
            log_row['Oracle'] = oracle(params.numpy())
        if log_params:
            for j in range(policy.num_params()):
                log_row['param%d' % j] = params[j].item()
        if log_grad:
            for j in range(policy.num_params()):
                log_row['grad%d' % j] = grad[j].item()
        #FIX: in the original text this skip branch was reachable
        #unconditionally (its `continue` would have made the update code
        #dead). Guard it with the safety condition: if even the maximum
        #allowed batch is below the minimum safe size, perform no update.
        if batchsize < min_safe_batchsize:
            if verbose:
                print('Safe update would require more samples than maximum allowed')
            log_row['StepSize'] = 0.
            log_row['Time'] = time.time() - start
            if verbose:
                print(separator)
            logger.write_row(log_row, it)
            if verbose:
                print(separator)
            #Skip to next iteration (current trajectories are discarded)
            it += 1
            continue
        #Select the constant safe step size (larger in fast mode)
        if not fast:
            stepsize = 1. / (2 * lip_const)
        else:
            stepsize = 1. / lip_const
        log_row['StepSize'] = stepsize
        #Update policy parameters
        new_params = params + stepsize * grad
        policy.set_from_flat(new_params)
        #Save parameters
        if save_params and it % save_params == 0:
            logger.save_params(params, it)
        #Next iteration
        log_row['Time'] = time.time() - start
        if verbose:
            print(separator)
        logger.write_row(log_row, it)
        if verbose:
            print(separator)
        it += 1
    #Save final parameters
    #NOTE(review): `params` is only bound inside the loop — a zero-iteration
    #run would raise NameError here (pre-existing behavior)
    if save_params:
        logger.save_params(params, it)
    #Cleanup
    logger.close()
def safe_step_strict(env, policy, disc, horizon, lip_const, var_bound,
                     batchsize = 100,
                     conf = 0.2,
                     iterations = float('inf'),
                     max_samples = 1e7,
                     action_filter = None,
                     logger = Logger(name='SafeStepStrict'),
                     estimator = 'gpomdp',
                     baseline = 'peters',
                     shallow = True,
                     seed = None,
                     save_params = 10000,
                     log_params = True,
                     log_grad = False,
                     parallel = False,
                     render = False,
                     info_key = None,
                     verbose = 1):
    """
    Strict version of Safe PG algorithm with adaptive step size from
    "Smoothing Policies and Safe Policy Gradients" (Algorithm 2)

    env: environment
    policy: the one to improve
    disc: discount factor
    horizon: maximum task horizon
    lip_const: Lipschitz constant of the gradient (upper bound)
    var_bound: upper bound on the variance of the PG estimator
    batchsize: number of trajectories used to estimate the policy gradient
    conf: probability of failure
    iterations: maximum number of learning iterations
    max_samples: maximum number of total collected trajectories
    action_filter: applied to actions before the environment (default: clip)
    logger: for human-readable logs
    estimator: 'reinforce' or 'gpomdp'
    baseline: 'avg', 'peters' or 'zero'
    shallow: use pre-computed score functions (shallow policies only)
    seed: random seed (None for random behavior)
    save_params: save policy parameters every x iterations (False: never)
    log_params / log_grad: include parameters / gradients in the logs
    parallel: number of parallel simulation jobs (0/False: sequential)
    render: render a sample trajectory every x iterations (False: never)
    info_key: name of the environment info to log
    verbose: level of verbosity on standard output
    """
    #Defaults
    if action_filter is None:
        action_filter = clip(env)
    if baseline != 'zero':
        #Variance-reducing baselines need more than two trajectories
        assert batchsize > 2
    #Seed agent
    if seed is not None:
        seed_all_agent(seed)
    #Prepare logger
    algo_info = {'Algorithm': 'SafeStepStrict',
                 'Env': str(env),
                 'Horizon': horizon,
                 'Discount': disc,
                 'Seed': seed,
                 }
    logger.write_info({**algo_info, **policy.info()})
    #FIX: 'LipConst' and 'Info' are written into log_row below but were never
    #declared here, which breaks loggers with a fixed set of columns
    log_keys = ['Perf',
                'UPerf',
                'AvgHorizon',
                'StepSize',
                'BatchSize',
                'GradNorm',
                'Time',
                'TotSamples',
                'Threshold',
                'VarBound',
                'LipConst',
                'Info']
    if log_params:
        log_keys += ['param%d' % i for i in range(policy.num_params())]
    if log_grad:
        log_keys += ['grad%d' % i for i in range(policy.num_params())]
    if info_key is not None:
        log_keys.append(info_key)
    log_row = dict.fromkeys(log_keys)
    logger.open(log_row.keys())
    #Initializations
    _estimator = (reinforce_estimator if estimator == 'reinforce'
                  else gpomdp_estimator)
    it = 0
    tot_samples = 0
    #Learning loop
    while it < iterations and tot_samples < max_samples:
        start = time.time()
        if verbose:
            print('\n* Iteration %d *' % it)
        params = policy.get_flat()
        #Render the agent's behavior
        if render and it % render == 0:
            generate_batch(env, policy, horizon,
                           episodes=1,
                           action_filter=action_filter,
                           render=True)
        #Collect trajectories
        batch = generate_batch(env, policy, horizon,
                               episodes=batchsize,
                               action_filter=action_filter,
                               n_jobs=parallel,
                               key=info_key)
        #Estimate policy gradient
        grad_samples = _estimator(batch, disc, policy,
                                  baselinekind=baseline,
                                  shallow=shallow,
                                  result='samples')
        grad = torch.mean(grad_samples, 0)
        grad_norm = torch.norm(grad)
        #Update long-term quantities
        tot_samples += batchsize
        #Minimum batch size for which a safe step exists
        threshold = torch.ceil(var_bound / (conf * grad_norm**2))
        #Log
        log_row['BatchSize'] = batchsize
        log_row['VarBound'] = var_bound
        log_row['LipConst'] = lip_const
        log_row['Threshold'] = threshold.item()
        log_row['Perf'] = performance(batch, disc)
        log_row['Info'] = mean_sum_info(batch).item()
        log_row['UPerf'] = performance(batch, disc=1.)
        log_row['AvgHorizon'] = avg_horizon(batch)
        log_row['GradNorm'] = grad_norm.item()
        log_row['TotSamples'] = tot_samples
        if log_params:
            for i in range(policy.num_params()):
                log_row['param%d' % i] = params[i].item()
        if log_grad:
            for i in range(policy.num_params()):
                log_row['grad%d' % i] = grad[i].item()
        #Safety test: with a fixed batch size, the only option left when the
        #batch is too small is to stop learning altogether
        if batchsize < threshold:
            log_row['StepSize'] = 0.
            log_row['Time'] = time.time() - start
            if verbose:
                print(separator)
            logger.write_row(log_row, it)
            if verbose:
                print(separator)
            #Terminate
            if verbose:
                print ('Not safe! Need at least %d samples'
                       % int(threshold.item()))
            break
        #Select the adaptive safe step size
        stepsize = (1. - math.sqrt(var_bound / (conf * batchsize))
                    / grad_norm) / lip_const
        log_row['StepSize'] = stepsize.item()
        #Update policy parameters
        new_params = params + stepsize * grad
        policy.set_from_flat(new_params)
        #Save parameters
        if save_params and it % save_params == 0:
            logger.save_params(params, it)
        #Log
        log_row['Time'] = time.time() - start
        if verbose:
            print(separator)
        logger.write_row(log_row, it)
        if verbose:
            print(separator)
        #Prepare next iteration
        it += 1
    #Save final parameters
    if save_params:
        logger.save_params(params, it)
    #Cleanup
    logger.close()
def safe_step(env, policy, disc, horizon, lip_const,
              batchsize = 100,
              conf = 0.2,
              iterations = float('inf'),
              max_samples = 1e7,
              action_filter = None,
              logger = Logger(name='SafeStep'),
              estimator = 'gpomdp',
              baseline = 'peters',
              shallow = True,
              seed = None,
              save_params = 10000,
              log_params = True,
              log_grad = False,
              parallel = False,
              render = False,
              info_key = None,
              verbose = 1):
    """
    Safe PG algorithm with adaptive step size from
    "Smoothing Policies and Safe Policy Gradients"

    Unlike safe_step_strict, no a-priori variance bound is required: the
    gradient variance is estimated from data via the jackknife, and a
    Student-t upper confidence bound on it is used in the safe step size.

    env: environment
    policy: the one to improve
    disc: discount factor
    horizon: maximum task horizon
    lip_const: Lipschitz constant of the gradient (upper bound)
    batchsize: number of trajectories used to estimate the policy gradient
    conf: probability of failure
    iterations: maximum number of learning iterations
    max_samples: maximum number of total collected trajectories
    action_filter: applied to actions before the environment (default: clip)
    logger: for human-readable logs
    estimator: 'reinforce' or 'gpomdp'
    baseline: 'avg', 'peters' or 'zero'
    shallow: use pre-computed score functions (shallow policies only)
    seed: random seed (None for random behavior)
    save_params: save policy parameters every x iterations (False: never)
    log_params / log_grad: include parameters / gradients in the logs
    parallel: number of parallel simulation jobs (0/False: sequential)
    render: render a sample trajectory every x iterations (False: never)
    info_key: name of the environment info to log
    verbose: level of verbosity on standard output
    """
    #Defaults
    if action_filter is None:
        action_filter = clip(env)
    if baseline != 'zero':
        #Variance-reducing baselines need more than two trajectories
        assert batchsize > 2
    #Seed agent
    if seed is not None:
        seed_all_agent(seed)
    #Prepare logger
    algo_info = {'Algorithm': 'SafeStep',
                 'Env': str(env),
                 'Horizon': horizon,
                 'Discount': disc,
                 'Seed': seed,
                 }
    logger.write_info({**algo_info, **policy.info()})
    #FIX: 'LipConst' and 'Info' are written into log_row below but were never
    #declared here, which breaks loggers with a fixed set of columns
    log_keys = ['Perf',
                'UPerf',
                'AvgHorizon',
                'StepSize',
                'BatchSize',
                'GradNorm',
                'Time',
                'TotSamples',
                'Threshold',
                'VarBound',
                'LipConst',
                'Info']
    if log_params:
        log_keys += ['param%d' % i for i in range(policy.num_params())]
    if log_grad:
        log_keys += ['grad%d' % i for i in range(policy.num_params())]
    if info_key is not None:
        log_keys.append(info_key)
    log_row = dict.fromkeys(log_keys)
    logger.open(log_row.keys())
    #Initializations
    _estimator = (reinforce_estimator if estimator == 'reinforce'
                  else gpomdp_estimator)
    it = 0
    tot_samples = 0
    #Learning loop
    while it < iterations and tot_samples < max_samples:
        start = time.time()
        if verbose:
            print('\n* Iteration %d *' % it)
        params = policy.get_flat()
        #Render the agent's behavior
        if render and it % render == 0:
            generate_batch(env, policy, horizon,
                           episodes=1,
                           action_filter=action_filter,
                           render=True)
        #Collect trajectories
        batch = generate_batch(env, policy, horizon,
                               episodes=batchsize,
                               action_filter=action_filter,
                               n_jobs=parallel,
                               key=info_key)
        #Estimate policy gradient (per-trajectory samples, then mean)
        grad_samples = _estimator(batch, disc, policy,
                                  baselinekind=baseline,
                                  shallow=shallow,
                                  result='samples')
        grad = torch.mean(grad_samples, dim=0)
        grad_norm = torch.norm(grad)
        #Upper confidence bound on the gradient variance (jackknife + t quantile)
        var_estimator = lambda samples: torch.std(samples, dim=0,
                                                  unbiased=True)**2
        jack_mean, jack_var = jackknife(var_estimator, grad_samples)
        quantile = sts.t.ppf(1 - conf/2, batchsize)
        var_bound = jack_mean + torch.sqrt(jack_var) * quantile
        #Update long-term quantities
        tot_samples += batchsize
        #Minimum batch size for which a safe step exists
        threshold = torch.ceil(2 * var_bound / (conf * grad_norm**2))
        #Log
        log_row['BatchSize'] = batchsize
        log_row['VarBound'] = var_bound.item()
        log_row['LipConst'] = lip_const
        log_row['Threshold'] = threshold.item()
        log_row['Perf'] = performance(batch, disc)
        log_row['Info'] = mean_sum_info(batch).item()
        log_row['UPerf'] = performance(batch, disc=1.)
        log_row['AvgHorizon'] = avg_horizon(batch)
        log_row['GradNorm'] = grad_norm.item()
        log_row['TotSamples'] = tot_samples
        if log_params:
            for i in range(policy.num_params()):
                log_row['param%d' % i] = params[i].item()
        if log_grad:
            for i in range(policy.num_params()):
                log_row['grad%d' % i] = grad[i].item()
        #Safety test: with a fixed batch size, stop learning when unsafe
        if batchsize < threshold:
            log_row['StepSize'] = 0.
            log_row['Time'] = time.time() - start
            if verbose:
                print(separator)
            logger.write_row(log_row, it)
            if verbose:
                print(separator)
            #Terminate
            if verbose:
                print ('Not safe! Need at least %d samples'
                       % int(threshold.item()))
            break
        #Select the adaptive safe step size
        stepsize = (1. - math.sqrt(2 * var_bound / (conf * batchsize))
                    / grad_norm) / lip_const
        log_row['StepSize'] = stepsize.item()
        #Update policy parameters
        new_params = params + stepsize * grad
        policy.set_from_flat(new_params)
        #Save parameters
        if save_params and it % save_params == 0:
            logger.save_params(params, it)
        #Log
        log_row['Time'] = time.time() - start
        if verbose:
            print(separator)
        logger.write_row(log_row, it)
        if verbose:
            print(separator)
        #Prepare next iteration
        it += 1
    #Save final parameters
    if save_params:
        logger.save_params(params, it)
    #Cleanup
    logger.close()
def legacy_adastep(env, policy, horizon, pen_coeff, var_bound, *,
                   conf = 0.2,
                   batchsize = 5000,
                   iterations = float('inf'),
                   max_samples = 1e6,
                   disc = 0.9,
                   action_filter = None,
                   estimator = 'gpomdp',
                   baseline = 'peters',
                   logger = Logger(name='AdaStep'),
                   shallow = True,
                   meta_conf = 0.05,
                   seed = None,
                   test_batchsize = False,
                   info_key = 'danger',
                   save_params = 10000,
                   log_params = True,
                   log_grad = False,
                   parallel = False,
                   render = False,
                   verbose = 1):
    """
    Safe PG algorithm from "Adaptive Step Size for Policy Gradient Methods",
    Pirotta et al., 2013.
    Only for Gaussian policies.

    env: environment
    policy: the one to improve
    horizon: maximum task horizon
    pen_coeff: penalty coefficient for policy update
    var_bound: upper bound on the variance of the PG estimator
    conf: probability of failure
    batchsize: number of trajectories to estimate policy gradient
    iterations: maximum number of learning iterations
    max_samples: maximum number of total trajectories
    disc: discount factor
    action_filter: function to apply to the agent's action before feeding it to
        the environment, not considered in gradient estimation. By default,
        the action is clipped to satisfy environmental boundaries
    estimator: either 'reinforce' or 'gpomdp' (default). The latter typically
        suffers from less variance
    baseline: control variate to be used in the gradient estimator. Either
        'avg' (average reward), 'peters' (variance-minimizing, default) or
        'zero' (no baseline)
    logger: for human-readable logs (standard output, csv, tensorboard...)
    shallow: whether to employ pre-computed score functions (only available for
        shallow policies)
    meta_conf: confidence level of safe-update test (for evaluation only)
    seed: random seed (None for random behavior)
    test_batchsize: number of test trajectories used to evaluate the
        corresponding deterministic policy at each iteration. If 0 or False, no
        test is performed
    info_key: name of the environment info to log
    save_params: how often (every x iterations) to save the policy
        parameters to disk. Final parameters are always saved for
        x>0. If False, they are never saved.
    log_params: whether to include policy parameters in the human-readable logs
    log_grad: whether to include gradients in the human-readable logs
    parallel: number of parallel jobs for simulation. If 0 or False,
        sequential simulation is performed.
    render: how often (every x iterations) to render the agent's behavior
        on a sample trajectory. If False, no rendering happens
    verbose: level of verbosity on standard output
    """
    #Defaults
    if action_filter is None:
        action_filter = clip(env)
    #Seed agent
    if seed is not None:
        seed_all_agent(seed)
    #Prepare logger
    algo_info = {'Algorithm': 'AdaStep',
                 'Estimator': estimator,
                 'Baseline': baseline,
                 'Env': str(env),
                 'Horizon': horizon,
                 'Discount': disc,
                 'Confidence': conf,
                 'ConfidenceParam': conf,
                 'Seed': seed,
                 'BatchSize': batchsize,
                 'PenalizationCoefficient': pen_coeff,
                 'VarianceBound': var_bound
                 }
    logger.write_info({**algo_info, **policy.info()})
    #FIX: 'StepSize' was declared twice
    log_keys = ['Perf',
                'UPerf',
                'AvgHorizon',
                'StepSize',
                'GradNorm',
                'Time',
                'BatchSize',
                'Info',
                'TotSamples',
                'Safety']
    if log_params:
        log_keys += ['param%d' % i for i in range(policy.num_params())]
    if log_grad:
        log_keys += ['grad%d' % i for i in range(policy.num_params())]
    if test_batchsize:
        #FIX: 'TestPerf' was listed twice while 'UTestPerf' (written below)
        #was missing — clearly a copy-paste typo
        log_keys += ['TestPerf', 'UTestPerf', 'TestInfo']
    log_row = dict.fromkeys(log_keys)
    logger.open(log_row.keys())
    #Initializations
    it = 0
    tot_samples = 0
    safety = 1.
    _estimator = (reinforce_estimator if estimator == 'reinforce'
                  else gpomdp_estimator)
    updated = False
    updates = 0
    unsafe_updates = 0
    #Chebyshev-style estimation-error scale
    eps = math.sqrt(var_bound / conf)
    #Learning loop
    while it < iterations and tot_samples < max_samples:
        start = time.time()
        if verbose:
            print('\n* Iteration %d *' % it)
        params = policy.get_flat()
        #Test the corresponding deterministic policy
        if test_batchsize:
            test_batch = generate_batch(env, policy, horizon,
                                        episodes=test_batchsize,
                                        action_filter=action_filter,
                                        n_jobs=parallel,
                                        deterministic=True,
                                        key=info_key)
            log_row['TestPerf'] = performance(test_batch, disc)
            log_row['UTestPerf'] = performance(test_batch, 1)
            log_row['TestInfo'] = mean_sum_info(test_batch).item()
        #Render the agent's behavior
        if render and it % render == 0:
            generate_batch(env, policy, horizon,
                           episodes=1,
                           action_filter=action_filter,
                           render=True)
        #Collect trajectories according to fixed batch size
        batch = generate_batch(env, policy, horizon,
                               episodes=batchsize,
                               action_filter=action_filter,
                               n_jobs=parallel,
                               key=info_key)
        #Estimate policy gradient and coordinate-wise confidence interval
        grad_samples = _estimator(batch, disc, policy,
                                  baselinekind=baseline,
                                  shallow=shallow,
                                  result='samples')
        grad = torch.mean(grad_samples, 0)
        lower = torch.clamp(torch.abs(grad) - eps / math.sqrt(batchsize), 0,
                            float('inf'))
        upper = torch.abs(grad) + eps / math.sqrt(batchsize)
        #Update long-term quantities
        tot_samples += batchsize
        #Update safety measure (a-posteriori t-test on returns, eval only)
        if updates == 0:
            old_rets = returns(batch, disc)
        elif updated:
            new_rets = returns(batch, disc)
            tscore, pval = sts.ttest_ind(old_rets, new_rets)
            if pval / 2 < meta_conf and tscore > 0:
                unsafe_updates += 1
                if verbose:
                    print('The previous update was unsafe! (p-value = %f)'
                          % (pval / 2))
            old_rets = new_rets
            safety = 1 - unsafe_updates / updates
        #Log
        log_row['Safety'] = safety
        log_row['Perf'] = performance(batch, disc)
        log_row['Info'] = mean_sum_info(batch).item()
        log_row['UPerf'] = performance(batch, disc=1.)
        log_row['AvgHorizon'] = avg_horizon(batch)
        log_row['GradNorm'] = torch.norm(grad).item()
        log_row['BatchSize'] = batchsize
        log_row['TotSamples'] = tot_samples
        if log_params:
            for i in range(policy.num_params()):
                log_row['param%d' % i] = params[i].item()
        if log_grad:
            for i in range(policy.num_params()):
                log_row['grad%d' % i] = grad[i].item()
        #Check if number of samples is sufficient to perform update
        if torch.norm(lower) == 0:
            updated = False
            if verbose:
                print('No update, would require more samples')
        #NOTE(review): when `lower` is all zeros the step size below is 0 and
        #the parameter update is a no-op, but `updated`/`updates` are still
        #advanced — confirm this matches the intended bookkeeping
        #Select step size (guaranteed-improvement step of Pirotta et al.)
        stepsize = (torch.norm(lower)**2 /
                    (2 * pen_coeff * torch.sum(upper)**2)).item()
        log_row['StepSize'] = stepsize
        #Update policy parameters
        new_params = params + stepsize * grad
        policy.set_from_flat(new_params)
        updated = True
        updates += 1
        #Save parameters
        if save_params and it % save_params == 0:
            logger.save_params(params, it)
        #Next iteration
        log_row['Time'] = time.time() - start
        if verbose:
            print(separator)
        logger.write_row(log_row, it)
        if verbose:
            print(separator)
        it += 1
    #Save final parameters
    if save_params:
        logger.save_params(params, it)
    #Cleanup
    logger.close()
def legacy_adabatch(env, policy, horizon, pen_coeff, *,
                    bound = 'bernstein',
                    var_bound = None,
                    grad_range = None,
                    fail_prob = 0.05,
                    min_batchsize = 32,
                    max_batchsize = 10000,
                    iterations = float('inf'),
                    max_samples = 1e6,
                    disc = 0.9,
                    action_filter = None,
                    estimator = 'gpomdp',
                    baseline = 'peters',
                    logger = Logger(name='AdaBatch'),
                    shallow = True,
                    meta_conf = 0.05,
                    seed = None,
                    test_batchsize = False,
                    info_key = 'danger',
                    oracle = None,
                    save_params = 10000,
                    log_params = True,
                    log_grad = False,
                    parallel = False,
                    render = False,
                    verbose = 1):
    """
    Safe PG algorithm from "Adaptive Batch Size for Safe Policy Gradients",
    Papini et al., 2017.
    Only for Gaussian policies.

    env: environment
    policy: the one to improve
    horizon: maximum task horizon
    pen_coeff: penalty coefficient for policy update
    bound: statistical inequality used to determine optimal batchsize
        (chebyshev/student/hoeffding/bernstein)
    var_bound: upper bound on the variance of the PG estimator. Must not be
        None if Chebyshev's bound is employed
    grad_range: theoretical range of gradient estimate. If None, it is
        estimated from data (in a biased way)
    fail_prob: probability of failure
    min_batchsize: minimum number of trajectories to estimate policy gradient
    max_batchsize: maximum number of trajectories to estimate policy gradient
    iterations: number of policy updates
    max_samples: maximum number of total trajectories
    disc: discount factor
    action_filter: function to apply to the agent's action before feeding it to
        the environment, not considered in gradient estimation. By default,
        the action is clipped to satisfy environmental boundaries
    estimator: either 'reinforce' or 'gpomdp' (default). The latter typically
        suffers from less variance
    baseline: control variate to be used in the gradient estimator. Either
        'avg' (average reward), 'peters' (variance-minimizing, default) or
        'zero' (no baseline)
    logger: for human-readable logs (standard output, csv, tensorboard...)
    shallow: whether to employ pre-computed score functions (only available for
        shallow policies)
    meta_conf: confidence level of safe-update test (for evaluation only)
    seed: random seed (None for random behavior)
    test_batchsize: number of test trajectories used to evaluate the
        corresponding deterministic policy at each iteration. If 0 or False, no
        test is performed
    info_key: name of the environment info to log
    oracle: optional function mapping the (numpy) parameter vector to the true
        performance, for evaluation only
    save_params: how often (every x iterations) to save the policy
        parameters to disk. Final parameters are always saved for
        x>0. If False, they are never saved.
    log_params: whether to include policy parameters in the human-readable logs
    log_grad: whether to include gradients in the human-readable logs
    parallel: number of parallel jobs for simulation. If 0 or False,
        sequential simulation is performed.
    render: how often (every x iterations) to render the agent's behavior
        on a sample trajectory. If False, no rendering happens
    verbose: level of verbosity
    """
    #Defaults
    if action_filter is None:
        action_filter = clip(env)
    if bound == 'chebyshev' and var_bound is None:
        raise NotImplementedError
    empirical_range = (grad_range is None)
    #Seed agent
    if seed is not None:
        seed_all_agent(seed)
    #Prepare logger
    algo_info = {'Algorithm': 'AdaBatch',
                 'Estimator': estimator,
                 'Baseline': baseline,
                 'Env': str(env),
                 'Horizon': horizon,
                 'Discount': disc,
                 'FailProb': fail_prob,
                 'Seed': seed,
                 'MinBatchSize': min_batchsize,
                 'MaxBatchSize': max_batchsize,
                 'PenalizationCoefficient': pen_coeff,
                 'VarianceBound': var_bound,
                 'Bound': bound
                 }
    logger.write_info({**algo_info, **policy.info()})
    #FIX: 'StepSize' was declared twice
    log_keys = ['Perf',
                'UPerf',
                'AvgHorizon',
                'StepSize',
                'GradNorm',
                'Time',
                'BatchSize',
                'Info',
                'TotSamples',
                'GradVar',
                'GradRange',
                'Safety',
                'Err',
                'GradInfNorm']
    if oracle is not None:
        log_keys += ['Oracle']
    if log_params:
        log_keys += ['param%d' % i for i in range(policy.num_params())]
    if log_grad:
        log_keys += ['grad%d' % i for i in range(policy.num_params())]
    if test_batchsize:
        #FIX: 'TestPerf' was listed twice while 'UTestPerf' (written below)
        #was missing — clearly a copy-paste typo
        log_keys += ['TestPerf', 'UTestPerf', 'TestInfo']
    log_row = dict.fromkeys(log_keys)
    logger.open(log_row.keys())
    #Initializations
    it = 1
    tot_samples = 0
    safety = 1.
    optimal_batchsize = min_batchsize
    _estimator = (reinforce_estimator if estimator == 'reinforce'
                  else gpomdp_estimator)
    updated = False
    updates = 0
    unsafe_updates = 0
    params = policy.get_flat()
    #Running coordinate-wise extremes of the gradient estimate, used to
    #estimate its range empirically when grad_range is not provided
    max_grad = torch.zeros_like(params) - float('inf')
    min_grad = torch.zeros_like(params) + float('inf')
    #Learning loop
    while it < iterations and tot_samples < max_samples:
        start = time.time()
        if verbose:
            print('\n* Iteration %d *' % it)
        params = policy.get_flat()
        delta = fail_prob / (it * (it + 1))  #union bound over iterations
        #Test the corresponding deterministic policy
        if test_batchsize:
            test_batch = generate_batch(env, policy, horizon,
                                        episodes=test_batchsize,
                                        action_filter=action_filter,
                                        n_jobs=parallel,
                                        deterministic=True,
                                        key=info_key)
            log_row['TestPerf'] = performance(test_batch, disc)
            log_row['UTestPerf'] = performance(test_batch, 1)
            log_row['TestInfo'] = mean_sum_info(test_batch).item()
        #Render the agent's behavior
        if render and it % render == 0:
            generate_batch(env, policy, horizon,
                           episodes=1,
                           action_filter=action_filter,
                           render=True)
        #Collect trajectories according to previous optimal batch size,
        #clipped to [min_batchsize, max_batchsize]
        batch = generate_batch(env, policy, horizon,
                               episodes=max(min_batchsize,
                                            min(max_batchsize,
                                                optimal_batchsize)),
                               action_filter=action_filter,
                               n_jobs=parallel,
                               key=info_key)
        batchsize = len(batch)
        #Estimate policy gradient; updates act on the single coordinate with
        #the largest absolute gradient
        grad_samples = _estimator(batch, disc, policy,
                                  baselinekind=baseline,
                                  shallow=shallow,
                                  result='samples')
        grad = torch.mean(grad_samples, 0)
        grad_infnorm = torch.max(torch.abs(grad))
        coordinate = torch.min(torch.argmax(torch.abs(grad))).item()
        #Compute statistics for estimation error
        if bound in ['bernstein', 'student']:
            grad_var = torch.var(grad_samples, 0, unbiased = True)
            grad_var = torch.max(grad_var).item()
            log_row['GradVar'] = grad_var
        else:
            log_row['GradVar'] = var_bound
        if bound in ['bernstein', 'hoeffding'] and empirical_range:
            max_grad = torch.max(grad, max_grad)
            min_grad = torch.min(min_grad, grad)
            grad_range = torch.max(max_grad - min_grad).item()
            if grad_range <= 0:
                grad_range = torch.max(2 * abs(max_grad)).item()
        log_row['GradRange'] = grad_range
        #Compute estimation error for the chosen concentration bound
        if bound == 'chebyshev':
            eps = math.sqrt(var_bound / delta)
        elif bound == 'student':
            quant = sts.t.ppf(1 - delta, batchsize)
            eps = quant * math.sqrt(grad_var)
        elif bound == 'hoeffding':
            eps = grad_range * math.sqrt(math.log(2. / delta) / 2)
        elif bound == 'bernstein':
            eps = math.sqrt(2 * grad_var * math.log(3. / delta))
            eps2 = 3 * grad_range * math.log(3. / delta)
        #Compute optimal batch size
        if bound in ['chebyshev', 'student', 'hoeffding']:
            #Closed form from the paper
            optimal_batchsize = math.ceil(((13 + 3 * math.sqrt(17)) * eps**2 /
                                           (2 * grad_infnorm**2)).item())
            min_safe_batchsize = math.ceil((eps**2 / grad_infnorm**2).item())
        else:
            #Bernstein: no closed form, maximize the per-sample upper bound
            #on the performance improvement (ups) by line search
            min_safe_batchsize = math.ceil(((eps + math.sqrt(eps**2
                                                             + 4 * eps2
                                                             * grad_infnorm))
                                            / (2 * grad_infnorm))**2)
            optimal_batchsize = min_safe_batchsize
            _stepsize = ((grad_infnorm - eps / math.sqrt(optimal_batchsize)
                          - eps2 / optimal_batchsize)**2
                         / (2 * pen_coeff * (grad_infnorm + eps
                                             / math.sqrt(optimal_batchsize)
                                             + eps2 / optimal_batchsize)**2)).item()
            ups = (grad_infnorm**2 * _stepsize * (1 - pen_coeff * _stepsize)
                   / optimal_batchsize)
            old_ups = -float('inf')
            while ups > old_ups:
                optimal_batchsize += 1
                old_ups = ups
                _stepsize = ((grad_infnorm - eps / math.sqrt(optimal_batchsize)
                              - eps2 / optimal_batchsize)**2
                             / (2 * pen_coeff * (grad_infnorm + eps
                                                 / math.sqrt(optimal_batchsize)
                                                 + eps2 / optimal_batchsize)**2)).item()
                ups = (grad_infnorm**2 * _stepsize
                       * (1 - pen_coeff * _stepsize)
                       / optimal_batchsize)
            optimal_batchsize -= 1
        if verbose:
            print('Optimal batch size: %d' % optimal_batchsize)
        #Update long-term quantities
        tot_samples += batchsize
        #Update safety measure (a-posteriori t-test on returns, eval only)
        if updates == 0:
            old_rets = returns(batch, disc)
        elif updated:
            new_rets = returns(batch, disc)
            tscore, pval = sts.ttest_ind(old_rets, new_rets)
            if pval / 2 < meta_conf and tscore > 0:
                unsafe_updates += 1
                if verbose:
                    print('The previous update was unsafe! (p-value = %f)'
                          % (pval / 2))
            old_rets = new_rets
            safety = 1 - unsafe_updates / updates
        #Log
        log_row['Err'] = eps
        log_row['Safety'] = safety
        log_row['Perf'] = performance(batch, disc)
        log_row['Info'] = mean_sum_info(batch).item()
        log_row['UPerf'] = performance(batch, disc=1.)
        log_row['AvgHorizon'] = avg_horizon(batch)
        log_row['GradNorm'] = torch.norm(grad).item()
        log_row['GradInfNorm'] = grad_infnorm.item()
        log_row['BatchSize'] = batchsize
        log_row['TotSamples'] = tot_samples
        if oracle is not None:
            log_row['Oracle'] = oracle(params.numpy())
        if log_params:
            for i in range(policy.num_params()):
                log_row['param%d' % i] = params[i].item()
        if log_grad:
            for i in range(policy.num_params()):
                log_row['grad%d' % i] = grad[i].item()
        #Check if number of samples is sufficient to perform update
        if grad_infnorm < eps / math.sqrt(batchsize):
            updated = False
            if verbose:
                print('No update, need more samples')
            #Log
            log_row['StepSize'] = 0.
            log_row['Time'] = time.time() - start
            if verbose:
                print(separator)
            logger.write_row(log_row, it)
            if verbose:
                print(separator)
            #Skip to next iteration (current trajectories are discarded)
            it += 1
            continue
        #Select step size
        if bound == 'bernstein':
            stepsize = ((grad_infnorm - eps / math.sqrt(batchsize)
                         - eps2 / batchsize)**2
                        / (2 * pen_coeff * (grad_infnorm + eps
                                            / math.sqrt(batchsize)
                                            + eps2 / batchsize)**2)).item()
        else:
            stepsize = (13 - 3 * math.sqrt(17)) / (4 * pen_coeff)
        log_row['StepSize'] = stepsize
        #Update policy parameters (single-coordinate update)
        #NOTE(review): new_params aliases params (no clone) — params is
        #mutated in place here; pre-existing behavior, confirm intended
        new_params = params
        new_params[coordinate] = (params[coordinate]
                                  + stepsize * grad[coordinate])
        policy.set_from_flat(new_params)
        updated = True
        updates += 1
        #Save parameters
        if save_params and it % save_params == 0:
            logger.save_params(params, it)
        #Next iteration
        log_row['Time'] = time.time() - start
        if verbose:
            print(separator)
        logger.write_row(log_row, it)
        if verbose:
            print(separator)
        it += 1
    #Save final parameters
    if save_params:
        logger.save_params(params, it)
    #Cleanup
    logger.close()
| 38.069113
| 84
| 0.520317
| 4,591
| 44,617
| 4.910477
| 0.086038
| 0.027147
| 0.020493
| 0.009759
| 0.848252
| 0.832461
| 0.819509
| 0.801499
| 0.796043
| 0.785043
| 0
| 0.0097
| 0.394625
| 44,617
| 1,171
| 85
| 38.101623
| 0.824954
| 0.205146
| 0
| 0.798507
| 1
| 0
| 0.067495
| 0.001323
| 0
| 0
| 0
| 0
| 0.002488
| 1
| 0.006219
| false
| 0
| 0.011194
| 0
| 0.017413
| 0.041045
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
43dfac4905bf7d662128ec3836bb585a4521c933
| 11,569
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/cdn/tests/latest/test_afd_rule_scenarios.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 3,287
|
2016-07-26T17:34:33.000Z
|
2022-03-31T09:52:13.000Z
|
src/azure-cli/azure/cli/command_modules/cdn/tests/latest/test_afd_rule_scenarios.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 19,206
|
2016-07-26T07:04:42.000Z
|
2022-03-31T23:57:09.000Z
|
src/azure-cli/azure/cli/command_modules/cdn/tests/latest/test_afd_rule_scenarios.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 2,575
|
2016-07-26T06:44:40.000Z
|
2022-03-31T22:56:06.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.testsdk import ResourceGroupPreparer, JMESPathCheck
from azure.cli.testsdk import ScenarioTest, record_only
from .afdx_scenario_mixin import CdnAfdScenarioMixin
from azure.mgmt.cdn.models import SkuName
class CdnAfdRuleScenarioTest(CdnAfdScenarioMixin, ScenarioTest):
    """Live scenario tests for AFD (Azure Front Door) rule sets and rules.

    The mixin supplies the ``afd_*_cmd`` helpers, which shell out to the
    CLI and validate the JSON output with the given ``JMESPathCheck``s.
    """

    @ResourceGroupPreparer()
    def test_rule_set_crud(self, resource_group):
        """Create/list/show/delete lifecycle of an AFD rule set."""
        profile_name = self.create_random_name(prefix='profile', length=16)
        # Listing rule sets before the profile exists must fail.
        self.afd_rule_set_list_cmd(resource_group, profile_name, expect_failure=True)
        self.afd_profile_create_cmd(resource_group, profile_name)
        # A freshly created profile has no rule sets.
        list_checks = [JMESPathCheck('length(@)', 0)]
        self.afd_rule_set_list_cmd(resource_group, profile_name, checks=list_checks)
        rule_set_name = self.create_random_name(prefix='ruleset', length=16)
        self.afd_rule_set_add_cmd(resource_group, rule_set_name, profile_name)
        # Exactly one rule set after the add.
        list_checks = [JMESPathCheck('length(@)', 1)]
        self.afd_rule_set_list_cmd(resource_group, profile_name, checks=list_checks)
        show_checks = [JMESPathCheck('name', rule_set_name),
                       JMESPathCheck('provisioningState', 'Succeeded')]
        self.afd_rule_set_show_cmd(resource_group, rule_set_name, profile_name, checks=show_checks)
        # Delete and verify the rule set is gone (show fails, list is empty).
        self.afd_rule_set_delete_cmd(resource_group, rule_set_name, profile_name)
        self.afd_rule_set_show_cmd(resource_group, rule_set_name, profile_name, expect_failure=True)
        list_checks = [JMESPathCheck('length(@)', 0)]
        self.afd_rule_set_list_cmd(resource_group, profile_name, checks=list_checks)

    @ResourceGroupPreparer()
    def test_afd_rule_crud(self, resource_group):
        """Rule CRUD plus incremental add/remove of conditions and actions."""
        profile_name = self.create_random_name(prefix='profile', length=16)
        self.afd_profile_create_cmd(resource_group, profile_name)
        rule_set_name = self.create_random_name(prefix='ruleset', length=16)
        self.afd_rule_set_add_cmd(resource_group, rule_set_name, profile_name)
        # New rule set starts with no rules.
        rule_list_checks = [JMESPathCheck('length(@)', 0)]
        self.afd_rule_list_cmd(resource_group, rule_set_name, profile_name, checks=rule_list_checks)

        # Rule r1: one RemoteAddress/GeoMatch condition, one CacheExpiration action.
        rule_name = 'r1'
        rule_checks = [JMESPathCheck('order', 1),
                       JMESPathCheck('name', rule_name),
                       JMESPathCheck('length(conditions)', 1),
                       JMESPathCheck('conditions[0].name', "RemoteAddress"),
                       JMESPathCheck('conditions[0].parameters.operator', 'GeoMatch'),
                       JMESPathCheck('conditions[0].parameters.matchValues[0]', 'TH'),
                       JMESPathCheck('length(actions)', 1),
                       JMESPathCheck('actions[0].name', "CacheExpiration"),
                       JMESPathCheck('actions[0].parameters.cacheBehavior', 'BypassCache')]
        self.afd_rule_add_cmd(resource_group,
                              rule_set_name,
                              rule_name,
                              profile_name,
                              options='--match-variable RemoteAddress --operator GeoMatch --match-values "TH" --action-name CacheExpiration --cache-behavior BypassCache --order 1')
        self.afd_rule_show_cmd(resource_group,
                               rule_set_name,
                               rule_name,
                               profile_name,
                               checks=rule_checks)

        # Rule r2: RequestScheme condition with a UrlRedirect action.
        rule_name1 = 'r2'
        rule_checks = [JMESPathCheck('order', 2),
                       JMESPathCheck('name', rule_name1),
                       JMESPathCheck('length(conditions)', 1),
                       JMESPathCheck('conditions[0].name', "RequestScheme"),
                       JMESPathCheck('conditions[0].parameters.matchValues[0]', 'HTTP'),
                       JMESPathCheck('length(actions)', 1),
                       JMESPathCheck('actions[0].name', "UrlRedirect"),
                       JMESPathCheck('actions[0].parameters.redirectType', "Moved"),
                       JMESPathCheck('actions[0].parameters.destinationProtocol', 'Https')]
        self.afd_rule_add_cmd(resource_group,
                              rule_set_name,
                              rule_name1,
                              profile_name,
                              options='--match-variable RequestScheme --match-values "HTTP" --action-name UrlRedirect --redirect-protocol Https --redirect-type Moved --order 2')
        self.afd_rule_show_cmd(resource_group,
                               rule_set_name,
                               rule_name1,
                               profile_name,
                               checks=rule_checks)
        rule_list_checks = [JMESPathCheck('length(@)', 2)]
        self.afd_rule_list_cmd(resource_group, rule_set_name, profile_name, checks=rule_list_checks)

        # Delete r2; only r1 remains.
        self.afd_rule_delete_cmd(resource_group, rule_set_name, rule_name1, profile_name)
        rule_list_checks = [JMESPathCheck('length(@)', 1)]
        self.afd_rule_list_cmd(resource_group, rule_set_name, profile_name, checks=rule_list_checks)

        # Append a second RemoteAddress condition (TH + US) to r1;
        # the new condition lands at index 1.
        rule_checks = [JMESPathCheck('order', 1),
                       JMESPathCheck('name', rule_name),
                       JMESPathCheck('length(conditions)', 2),
                       JMESPathCheck('conditions[1].name', "RemoteAddress"),
                       JMESPathCheck('conditions[1].parameters.operator', 'GeoMatch'),
                       JMESPathCheck('conditions[1].parameters.matchValues[0]', 'TH'),
                       JMESPathCheck('conditions[1].parameters.matchValues[1]', 'US'),
                       JMESPathCheck('length(actions)', 1),
                       JMESPathCheck('actions[0].name', "CacheExpiration"),
                       JMESPathCheck('actions[0].parameters.cacheBehavior', 'BypassCache')]
        self.afd_rule_add_condition_cmd(resource_group,
                                        rule_set_name,
                                        rule_name,
                                        profile_name,
                                        options='--match-variable RemoteAddress '
                                                '--operator GeoMatch --match-values "TH" "US"')
        self.afd_rule_show_cmd(resource_group,
                               rule_set_name,
                               rule_name,
                               profile_name,
                               checks=rule_checks)

        # Append a UrlRewrite action to r1; it lands at actions[1].
        rule_checks = [JMESPathCheck('order', 1),
                       JMESPathCheck('name', rule_name),
                       JMESPathCheck('length(conditions)', 2),
                       JMESPathCheck('conditions[1].name', "RemoteAddress"),
                       JMESPathCheck('conditions[1].parameters.operator', 'GeoMatch'),
                       JMESPathCheck('conditions[1].parameters.matchValues[0]', 'TH'),
                       JMESPathCheck('conditions[1].parameters.matchValues[1]', 'US'),
                       JMESPathCheck('length(actions)', 2),
                       JMESPathCheck('actions[0].name', "CacheExpiration"),
                       JMESPathCheck('actions[0].parameters.cacheBehavior', 'BypassCache'),
                       JMESPathCheck('actions[1].name', "UrlRewrite"),
                       JMESPathCheck('actions[1].parameters.sourcePattern', '/abc'),
                       JMESPathCheck('actions[1].parameters.destination', '/def')]
        self.afd_rule_add_action_cmd(resource_group,
                                     rule_set_name,
                                     rule_name,
                                     profile_name,
                                     options='--action-name "UrlRewrite" '
                                             '--source-pattern "/abc" --destination "/def"')
        self.afd_rule_show_cmd(resource_group,
                               rule_set_name,
                               rule_name,
                               profile_name,
                               checks=rule_checks)

        # Remove the first condition; the TH+US condition shifts to index 0.
        rule_checks = [JMESPathCheck('order', 1),
                       JMESPathCheck('name', rule_name),
                       JMESPathCheck('length(conditions)', 1),
                       JMESPathCheck('conditions[0].name', "RemoteAddress"),
                       JMESPathCheck('conditions[0].parameters.operator', 'GeoMatch'),
                       JMESPathCheck('conditions[0].parameters.matchValues[0]', 'TH'),
                       JMESPathCheck('conditions[0].parameters.matchValues[1]', 'US'),
                       JMESPathCheck('length(actions)', 2),
                       JMESPathCheck('actions[0].name', "CacheExpiration"),
                       JMESPathCheck('actions[0].parameters.cacheBehavior', 'BypassCache'),
                       JMESPathCheck('actions[1].name', "UrlRewrite"),
                       JMESPathCheck('actions[1].parameters.sourcePattern', '/abc'),
                       JMESPathCheck('actions[1].parameters.destination', '/def')]
        self.afd_rule_remove_condition_cmd(resource_group,
                                           rule_set_name,
                                           rule_name,
                                           profile_name,
                                           0)
        self.afd_rule_show_cmd(resource_group,
                               rule_set_name,
                               rule_name,
                               profile_name,
                               checks=rule_checks)

        # Remove the first action; UrlRewrite shifts to actions[0].
        rule_checks = [JMESPathCheck('order', 1),
                       JMESPathCheck('name', rule_name),
                       JMESPathCheck('length(conditions)', 1),
                       JMESPathCheck('conditions[0].name', "RemoteAddress"),
                       JMESPathCheck('conditions[0].parameters.operator', 'GeoMatch'),
                       JMESPathCheck('conditions[0].parameters.matchValues[0]', 'TH'),
                       JMESPathCheck('conditions[0].parameters.matchValues[1]', 'US'),
                       JMESPathCheck('length(actions)', 1),
                       JMESPathCheck('actions[0].name', "UrlRewrite"),
                       JMESPathCheck('actions[0].parameters.sourcePattern', '/abc'),
                       JMESPathCheck('actions[0].parameters.destination', '/def')]
        self.afd_rule_remove_action_cmd(resource_group,
                                        rule_set_name,
                                        rule_name,
                                        profile_name,
                                        0)
        self.afd_rule_show_cmd(resource_group,
                               rule_set_name,
                               rule_name,
                               profile_name,
                               checks=rule_checks)

        # Final cleanup: delete the rule, verify the set is empty, delete the set.
        self.afd_rule_delete_cmd(resource_group,
                                 rule_set_name,
                                 rule_name,
                                 profile_name)
        rule_list_checks = [JMESPathCheck('length(@)', 0)]
        self.afd_rule_list_cmd(resource_group, rule_set_name, profile_name, checks=rule_list_checks)
        self.afd_rule_set_delete_cmd(resource_group, rule_set_name, profile_name)
| 55.620192
| 180
| 0.535742
| 1,005
| 11,569
| 5.869652
| 0.110448
| 0.045092
| 0.08137
| 0.08137
| 0.849127
| 0.81641
| 0.805221
| 0.796576
| 0.767757
| 0.751992
| 0
| 0.012638
| 0.350246
| 11,569
| 207
| 181
| 55.888889
| 0.772117
| 0.029043
| 0
| 0.767442
| 0
| 0.011628
| 0.206537
| 0.086747
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011628
| false
| 0.02907
| 0.023256
| 0
| 0.040698
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78eaf511019a09014f3676210ddb5a41e904c13d
| 116
|
py
|
Python
|
yawc/utils/dates.py
|
yekibud/yawc
|
973c086af18871c6e709acc29007bb512d0c3d17
|
[
"BSD-3-Clause"
] | null | null | null |
yawc/utils/dates.py
|
yekibud/yawc
|
973c086af18871c6e709acc29007bb512d0c3d17
|
[
"BSD-3-Clause"
] | null | null | null |
yawc/utils/dates.py
|
yekibud/yawc
|
973c086af18871c6e709acc29007bb512d0c3d17
|
[
"BSD-3-Clause"
] | null | null | null |
from datetime import datetime, timezone

from pytz import utc
def utcnow():
    """Return the current time as a timezone-aware UTC datetime.

    ``datetime.utcnow()`` is deprecated and returns a *naive* datetime
    that then needs a ``replace(tzinfo=...)`` step; ``datetime.now``
    with an explicit UTC tzinfo yields an aware timestamp directly.
    """
    return datetime.now(timezone.utc)
| 16.571429
| 48
| 0.758621
| 16
| 116
| 5.5
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155172
| 116
| 6
| 49
| 19.333333
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
78fe262b8075c0a3e327ac7b7f88aae580b8c26f
| 3,161
|
py
|
Python
|
benchmarks/predecessors_successors.py
|
mtreinish/retworkx-bench
|
3aeaf80cc7a8ba11e7db9d77b1d91857b2568cd3
|
[
"Apache-2.0"
] | null | null | null |
benchmarks/predecessors_successors.py
|
mtreinish/retworkx-bench
|
3aeaf80cc7a8ba11e7db9d77b1d91857b2568cd3
|
[
"Apache-2.0"
] | null | null | null |
benchmarks/predecessors_successors.py
|
mtreinish/retworkx-bench
|
3aeaf80cc7a8ba11e7db9d77b1d91857b2568cd3
|
[
"Apache-2.0"
] | null | null | null |
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
import os
import itertools
import retworkx
from .gr_parser import parse_gr_from_file
class PredecessorsSuccessorsUSANYCRoadGraph:
    """Benchmarks for predecessor/successor and connectivity queries on the
    New York City road network (``USA-road-d.NY.gr``), parsed as a directed
    graph. Methods named ``time_*`` are timed by the benchmark harness.
    """

    def setup(self):
        # Parse the bundled .gr road-network file; the graph is reused by
        # every timed method below.
        gr_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "graphs", "USA-road-d.NY.gr"
        )
        self.graph = parse_gr_from_file(gr_file, directed=True)

    def time_bfs_successors(self):
        # 10240 is an arbitrary fixed node id so timings stay comparable.
        retworkx.bfs_successors(self.graph, 10240)

    def time_successors(self):
        self.graph.successors(10240)

    def time_predecessors(self):
        self.graph.predecessors(10240)

    def time_ancestors(self):
        retworkx.ancestors(self.graph, 10240)

    def time_descendants(self):
        retworkx.descendants(self.graph, 10240)

    def time_number_weakly_connected_components(self):
        retworkx.number_weakly_connected_components(self.graph)

    def time_strongly_connected_components(self):
        retworkx.strongly_connected_components(self.graph)
class PredecessorsSuccessorsRoadGraphWesternUSA:
    """Same query benchmarks as the NYC class, but on the larger Western-USA
    road network (``USA-road-t.W.gr.gz``), parsed as a directed graph.
    """

    def setup(self):
        # Parse the gzipped .gr file once; all timed methods reuse the graph.
        gr_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "graphs", "USA-road-t.W.gr.gz"
        )
        self.graph = parse_gr_from_file(gr_file, directed=True)

    def time_bfs_successors(self):
        # 10240 is an arbitrary fixed node id so timings stay comparable.
        retworkx.bfs_successors(self.graph, 10240)

    def time_successors(self):
        self.graph.successors(10240)

    def time_predecessors(self):
        self.graph.predecessors(10240)

    def time_ancestors(self):
        retworkx.ancestors(self.graph, 10240)

    def time_descendants(self):
        retworkx.descendants(self.graph, 10240)

    def time_number_weakly_connected_components(self):
        retworkx.number_weakly_connected_components(self.graph)

    def time_strongly_connected_components(self):
        retworkx.strongly_connected_components(self.graph)
class PredecessorsSuccessorsRoadGraphFullUSA:
    """Same query benchmarks on the full continental USA road network
    (``USA-road-t.USA.gr.gz``), parsed as a directed graph.
    """

    # Per-benchmark timeout in seconds; the full-USA graph is large enough
    # that the default harness timeout is presumably too short — TODO confirm.
    timeout = 600

    def setup(self):
        gr_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "graphs", "USA-road-t.USA.gr.gz"
        )
        self.graph = parse_gr_from_file(gr_file, directed=True)

    def time_bfs_successors(self):
        # 10240 is an arbitrary fixed node id so timings stay comparable.
        retworkx.bfs_successors(self.graph, 10240)

    def time_successors(self):
        self.graph.successors(10240)

    def time_predecessors(self):
        self.graph.predecessors(10240)

    def time_ancestors(self):
        retworkx.ancestors(self.graph, 10240)

    def time_descendants(self):
        retworkx.descendants(self.graph, 10240)

    def time_number_weakly_connected_components(self):
        retworkx.number_weakly_connected_components(self.graph)

    def time_strongly_connected_components(self):
        retworkx.strongly_connected_components(self.graph)
| 30.68932
| 88
| 0.719076
| 402
| 3,161
| 5.435323
| 0.236318
| 0.098856
| 0.08238
| 0.070023
| 0.756064
| 0.756064
| 0.756064
| 0.756064
| 0.756064
| 0.756064
| 0
| 0.032094
| 0.191711
| 3,161
| 102
| 89
| 30.990196
| 0.823092
| 0.126859
| 0
| 0.784615
| 0
| 0
| 0.026163
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.369231
| false
| 0
| 0.061538
| 0
| 0.492308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60097b547230a30b1782f45efba885a831c4e94b
| 63
|
py
|
Python
|
pytorch_msssim_ag/__init__.py
|
agr17/pytorch-msssim
|
69aec4113ccceafa5568d1191e98c1db525c0c0f
|
[
"MIT"
] | null | null | null |
pytorch_msssim_ag/__init__.py
|
agr17/pytorch-msssim
|
69aec4113ccceafa5568d1191e98c1db525c0c0f
|
[
"MIT"
] | null | null | null |
pytorch_msssim_ag/__init__.py
|
agr17/pytorch-msssim
|
69aec4113ccceafa5568d1191e98c1db525c0c0f
|
[
"MIT"
] | null | null | null |
from .ssim import ssim, ms_ssim, SSIM, MS_SSIM, ssim_components
| 63
| 63
| 0.809524
| 11
| 63
| 4.363636
| 0.454545
| 0.25
| 0.416667
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 63
| 1
| 63
| 63
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
60222158527239ba47f746cbb9faa181cea5a956
| 284
|
py
|
Python
|
gitcheck/contexts.py
|
TutorSim/gitcheck
|
4c6182e8bd7b869c171ec98bc612012c6e192c04
|
[
"MIT"
] | null | null | null |
gitcheck/contexts.py
|
TutorSim/gitcheck
|
4c6182e8bd7b869c171ec98bc612012c6e192c04
|
[
"MIT"
] | null | null | null |
gitcheck/contexts.py
|
TutorSim/gitcheck
|
4c6182e8bd7b869c171ec98bc612012c6e192c04
|
[
"MIT"
] | null | null | null |
import sys
import os

# Make the sibling source trees importable by scripts in this package.
# The directory of this file is computed once instead of three times, and
# the paths are inserted in the same order as before, so the resulting
# sys.path precedence is unchanged ('../' first, then '../model', '../evsim').
_here = os.path.dirname(__file__)
for _rel in ('../evsim', '../model', '../'):
    sys.path.insert(0, os.path.abspath(os.path.join(_here, _rel)))
| 40.571429
| 88
| 0.707746
| 48
| 284
| 3.9375
| 0.25
| 0.285714
| 0.206349
| 0.222222
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0
| 0.011111
| 0.049296
| 284
| 6
| 89
| 47.333333
| 0.688889
| 0
| 0
| 0
| 0
| 0
| 0.066901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
60321a286c4efe4da183fc7b63d213ed4a81bd53
| 12,747
|
py
|
Python
|
core/models.py
|
eufmike/fibsem_seg_ml
|
81a9e437fd7131174631567b8b9afde4c39a2beb
|
[
"CC-BY-4.0"
] | null | null | null |
core/models.py
|
eufmike/fibsem_seg_ml
|
81a9e437fd7131174631567b8b9afde4c39a2beb
|
[
"CC-BY-4.0"
] | null | null | null |
core/models.py
|
eufmike/fibsem_seg_ml
|
81a9e437fd7131174631567b8b9afde4c39a2beb
|
[
"CC-BY-4.0"
] | null | null | null |
#%%
from tensorflow import keras
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Input, concatenate, Conv2D, Conv2DTranspose, Dropout, Flatten, Dense, Activation, Layer, Reshape, Permute, Lambda
from tensorflow.keras.layers import Conv3D, MaxPool3D, ZeroPadding3D
from tensorflow.keras.layers import Conv2D, MaxPool2D, UpSampling2D, ZeroPadding2D
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.optimizers import Adam, Adadelta
from tensorflow.keras import backend as K
from tensorflow.summary import scalar
def UNet(shape,
         nClasses=1,
         loss="binary_crossentropy",
         lr=1e-5,
         metrics=['accuracy']):
    """Build and compile a classic 4-level U-Net.

    Args:
        shape: (height, width) of the input images (index 0 and 1 are used).
        nClasses: channel count of the Input layer; the final layer always
            produces a single sigmoid map regardless of this value.
        loss: loss passed to ``model.compile``.
        lr: learning rate for the Adam optimizer.
        metrics: metrics list for ``model.compile``.
            NOTE(review): mutable default argument — shared across calls.

    Returns:
        The compiled Keras ``Model``; ``model.summary()`` is printed as a
        side effect.
    """
    IMG_HEIGHT = shape[0]
    IMG_WIDTH = shape[1]
    IMG_CHANNELS = nClasses
    # Build U-Net model
    inputs = Input((IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS))
    # Encoder: two 3x3 convs per level, filters doubling, 2x2 max-pooling.
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
    pool1 = MaxPool2D(pool_size=(2, 2))(conv1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
    pool2 = MaxPool2D(pool_size=(2, 2))(conv2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
    pool3 = MaxPool2D(pool_size=(2, 2))(conv3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
    # Dropout regularization at the two deepest levels.
    drop4 = Dropout(0.5)(conv4)
    pool4 = MaxPool2D(pool_size=(2, 2))(drop4)
    # Bottleneck.
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
    drop5 = Dropout(0.5)(conv5)
    # Decoder: upsample, 2x2 conv, concatenate with the matching encoder level.
    up6 = Conv2D(512, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
    merge6 = concatenate([drop4,up6])
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
    up7 = Conv2D(256, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv6))
    merge7 = concatenate([conv3, up7])
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge7)
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
    up8 = Conv2D(128, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
    merge8 = concatenate([conv2, up8])
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge8)
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
    up9 = Conv2D(64, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
    merge9 = concatenate([conv1, up9])
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge9)
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    # NOTE(review): output_shape is never used — candidate for removal.
    output_shape = Model(inputs , conv9 ).output_shape
    # Single-channel sigmoid head for binary segmentation.
    output = Conv2D(1, 1, activation = 'sigmoid')(conv9)
    model = Model(inputs, output)
    # NOTE(review): Adam(lr=...) is the legacy keyword; newer Keras releases
    # use learning_rate — confirm against the pinned TF version.
    model.compile(loss=loss, optimizer = Adam(lr = lr) , metrics=metrics)
    model.summary()
    return model
def UNet_hp(shape,
            hparams,
            hparams_list,
            nClasses=1,
            loss="binary_crossentropy",
            lr=1e-5,
            metrics=['accuracy'], ):
    """Hyperparameter-search variant of :func:`UNet`.

    Identical architecture to ``UNet`` except the dropout rate of the two
    deepest levels is taken from ``hparams`` instead of being fixed at 0.5.

    Args:
        shape: (height, width) of the input images.
        hparams: mapping from hyperparameter keys to concrete values.
        hparams_list: sequence of hyperparameter keys; only index 0
            (the dropout rate) is read here.
        nClasses: channel count of the Input layer.
        loss, lr, metrics: forwarded to ``model.compile``.
            NOTE(review): mutable default for ``metrics`` — shared across calls.

    Returns:
        The compiled Keras ``Model``; ``model.summary()`` is printed.
    """
    IMG_HEIGHT = shape[0]
    IMG_WIDTH = shape[1]
    IMG_CHANNELS = nClasses
    # The only tunable hyperparameter consumed by this builder.
    HP_DROPOUT = hparams_list[0]
    # Build U-Net model
    inputs = Input((IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS))
    # Encoder: two 3x3 convs per level, filters doubling, 2x2 max-pooling.
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
    pool1 = MaxPool2D(pool_size=(2, 2))(conv1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
    pool2 = MaxPool2D(pool_size=(2, 2))(conv2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
    pool3 = MaxPool2D(pool_size=(2, 2))(conv3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
    # Dropout rate comes from the hyperparameter search space.
    drop4 = Dropout(hparams[HP_DROPOUT])(conv4)
    pool4 = MaxPool2D(pool_size=(2, 2))(drop4)
    # Bottleneck.
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
    drop5 = Dropout(hparams[HP_DROPOUT])(conv5)
    # Decoder: upsample, 2x2 conv, concatenate with the matching encoder level.
    up6 = Conv2D(512, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
    merge6 = concatenate([drop4,up6])
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
    up7 = Conv2D(256, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv6))
    merge7 = concatenate([conv3, up7])
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge7)
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
    up8 = Conv2D(128, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
    merge8 = concatenate([conv2, up8])
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge8)
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
    up9 = Conv2D(64, 2,
                 activation = 'relu',
                 padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
    merge9 = concatenate([conv1, up9])
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge9)
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    # NOTE(review): output_shape is never used — candidate for removal.
    output_shape = Model(inputs , conv9 ).output_shape
    # Single-channel sigmoid head for binary segmentation.
    output = Conv2D(1, 1, activation = 'sigmoid')(conv9)
    model = Model(inputs, output)
    # NOTE(review): Adam(lr=...) is the legacy keyword; newer Keras releases
    # use learning_rate — confirm against the pinned TF version.
    model.compile(loss=loss, optimizer = Adam(lr = lr) , metrics=metrics)
    model.summary()
    return model
# ---------------------------------------------------------------------
# Blocks
# ---------------------------------------------------------------------
def conv2d_block(
    inputs,
    use_batch_norm = True,
    dropout=0.3,
    filters=16,
    kernel_size=(3,3),
    activation='relu',
    kernel_initializer='he_normal',
    padding='same'):
    """Apply two identical Conv2D layers with optional batch-norm after each
    and optional dropout between them; return the resulting tensor.
    """
    def _conv(tensor):
        # Both halves of the block share the exact same conv settings.
        return Conv2D(filters, kernel_size, activation=activation,
                      kernel_initializer=kernel_initializer,
                      padding=padding)(tensor)

    def _maybe_norm(tensor):
        # the kernel is relu, we can skip "scale step" in batch normalization
        if use_batch_norm:
            tensor = BatchNormalization(axis=-1, center=True, scale=False)(tensor)
        return tensor

    x = _maybe_norm(_conv(inputs))
    if dropout > 0.0:
        x = Dropout(dropout)(x)
    return _maybe_norm(_conv(x))
def get_crop_shape(target, refer):
    """Return ((top, bottom), (left, right)) crop amounts that shrink the
    spatial dims of *target* (index 1 = height, index 2 = width) down to
    those of *refer*. An odd difference puts the extra pixel on the
    second side.
    """
    def _halves(diff):
        # Split a non-negative difference into floor/ceil halves.
        assert (diff >= 0)
        low = diff // 2
        return low, diff - low

    # height, the 2nd dimension
    height_crop = _halves(target[1] - refer[1])
    # width, the 3rd dimension
    width_crop = _halves(target[2] - refer[2])
    return height_crop, width_crop
# ---------------------------------------------------------------------
def vanilla_unet(
    shape,
    dropout = 0.5,
    num_layers = 4,
    num_classes = 1,
    filters = 64,
    output_activation = 'sigmoid', # 'sigmoid' or 'softmax'
    loss = "binary_crossentropy",
    lr = 1e-5,
    metrics = ['accuracy'],
    summary = True,
    use_batch_norm = False,
    ):
    """Build and compile a configurable-depth U-Net from conv2d_block units.

    Args:
        shape: (height, width) of the input images.
        dropout: dropout rate used inside every conv block and before the
            bottleneck.
        num_layers: number of encoder/decoder levels.
        num_classes: channels of both the Input layer and the output head.
        filters: filter count of the first level (doubled per level).
        output_activation: activation of the 1x1 output conv.
        loss, lr, metrics: forwarded to ``model.compile``.
            NOTE(review): mutable default for ``metrics`` — shared across calls.
        summary: when truthy, print ``model.summary()``.
        use_batch_norm: forwarded to each conv2d_block.

    Returns:
        The compiled Keras ``Model``.
    """
    # Build U-Net model
    inputs = Input((shape[0], shape[1], num_classes))
    x = inputs
    # Encoder
    down_layers = []
    for l in range(num_layers):
        # add conv block
        x = conv2d_block(inputs=x, filters=filters, use_batch_norm=use_batch_norm, dropout=dropout, padding='same')
        # Remember the pre-pooling tensor for the skip connection.
        down_layers.append(x)
        # MaxPooling
        x = MaxPool2D((2, 2))(x)
        filters = filters*2 # double the number of filters with each layer
    # Bottleneck.
    x = Dropout(dropout)(x)
    x = conv2d_block(inputs=x, filters=filters, use_batch_norm=use_batch_norm, dropout=dropout, padding='same')
    # Decoder
    for conv in reversed(down_layers):
        filters //= 2 # decreasing number of filters with each layer
        # UpSampling
        x = UpSampling2D(size = (2,2))(x)
        # Skip connection with the matching encoder level.
        x = concatenate([x, conv])
        x = conv2d_block(inputs=x, filters=filters, use_batch_norm=use_batch_norm, dropout=dropout, padding='same')
    outputs = Conv2D(num_classes, (1, 1), activation=output_activation)(x)
    model = Model(inputs=[inputs], outputs=[outputs])
    # compile model
    # NOTE(review): Adam(lr=...) is the legacy keyword; newer Keras uses
    # learning_rate — confirm against the pinned TF version.
    model.compile(loss=loss, optimizer = Adam(lr = lr) , metrics=metrics)
    # print summary
    if summary :
        model.summary()
    return model
def vanilla_unet_nodrop(
    shape,
    dropout = 0.5,
    num_layers = 4,
    num_classes = 1,
    filters = 64,
    output_activation = 'sigmoid', # 'sigmoid' or 'softmax'
    loss = "binary_crossentropy",
    lr = 1e-5,
    metrics = ['accuracy'],
    summary = True,
    use_batch_norm = False,
    ):
    """Variant of :func:`vanilla_unet` with dropout disabled inside the
    encoder/decoder conv blocks (``dropout=0`` there); the ``dropout``
    argument is still applied before and inside the bottleneck.

    Same parameters and return value as ``vanilla_unet``.
    NOTE(review): mutable default for ``metrics`` — shared across calls.
    """
    # Build U-Net model
    inputs = Input((shape[0], shape[1], num_classes))
    x = inputs
    # Encoder
    down_layers = []
    for l in range(num_layers):
        # add conv block
        x = conv2d_block(inputs=x, filters=filters, use_batch_norm=use_batch_norm, dropout=0, padding='same')
        # Remember the pre-pooling tensor for the skip connection.
        down_layers.append(x)
        # MaxPooling
        x = MaxPool2D((2, 2))(x)
        filters = filters*2 # double the number of filters with each layer
    # Bottleneck: the only place the dropout argument is used.
    x = Dropout(dropout)(x)
    x = conv2d_block(inputs=x, filters=filters, use_batch_norm=use_batch_norm, dropout=dropout, padding='same')
    # Decoder
    for conv in reversed(down_layers):
        filters //= 2 # decreasing number of filters with each layer
        # UpSampling
        x = UpSampling2D(size = (2,2))(x)
        # Skip connection with the matching encoder level.
        x = concatenate([x, conv])
        x = conv2d_block(inputs=x, filters=filters, use_batch_norm=use_batch_norm, dropout=0, padding='same')
    outputs = Conv2D(num_classes, (1, 1), activation=output_activation)(x)
    model = Model(inputs=[inputs], outputs=[outputs])
    # compile model
    # NOTE(review): Adam(lr=...) is the legacy keyword; newer Keras uses
    # learning_rate — confirm against the pinned TF version.
    model.compile(loss=loss, optimizer = Adam(lr = lr) , metrics=metrics)
    # print summary
    if summary :
        model.summary()
    return model
| 39.586957
| 149
| 0.613242
| 1,520
| 12,747
| 5.013158
| 0.111842
| 0.073622
| 0.112205
| 0.147638
| 0.886483
| 0.869423
| 0.869423
| 0.86378
| 0.854331
| 0.854331
| 0
| 0.054115
| 0.234565
| 12,747
| 321
| 150
| 39.71028
| 0.726863
| 0.068173
| 0
| 0.788288
| 0
| 0
| 0.078092
| 0
| 0
| 0
| 0
| 0
| 0.009009
| 1
| 0.027027
| false
| 0
| 0.040541
| 0
| 0.094595
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
604095d1f25400814fa249c4825a61e6525e42cf
| 10,617
|
py
|
Python
|
test/vanilla/Expected/AcceptanceTests/BodyBoolean/bodyboolean/operations/bool_model_operations.py
|
iscai-msft/autorest.python
|
a9f38dd762fbc046ce6197bfabea2f56045d2957
|
[
"MIT"
] | null | null | null |
test/vanilla/Expected/AcceptanceTests/BodyBoolean/bodyboolean/operations/bool_model_operations.py
|
iscai-msft/autorest.python
|
a9f38dd762fbc046ce6197bfabea2f56045d2957
|
[
"MIT"
] | null | null | null |
test/vanilla/Expected/AcceptanceTests/BodyBoolean/bodyboolean/operations/bool_model_operations.py
|
iscai-msft/autorest.python
|
a9f38dd762fbc046ce6197bfabea2f56045d2957
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class BoolModelOperations(object):
"""BoolModelOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
    def __init__(self, client, config, serializer, deserializer):
        # AutoRest-generated wiring: keep references to the service client
        # and the (de)serialization helpers used by every operation below.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config
    def get_true(
            self, custom_headers=None, raw=False, **operation_config):
        """Get true Boolean value.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: bool or ClientRawResponse if raw=true
        :rtype: bool or ~msrest.pipeline.ClientRawResponse
        :raises: :class:`ErrorException<bodyboolean.models.ErrorException>`
        """
        # NOTE: AutoRest-generated code — edits will be lost on regeneration.
        # Construct URL
        url = self.get_true.metadata['url']
        # Construct parameters
        query_parameters = {}
        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if custom_headers:
            header_parameters.update(custom_headers)
        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)
        # 200 is the only success status this operation accepts.
        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('bool', response)
        # raw=True wraps the deserialized body with the raw HTTP response.
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    # Route metadata attached to the operation for URL construction.
    get_true.metadata = {'url': '/bool/true'}
def put_true(
self, custom_headers=None, raw=False, **operation_config):
"""Set Boolean value true.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`ErrorException<bodyboolean.models.ErrorException>`
"""
bool_body = True
# Construct URL
url = self.put_true.metadata['url']
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(bool_body, 'bool')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
put_true.metadata = {'url': '/bool/true'}
def get_false(
self, custom_headers=None, raw=False, **operation_config):
"""Get false Boolean value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: bool or ClientRawResponse if raw=true
:rtype: bool or ~msrest.pipeline.ClientRawResponse
:raises: :class:`ErrorException<bodyboolean.models.ErrorException>`
"""
# Construct URL
url = self.get_false.metadata['url']
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('bool', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_false.metadata = {'url': '/bool/false'}
def put_false(
self, custom_headers=None, raw=False, **operation_config):
"""Set Boolean value false.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`ErrorException<bodyboolean.models.ErrorException>`
"""
bool_body = False
# Construct URL
url = self.put_false.metadata['url']
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(bool_body, 'bool')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
put_false.metadata = {'url': '/bool/false'}
def get_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get null Boolean value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: bool or ClientRawResponse if raw=true
:rtype: bool or ~msrest.pipeline.ClientRawResponse
:raises: :class:`ErrorException<bodyboolean.models.ErrorException>`
"""
# Construct URL
url = self.get_null.metadata['url']
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('bool', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_null.metadata = {'url': '/bool/null'}
def get_invalid(
self, custom_headers=None, raw=False, **operation_config):
"""Get invalid Boolean value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: bool or ClientRawResponse if raw=true
:rtype: bool or ~msrest.pipeline.ClientRawResponse
:raises: :class:`ErrorException<bodyboolean.models.ErrorException>`
"""
# Construct URL
url = self.get_invalid.metadata['url']
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('bool', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_invalid.metadata = {'url': '/bool/invalid'}
| 35.989831
| 90
| 0.651408
| 1,087
| 10,617
| 6.206992
| 0.110396
| 0.046243
| 0.035571
| 0.029643
| 0.882763
| 0.876241
| 0.860234
| 0.860234
| 0.860234
| 0.829702
| 0
| 0.004164
| 0.253461
| 10,617
| 294
| 91
| 36.112245
| 0.847086
| 0.360836
| 0
| 0.768
| 0
| 0
| 0.047794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056
| false
| 0
| 0.016
| 0
| 0.168
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
607eb2da6ded09d3c59231576898eea379e9cb61
| 3,263
|
py
|
Python
|
pandas/tests/sparse/frame/test_indexing.py
|
henriqueribeiro/pandas
|
996f361f8e6986ea1c65ccb164a4c585e1f4a027
|
[
"BSD-3-Clause"
] | 4
|
2021-03-02T19:57:18.000Z
|
2021-06-20T19:23:57.000Z
|
pandas/tests/sparse/frame/test_indexing.py
|
henriqueribeiro/pandas
|
996f361f8e6986ea1c65ccb164a4c585e1f4a027
|
[
"BSD-3-Clause"
] | 3
|
2018-09-24T22:09:28.000Z
|
2018-10-01T21:10:00.000Z
|
pandas/tests/sparse/frame/test_indexing.py
|
henriqueribeiro/pandas
|
996f361f8e6986ea1c65ccb164a4c585e1f4a027
|
[
"BSD-3-Clause"
] | 1
|
2021-06-21T07:51:29.000Z
|
2021-06-21T07:51:29.000Z
|
import pytest
import numpy as np
from pandas import SparseDataFrame, DataFrame
from pandas.util import testing as tm
# Skip the entire module: SparseDataFrame construction is broken (GH 17386).
pytestmark = pytest.mark.skip("Wrong SparseBlock initialization (GH 17386)")
@pytest.mark.parametrize('data', [
    [[1, 1], [2, 2], [3, 3], [4, 4], [0, 0]],
    [[1.0, 1.0], [2.0, 2.0], [3.0, 3.0], [4.0, 4.0], [np.nan, np.nan]],
    [
        [1.0, 1.0 + 1.0j],
        [2.0 + 2.0j, 2.0],
        [3.0, 3.0 + 3.0j],
        [4.0 + 4.0j, 4.0],
        [np.nan, np.nan]
    ]
])
@pytest.mark.xfail(reason='Wrong SparseBlock initialization (GH#17386)',
                   strict=True)
def test_where_with_numeric_data(data):
    # Regression check for GH 17386: SparseDataFrame.where must agree with
    # the equivalent dense DataFrame.where result.
    threshold = 1.5

    sparse_df = SparseDataFrame(data)
    outcome = sparse_df.where(sparse_df > threshold)

    dense_df = DataFrame(data)
    expected_dense = dense_df.where(dense_df > threshold)
    expected_sparse = SparseDataFrame(expected_dense)

    tm.assert_frame_equal(outcome, expected_dense)
    tm.assert_sp_frame_equal(outcome, expected_sparse)
@pytest.mark.parametrize('data', [
    [[1, 1], [2, 2], [3, 3], [4, 4], [0, 0]],
    [[1.0, 1.0], [2.0, 2.0], [3.0, 3.0], [4.0, 4.0], [np.nan, np.nan]],
    [
        [1.0, 1.0 + 1.0j],
        [2.0 + 2.0j, 2.0],
        [3.0, 3.0 + 3.0j],
        [4.0 + 4.0j, 4.0],
        [np.nan, np.nan]
    ]
])
@pytest.mark.parametrize('other', [
    True,
    -100,
    0.1,
    100.0 + 100.0j
])
@pytest.mark.xfail(reason='Wrong SparseBlock initialization (GH#17386)',
                   strict=True)
def test_where_with_numeric_data_and_other(data, other):
    # GH 17386: `where` with an explicit replacement value must match the
    # dense result; the replacement becomes the sparse fill value.
    threshold = 1.5

    sparse_df = SparseDataFrame(data)
    outcome = sparse_df.where(sparse_df > threshold, other)

    dense_df = DataFrame(data)
    expected_dense = dense_df.where(dense_df > threshold, other)
    expected_sparse = SparseDataFrame(expected_dense,
                                      default_fill_value=other)

    tm.assert_frame_equal(outcome, expected_dense)
    tm.assert_sp_frame_equal(outcome, expected_sparse)
@pytest.mark.xfail(reason='Wrong SparseBlock initialization (GH#17386)',
                   strict=True)
def test_where_with_bool_data():
    # GH 17386: boolean frames flow through `where` identically to dense.
    data = [[False, False], [True, True], [False, False]]
    cond = True

    sparse_df = SparseDataFrame(data)
    outcome = sparse_df.where(sparse_df == cond)

    dense_df = DataFrame(data)
    expected_dense = dense_df.where(dense_df == cond)
    expected_sparse = SparseDataFrame(expected_dense)

    tm.assert_frame_equal(outcome, expected_dense)
    tm.assert_sp_frame_equal(outcome, expected_sparse)
@pytest.mark.parametrize('other', [
    True,
    0,
    0.1,
    100.0 + 100.0j
])
@pytest.mark.xfail(reason='Wrong SparseBlock initialization (GH#17386)',
                   strict=True)
def test_where_with_bool_data_and_other(other):
    # GH 17386: boolean frame + explicit replacement value must match dense,
    # with the replacement used as the sparse fill value.
    data = [[False, False], [True, True], [False, False]]
    cond = True

    sparse_df = SparseDataFrame(data)
    outcome = sparse_df.where(sparse_df == cond, other)

    dense_df = DataFrame(data)
    expected_dense = dense_df.where(dense_df == cond, other)
    expected_sparse = SparseDataFrame(expected_dense,
                                      default_fill_value=other)

    tm.assert_frame_equal(outcome, expected_dense)
    tm.assert_sp_frame_equal(outcome, expected_sparse)
| 28.622807
| 76
| 0.619982
| 449
| 3,263
| 4.349666
| 0.129176
| 0.079877
| 0.012289
| 0.012289
| 0.918075
| 0.884281
| 0.884281
| 0.884281
| 0.884281
| 0.827957
| 0
| 0.069255
| 0.234447
| 3,263
| 113
| 77
| 28.876106
| 0.71257
| 0.010726
| 0
| 0.735632
| 0
| 0
| 0.072293
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 1
| 0.045977
| false
| 0
| 0.045977
| 0
| 0.091954
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7154f3321d9a1458084a296819d949e9256b425d
| 10,117
|
py
|
Python
|
db_learn/db_funcs.py
|
plaforgue/db_learn
|
2a17fc7638891814cd17ede6353a1a52e840fdbb
|
[
"MIT"
] | null | null | null |
db_learn/db_funcs.py
|
plaforgue/db_learn
|
2a17fc7638891814cd17ede6353a1a52e840fdbb
|
[
"MIT"
] | null | null | null |
db_learn/db_funcs.py
|
plaforgue/db_learn
|
2a17fc7638891814cd17ede6353a1a52e840fdbb
|
[
"MIT"
] | null | null | null |
# Authors: Pierre Laforgue <pierre.laforgue@telecom-paristech.fr>
#
# License: MIT
import numpy as np
from numba import njit
# In this file we detail several useful biasing functions, and ways to sample
# biased training samples.
#########################
# #
# BIASING FUNCTIONS #
# #
#########################
# We now detail several useful biasing functions.
@njit
def norm_in_bnd(x, a, b):
    """Norm between bounds indicator: 1 when a < ||x|| < b, else 0.

    Bounds are strict on both sides. Kept in numba-friendly form (boolean
    product instead of `and`) -- do not restyle without re-checking njit.
    """
    z = np.linalg.norm(x)
    sup_a = a < z
    inf_b = z < b
    # Boolean multiplication acts as logical AND.
    res = sup_a * inf_b
    return res
@njit
def norm_in_bnd_vec(X, a, b):
    """Row-wise `norm_in_bnd` for 2-dimensional X.

    Returns an int64 array of 0/1 flags, one per row of X.
    """
    n = X.shape[0]
    res = np.zeros(n, dtype=np.int64)
    for i in range(n):
        res[i] = norm_in_bnd(X[i, :], a, b)
    return res
@njit
def norm_out_bnd(x, a, b):
    """Norm out of bounds indicator: 1 when ||x|| < a or ||x|| > b.

    Complement of `norm_in_bnd` (up to the boundary points themselves).
    """
    z = np.linalg.norm(x)
    inf_a = z < a
    sup_b = b < z
    # Boolean addition acts as logical OR (the two conditions are exclusive).
    res = inf_a + sup_b
    return res
@njit
def norm_out_bnd_vec(X, a, b):
    """Row-wise `norm_out_bnd` for 2-dimensional X.

    Returns an int64 array of 0/1 flags, one per row of X.
    """
    n = X.shape[0]
    res = np.zeros(n, dtype=np.int64)
    for i in range(n):
        res[i] = norm_out_bnd(X[i, :], a, b)
    return res
@njit
def dim_in_bnd(x, dim, a, b):
    """Coordinate between bounds indicator: 1 when a < x[dim] < b, else 0."""
    z = x[dim]
    sup_a = a < z
    inf_b = z < b
    # Boolean multiplication acts as logical AND.
    res = sup_a * inf_b
    return res
@njit
def dim_in_bnd_vec(X, dim, a, b):
    """Row-wise `dim_in_bnd` for 2-dimensional X.

    Returns an int64 array of 0/1 flags, one per row of X.
    """
    n = X.shape[0]
    res = np.zeros(n, dtype=np.int64)
    for i in range(n):
        res[i] = dim_in_bnd(X[i, :], dim, a, b)
    return res
@njit
def dim_out_bnd(x, dim, a, b):
    """Coordinate out of bounds indicator: 1 when x[dim] < a or x[dim] > b."""
    z = x[dim]
    inf_a = z < a
    sup_b = b < z
    # Boolean addition acts as logical OR (conditions are exclusive).
    res = inf_a + sup_b
    return res
@njit
def dim_out_bnd_vec(X, dim, a, b):
    """Row-wise `dim_out_bnd` for 2-dimensional X.

    Returns an int64 array of 0/1 flags, one per row of X.
    """
    n = X.shape[0]
    res = np.zeros(n, dtype=np.int64)
    for i in range(n):
        res[i] = dim_out_bnd(X[i, :], dim, a, b)
    return res
@njit
def dim_in_set(x, dim, set_):
    """Membership indicator: 1 when x[dim] equals any element of set_.

    `set_` is any iterable numba can loop over (tuple/array of admissible
    values).
    """
    z = x[dim]
    res = 0
    for item in set_:
        if z == item:
            res = 1
            # NOTE(review): an early `break` would be equivalent and cheaper;
            # left untouched to keep the compiled behavior byte-identical.
    return res
@njit
def dim_in_set_vec(X, dim, set_):
    """Row-wise `dim_in_set` for 2-dimensional X.

    Returns an int64 array of 0/1 flags, one per row of X.
    """
    n = X.shape[0]
    res = np.zeros(n, dtype=np.int64)
    for i in range(n):
        res[i] = dim_in_set(X[i, :], dim, set_)
    return res
#####################################
# #
# CONSTRAINED GAUSSIAN DATASETS #
# #
#####################################
# The following functions combine Gaussian random generation and the previously
# defined biasing functions to generate biased Gaussian samples.
@njit
def Gauss_norm_in_bnd(n, d, a, b):
    """Sample n standard-normal rows of dim d with a < ||x|| < b.

    Rejection sampling: draws until n rows are accepted. May loop for a
    long time when the acceptance region has small probability.
    """
    X = np.zeros((n, d), dtype=np.float64)
    count = 0
    while count < n:
        x = np.random.randn(d)
        if norm_in_bnd(x, a, b,):
            X[count, :] = x.copy()
            count += 1
    return X
@njit
def Gauss_norm_out_bnd(n, d, a, b):
    """Sample n standard-normal rows of dim d with ||x|| outside [a, b].

    Rejection sampling; see `Gauss_norm_in_bnd` for the acceptance caveat.
    """
    X = np.zeros((n, d), dtype=np.float64)
    count = 0
    while count < n:
        x = np.random.randn(d)
        if norm_out_bnd(x, a, b,):
            X[count, :] = x.copy()
            count += 1
    return X
@njit
def Gauss_dim_in_bnd(n, d, dim, a, b):
    """Sample n standard-normal rows of dim d with a < x[dim] < b.

    Rejection sampling on a single coordinate.
    """
    X = np.zeros((n, d), dtype=np.float64)
    count = 0
    while count < n:
        x = np.random.randn(d)
        if dim_in_bnd(x, dim, a, b,):
            X[count, :] = x.copy()
            count += 1
    return X
@njit
def Gauss_dim_out_bnd(n, d, dim, a, b):
    """Sample n standard-normal rows of dim d with x[dim] outside [a, b].

    Rejection sampling on a single coordinate.
    """
    X = np.zeros((n, d), dtype=np.float64)
    count = 0
    while count < n:
        x = np.random.randn(d)
        if dim_out_bnd(x, dim, a, b,):
            X[count, :] = x.copy()
            count += 1
    return X
def Gauss(n, d, dim='norm', a=0., b=1., in_=True):
    """Sample from Gaussian r.v. constrained by any biasing function.

    Dispatches to one of the four jitted rejection samplers depending on
    whether the bias acts on the norm or on a single coordinate, and on
    whether the constraint keeps points inside or outside (a, b).

    Parameters
    ----------
    n: int
        Number of observations to sample
    d: int
        Dimension of the observations to sample
    dim: str, default='norm'
        Perform bias on norm or on dimension (to be specified by an int)
    a: float, default=0.
        Lower bound of the bias
    b: float, default=1.
        Upper bound of the bias
    in_: bool, default=True
        If True, sample Gaussian observations that satisfy norm (or specified)
        dimension between a and b. Outside a and b otherwise.

    Returns
    -------
    X: array of shape (n, d)
        Biased Gaussian sample
    """
    if dim == 'norm':
        sampler = Gauss_norm_in_bnd if in_ else Gauss_norm_out_bnd
        return sampler(n, d, a, b)
    sampler = Gauss_dim_in_bnd if in_ else Gauss_dim_out_bnd
    return sampler(n, d, dim, a, b)
#############################################
# #
# SAMPLE WITH CONSTRAINTS FROM DATASETS #
# #
#############################################
# Another interesting feature is to sample from an existing dataset, with
# respect to a biasing function
@njit
def SampleX_norm_in_bnd(X, n, a, b):
    """Take the first n rows of X (in order) whose norm lies in (a, b).

    If fewer than n rows qualify, falls back (one recursion) to all
    qualifying rows instead and prints a warning.
    """
    n_max = norm_in_bnd_vec(X, a, b).sum()
    if n > n_max:
        # Not enough qualifying rows: recurse once with the achievable count.
        print('Not enough data verifying condition, chosen instead:')
        print(n_max)
        return SampleX_norm_in_bnd(X, n_max, a, b)
    else:
        d = X.shape[1]
        X_bias = np.zeros((n, d), dtype=np.float64)
        count = 0
        i = 0
        while count < n:
            x = X[i, :]
            if norm_in_bnd(x, a, b,):
                X_bias[count, :] = x.copy()
                count += 1
            i += 1
        return X_bias
@njit
def SampleX_norm_out_bnd(X, n, a, b):
    """Take the first n rows of X whose norm lies outside [a, b].

    Falls back to all qualifying rows (with a printed warning) when fewer
    than n rows satisfy the constraint.
    """
    n_max = norm_out_bnd_vec(X, a, b).sum()
    if n > n_max:
        # Not enough qualifying rows: recurse once with the achievable count.
        print('Not enough data verifying condition, chosen instead:')
        print(n_max)
        return SampleX_norm_out_bnd(X, n_max, a, b)
    else:
        d = X.shape[1]
        X_bias = np.zeros((n, d), dtype=np.float64)
        count = 0
        i = 0
        while count < n:
            x = X[i, :]
            if norm_out_bnd(x, a, b,):
                X_bias[count, :] = x.copy()
                count += 1
            i += 1
        return X_bias
@njit
def SampleX_dim_in_bnd(X, n, dim, a, b):
    """Take the first n rows of X with a < X[i, dim] < b.

    Falls back to all qualifying rows (with a printed warning) when fewer
    than n rows satisfy the constraint.
    """
    n_max = dim_in_bnd_vec(X, dim, a, b).sum()
    if n > n_max:
        # Not enough qualifying rows: recurse once with the achievable count.
        print('Not enough data verifying condition, chosen instead:')
        print(n_max)
        return SampleX_dim_in_bnd(X, n_max, dim, a, b)
    else:
        d = X.shape[1]
        X_bias = np.zeros((n, d), dtype=np.float64)
        count = 0
        i = 0
        while count < n:
            x = X[i, :]
            if dim_in_bnd(x, dim, a, b,):
                X_bias[count, :] = x.copy()
                count += 1
            i += 1
        return X_bias
@njit
def SampleX_dim_out_bnd(X, n, dim, a, b):
    """Take the first n rows of X with X[i, dim] outside [a, b].

    Falls back to all qualifying rows (with a printed warning) when fewer
    than n rows satisfy the constraint.
    """
    n_max = dim_out_bnd_vec(X, dim, a, b).sum()
    if n > n_max:
        # Not enough qualifying rows: recurse once with the achievable count.
        print('Not enough data verifying condition, chosen instead:')
        print(n_max)
        return SampleX_dim_out_bnd(X, n_max, dim, a, b)
    else:
        d = X.shape[1]
        X_bias = np.zeros((n, d), dtype=np.float64)
        count = 0
        i = 0
        while count < n:
            x = X[i, :]
            if dim_out_bnd(x, dim, a, b,):
                X_bias[count, :] = x.copy()
                count += 1
            i += 1
        return X_bias
@njit
def SampleX_dim_in_set(X, n, dim, set_):
    """Take the first n rows of X whose coordinate `dim` is in `set_`.

    Falls back to all qualifying rows (with a printed warning) when fewer
    than n rows satisfy the constraint.
    """
    n_max = dim_in_set_vec(X, dim, set_).sum()
    if n > n_max:
        # Not enough qualifying rows: recurse once with the achievable count.
        print('Not enough data verifying condition, chosen instead:')
        print(n_max)
        return SampleX_dim_in_set(X, n_max, dim, set_)
    else:
        d = X.shape[1]
        X_bias = np.zeros((n, d), dtype=np.float64)
        count = 0
        i = 0
        while count < n:
            x = X[i, :]
            if dim_in_set(x, dim, set_):
                X_bias[count, :] = x.copy()
                count += 1
            i += 1
        return X_bias
def SampleX(X, n, dim='norm', a=0., b=1., in_=True):
    """Sample from X constrained by any biasing function.

    Shuffles a copy of X, then dispatches to the appropriate jitted
    SampleX_* helper. When `dim` is an integer coordinate, `in_` may be a
    bool (inside/outside the (a, b) interval) or a set of admissible
    values, which selects `SampleX_dim_in_set`.

    Parameters
    ----------
    X: array of shape (n_obs, d)
        Original dataset from which observations are sampled
    n: int
        Number of observations to sample
    dim: str, default='norm'
        Perform bias on norm or on dimension (to be specified by an int)
    a: float, default=0.
        Lower bound of the bias
    b: float, default=1.
        Upper bound of the bias
    in_: bool, default=True
        If True, sample observations that satisfy norm (or specified)
        dimension between a and b. Outside a and b otherwise

    Returns
    -------
    X_bias: array of shape (n, d)
        Set of observations sampled from X with specified bias
    """
    # Shuffle a copy so the original dataset is left untouched and the
    # "first n qualifying rows" picked by the helpers are random.
    X2 = X.copy()
    np.random.shuffle(X2)
    if dim == 'norm':
        if in_:
            X_bias = SampleX_norm_in_bnd(X2, n, a, b)
        else:
            X_bias = SampleX_norm_out_bnd(X2, n, a, b)
    else:
        # BUG FIX: `np.bool` (a mere alias of the builtin `bool`) was
        # deprecated in NumPy 1.20 and removed in 1.24, so the original
        # `isinstance(in_, np.bool)` raises AttributeError on modern NumPy.
        # Test against the builtin and NumPy's scalar bool type instead.
        if isinstance(in_, (bool, np.bool_)):
            if in_:
                X_bias = SampleX_dim_in_bnd(X2, n, dim, a, b)
            else:
                X_bias = SampleX_dim_out_bnd(X2, n, dim, a, b)
        else:
            # Non-boolean `in_` is interpreted as a set of admissible values.
            X_bias = SampleX_dim_in_set(X2, n, dim, in_)
    return X_bias
| 23.15103
| 79
| 0.530098
| 1,536
| 10,117
| 3.340495
| 0.095703
| 0.017151
| 0.021438
| 0.011694
| 0.844865
| 0.798675
| 0.771
| 0.744104
| 0.704736
| 0.665367
| 0
| 0.013035
| 0.332707
| 10,117
| 436
| 80
| 23.204128
| 0.747
| 0.292577
| 0
| 0.767932
| 0
| 0
| 0.041516
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088608
| false
| 0
| 0.008439
| 0
| 0.206751
| 0.042194
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71c2bce13074a89af31100aa9939a9a06c7080f8
| 21,481
|
py
|
Python
|
tests/components/kodi/test_config_flow.py
|
erogleva/core
|
994ae09f69afe772150a698953c0d7386a745de2
|
[
"Apache-2.0"
] | 3
|
2017-09-16T23:34:59.000Z
|
2021-12-20T11:11:27.000Z
|
tests/components/kodi/test_config_flow.py
|
erogleva/core
|
994ae09f69afe772150a698953c0d7386a745de2
|
[
"Apache-2.0"
] | 52
|
2020-07-14T14:12:26.000Z
|
2022-03-31T06:24:02.000Z
|
tests/components/kodi/test_config_flow.py
|
erogleva/core
|
994ae09f69afe772150a698953c0d7386a745de2
|
[
"Apache-2.0"
] | 2
|
2020-11-17T09:19:47.000Z
|
2020-12-16T03:56:09.000Z
|
"""Test the Kodi config flow."""
import pytest
from homeassistant import config_entries
from homeassistant.components.kodi.config_flow import (
CannotConnectError,
InvalidAuthError,
)
from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN
from .util import (
TEST_CREDENTIALS,
TEST_DISCOVERY,
TEST_HOST,
TEST_IMPORT,
TEST_WS_PORT,
UUID,
MockConnection,
MockWSConnection,
get_kodi_connection,
)
from tests.async_mock import AsyncMock, PropertyMock, patch
from tests.common import MockConfigEntry
@pytest.fixture
async def user_flow(hass):
    """Return a user-initiated flow after filling in host info."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # A fresh user flow must open on the initial form with no errors.
    assert result["type"] == "form"
    assert result["errors"] == {}
    return result["flow_id"]
async def test_user_flow(hass, user_flow):
    """Test a successful user initiated flow."""
    # Ping succeeds and the HTTP/WS connection mocks come up, so the flow
    # should complete in a single step and create the entry.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ), patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "create_entry"
    assert result["title"] == TEST_HOST["host"]
    # No credentials were requested, so username/password default to None.
    assert result["data"] == {
        **TEST_HOST,
        **TEST_WS_PORT,
        "password": None,
        "username": None,
        "name": None,
        "timeout": DEFAULT_TIMEOUT,
    }

    # The created entry must be set up exactly once.
    await hass.async_block_till_done()
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_valid_auth(hass, user_flow):
    """Test we handle valid auth."""
    # Phase 1: anonymous ping raises InvalidAuthError, so the flow advances
    # to the credentials step.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {}

    # Phase 2: with credentials supplied, ping succeeds and the entry is
    # created, including the credentials in the stored data.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ), patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "create_entry"
    assert result["title"] == TEST_HOST["host"]
    assert result["data"] == {
        **TEST_HOST,
        **TEST_WS_PORT,
        **TEST_CREDENTIALS,
        "name": None,
        "timeout": DEFAULT_TIMEOUT,
    }

    await hass.async_block_till_done()
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_valid_ws_port(hass, user_flow):
    """Test we handle valid websocket port."""
    # Phase 1: HTTP ping succeeds but the websocket connect fails, so the
    # flow advances to the ws_port step.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    # Phase 2: a working port is supplied, connection succeeds, entry is
    # created with the chosen websocket port.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ), patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "create_entry"
    assert result["title"] == TEST_HOST["host"]
    assert result["data"] == {
        **TEST_HOST,
        **TEST_WS_PORT,
        "password": None,
        "username": None,
        "name": None,
        "timeout": DEFAULT_TIMEOUT,
    }

    await hass.async_block_till_done()
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_empty_ws_port(hass, user_flow):
    """Test we handle an empty websocket port input."""
    # Phase 1: websocket connect fails, so the flow asks for a ws_port.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    # Phase 2: the user enters port 0 ("don't use websocket"); the entry is
    # created with ws_port stored as None.
    with patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"ws_port": 0}
        )

    assert result["type"] == "create_entry"
    assert result["title"] == TEST_HOST["host"]
    assert result["data"] == {
        **TEST_HOST,
        "ws_port": None,
        "password": None,
        "username": None,
        "name": None,
        "timeout": DEFAULT_TIMEOUT,
    }

    await hass.async_block_till_done()
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass, user_flow):
    """Test we handle invalid auth."""
    # Phase 1: anonymous ping is rejected -> credentials step.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {}

    # Phase 2: the supplied credentials are also rejected -> same step with
    # the "invalid_auth" error shown.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {"base": "invalid_auth"}

    # Phase 3: connection failure during credentials -> "cannot_connect".
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {"base": "cannot_connect"}

    # Phase 4: an unexpected exception -> generic "unknown" error.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {"base": "unknown"}

    # Phase 5: auth now succeeds, but the websocket connect fails, moving
    # the flow on to the ws_port step.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}
async def test_form_cannot_connect_http(hass, user_flow):
    """Test we handle cannot connect over HTTP error."""
    # A connection failure on the very first ping keeps the flow on the
    # "user" step and shows the cannot_connect error.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "cannot_connect"}
async def test_form_exception_http(hass, user_flow):
    """Test we handle generic exception over HTTP."""
    # Any unexpected exception maps to the generic "unknown" error on the
    # initial "user" step.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "unknown"}
async def test_form_cannot_connect_ws(hass, user_flow):
    """Test we handle cannot connect over WebSocket error."""
    # Phase 1: HTTP ping fine, websocket connect raises -> ws_port step.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    # Phase 2: connect "succeeds" but the connection reports not connected
    # -> stay on ws_port with cannot_connect.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connected", new_callable=PropertyMock(return_value=False)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}

    # Phase 3: HTTP itself fails during the ws_port step -> same error.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}
async def test_form_exception_ws(hass, user_flow):
    """Test we handle generic exception over WebSocket."""
    # Phase 1: websocket connect raises CannotConnectError -> ws_port step.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    # Phase 2: an unexpected exception during connect -> "unknown" error.
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connect", AsyncMock(side_effect=Exception)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "unknown"}
async def test_discovery(hass):
"""Test discovery flow works."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
return_value=True,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "form"
assert result["step_id"] == "discovery_confirm"
with patch(
"homeassistant.components.kodi.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.kodi.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
flow_id=result["flow_id"], user_input={}
)
assert result["type"] == "create_entry"
assert result["title"] == "hostname"
assert result["data"] == {
**TEST_HOST,
**TEST_WS_PORT,
"password": None,
"username": None,
"name": "hostname",
"timeout": DEFAULT_TIMEOUT,
}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_discovery_cannot_connect_http(hass):
"""Test discovery aborts if cannot connect."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
side_effect=CannotConnectError,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_discovery_cannot_connect_ws(hass):
"""Test discovery aborts if cannot connect to websocket."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
return_value=True,
), patch.object(
MockWSConnection,
"connect",
AsyncMock(side_effect=CannotConnectError),
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
new=get_kodi_connection,
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "form"
assert result["step_id"] == "ws_port"
assert result["errors"] == {}
async def test_discovery_exception_http(hass, user_flow):
"""Test we handle generic exception during discovery validation."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
side_effect=Exception,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_discovery_invalid_auth(hass):
"""Test we handle invalid auth during discovery."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
side_effect=InvalidAuthError,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "form"
assert result["step_id"] == "credentials"
assert result["errors"] == {}
async def test_discovery_duplicate_data(hass):
"""Test discovery aborts if same mDNS packet arrives."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
return_value=True,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "form"
assert result["step_id"] == "discovery_confirm"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "abort"
assert result["reason"] == "already_in_progress"
async def test_discovery_updates_unique_id(hass):
"""Test a duplicate discovery id aborts and updates existing entry."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=UUID,
data={"host": "dummy", "port": 11, "namename": "dummy.local."},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert entry.data["host"] == "1.1.1.1"
assert entry.data["port"] == 8080
assert entry.data["name"] == "hostname"
async def test_form_import(hass):
"""Test we get the form with import source."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
return_value=True,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
), patch(
"homeassistant.components.kodi.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.kodi.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=TEST_IMPORT,
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_IMPORT["name"]
assert result["data"] == TEST_IMPORT
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_import_invalid_auth(hass):
"""Test we handle invalid auth on import."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
side_effect=InvalidAuthError,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=TEST_IMPORT,
)
assert result["type"] == "abort"
assert result["reason"] == "invalid_auth"
async def test_form_import_cannot_connect(hass):
"""Test we handle cannot connect on import."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
side_effect=CannotConnectError,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=TEST_IMPORT,
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_form_import_exception(hass):
"""Test we handle unknown exception on import."""
with patch(
"homeassistant.components.kodi.config_flow.Kodi.ping",
side_effect=Exception,
), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection",
return_value=MockConnection(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=TEST_IMPORT,
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
| 32.54697
| 85
| 0.655416
| 2,398
| 21,481
| 5.629274
| 0.056714
| 0.079117
| 0.14001
| 0.161197
| 0.905326
| 0.891992
| 0.883547
| 0.871176
| 0.853989
| 0.841988
| 0
| 0.001376
| 0.22187
| 21,481
| 659
| 86
| 32.596358
| 0.806222
| 0.00121
| 0
| 0.82342
| 0
| 0
| 0.255611
| 0.179555
| 0
| 0
| 0
| 0
| 0.193309
| 1
| 0
| false
| 0.007435
| 0.040892
| 0
| 0.042751
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
71d3028675ea546f5d6d662b5bf424cf82af1474
| 18,518
|
py
|
Python
|
test/test_cluster_service.py
|
jlundy2/service-auto-analyzer
|
91dbb7155eff84877b29a327d68491467befa168
|
[
"Apache-2.0"
] | null | null | null |
test/test_cluster_service.py
|
jlundy2/service-auto-analyzer
|
91dbb7155eff84877b29a327d68491467befa168
|
[
"Apache-2.0"
] | null | null | null |
test/test_cluster_service.py
|
jlundy2/service-auto-analyzer
|
91dbb7155eff84877b29a327d68491467befa168
|
[
"Apache-2.0"
] | null | null | null |
"""
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
"""
import unittest
from http import HTTPStatus
import sure # noqa
import httpretty
import commons.launch_objects as launch_objects
from utils import utils
from service.cluster_service import ClusterService
from test.test_service import TestService
class TestClusterService(TestService):
@utils.ignore_warnings
def test_find_clusters(self):
"""Test finding clusters"""
tests = [
{
"test_calls": [{"method": httpretty.GET,
"uri": "/1",
"status": HTTPStatus.OK,
}, ],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**(utils.get_fixture(
self.launch_wo_test_items, to_json=True))[0]),
forUpdate=False,
numberOfLogLines=-1),
"expected_result": []
},
{
"test_calls": [{"method": httpretty.GET,
"uri": "/1",
"status": HTTPStatus.OK,
}, ],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**(utils.get_fixture(
self.launch_w_test_items_wo_logs, to_json=True))[0]),
forUpdate=False,
numberOfLogLines=-1),
"expected_result": []
},
{
"test_calls": [{"method": httpretty.GET,
"uri": "/2",
"status": HTTPStatus.OK,
}, ],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**(utils.get_fixture(
self.launch_w_test_items_w_empty_logs, to_json=True)[0])),
forUpdate=False,
numberOfLogLines=-1),
"expected_result": []
},
{
"test_calls": [{"method": httpretty.GET,
"uri": "/2",
"status": HTTPStatus.OK,
},
{"method": httpretty.POST,
"uri": "/_bulk?refresh=true",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.cluster_update),
"rs": utils.get_fixture(
self.index_logs_rs),
}],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**utils.get_fixture(
self.launch_w_items_clustering, to_json=True)),
forUpdate=False,
numberOfLogLines=-1),
"expected_result": [
launch_objects.ClusterResult(
logId=4,
testItemId=2,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=5,
testItemId=5,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=9,
testItemId=6,
project=2,
launchId=1,
clusterId="")]
},
{
"test_calls": [{"method": httpretty.GET,
"uri": "/2",
"status": HTTPStatus.OK,
},
{"method": httpretty.POST,
"uri": "/_bulk?refresh=true",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.cluster_update_all_the_same),
"rs": utils.get_fixture(
self.index_logs_rs),
}],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**utils.get_fixture(
self.launch_w_items_clustering, to_json=True)),
forUpdate=False,
numberOfLogLines=2),
"expected_result": [
launch_objects.ClusterResult(
logId=4,
testItemId=2,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=5,
testItemId=5,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=9,
testItemId=6,
project=2,
launchId=1,
clusterId="1")]
},
{
"test_calls": [{"method": httpretty.GET,
"uri": "/2",
"status": HTTPStatus.OK,
},
{"method": httpretty.GET,
"uri": "/2/_search",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.search_logs_rq_first_group),
"rs": utils.get_fixture(
self.no_hits_search_rs),
},
{"method": httpretty.GET,
"uri": "/2/_search",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.search_logs_rq_second_group),
"rs": utils.get_fixture(
self.no_hits_search_rs),
},
{"method": httpretty.POST,
"uri": "/_bulk?refresh=true",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.cluster_update),
"rs": utils.get_fixture(
self.index_logs_rs),
}],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**utils.get_fixture(
self.launch_w_items_clustering, to_json=True)),
forUpdate=True,
numberOfLogLines=-1),
"expected_result": [
launch_objects.ClusterResult(
logId=4,
testItemId=2,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=5,
testItemId=5,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=9,
testItemId=6,
project=2,
launchId=1,
clusterId="")]
},
{
"test_calls": [{"method": httpretty.GET,
"uri": "/2",
"status": HTTPStatus.OK,
},
{"method": httpretty.GET,
"uri": "/2/_search",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.search_logs_rq_first_group),
"rs": utils.get_fixture(
self.one_hit_search_rs_clustering),
},
{"method": httpretty.GET,
"uri": "/2/_search",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.search_logs_rq_second_group),
"rs": utils.get_fixture(
self.one_hit_search_rs_clustering),
},
{"method": httpretty.POST,
"uri": "/_bulk?refresh=true",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.cluster_update_es_update),
"rs": utils.get_fixture(
self.index_logs_rs),
}],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**utils.get_fixture(
self.launch_w_items_clustering, to_json=True)),
forUpdate=True,
numberOfLogLines=-1),
"expected_result": [
launch_objects.ClusterResult(
logId=4,
testItemId=2,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=5,
testItemId=5,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=111,
testItemId=12,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=9,
testItemId=6,
project=2,
launchId=1,
clusterId="")]
},
{
"test_calls": [{"method": httpretty.GET,
"uri": "/2",
"status": HTTPStatus.OK,
},
{"method": httpretty.GET,
"uri": "/2/_search",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.search_logs_rq_first_group_2lines),
"rs": utils.get_fixture(
self.one_hit_search_rs_clustering),
},
{"method": httpretty.POST,
"uri": "/_bulk?refresh=true",
"status": HTTPStatus.OK,
"content_type": "application/json",
"rq": utils.get_fixture(
self.cluster_update_all_the_same_es_update),
"rs": utils.get_fixture(
self.index_logs_rs),
}],
"launch_info": launch_objects.LaunchInfoForClustering(
launch=launch_objects.Launch(
**utils.get_fixture(
self.launch_w_items_clustering, to_json=True)),
forUpdate=True,
numberOfLogLines=2),
"expected_result": [
launch_objects.ClusterResult(
logId=4,
testItemId=2,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=5,
testItemId=5,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=9,
testItemId=6,
project=2,
launchId=1,
clusterId="1"),
launch_objects.ClusterResult(
logId=111,
testItemId=12,
project=2,
launchId=1,
clusterId="1")]
},
]
for idx, test in enumerate(tests):
with sure.ensure('Error in the test case number: {0}', idx):
self._start_server(test["test_calls"])
config = self.get_default_search_config()
_cluster_service = ClusterService(app_config=self.app_config,
search_cfg=config)
response = _cluster_service.find_clusters(test["launch_info"])
response.should.have.length_of(len(test["expected_result"]))
cluster_ids_dict = {}
for i in range(len(response)):
test["expected_result"][i].logId.should.equal(response[i].logId)
if test["expected_result"][i].clusterId == "":
test["expected_result"][i].clusterId.should.equal(response[i].clusterId)
elif test["expected_result"][i].clusterId not in cluster_ids_dict:
cluster_ids_dict[test["expected_result"][i].clusterId] = response[i].clusterId
elif test["expected_result"][i].clusterId in cluster_ids_dict:
expected_cluster_id = cluster_ids_dict[test["expected_result"][i].clusterId]
expected_cluster_id.should.equal(response[i].clusterId)
for cluster_id in cluster_ids_dict:
test["test_calls"][-1]["rq"] = test["test_calls"][-1]["rq"].replace(
"\"cluster_id\":\"%s\"" % cluster_id,
"\"cluster_id\":\"%s\"" % cluster_ids_dict[cluster_id])
TestClusterService.shutdown_server(test["test_calls"])
if __name__ == '__main__':
unittest.main()
| 50.873626
| 102
| 0.34615
| 1,157
| 18,518
| 5.305099
| 0.152118
| 0.074128
| 0.068426
| 0.086673
| 0.776474
| 0.755621
| 0.755621
| 0.755621
| 0.741935
| 0.725644
| 0
| 0.015788
| 0.575872
| 18,518
| 363
| 103
| 51.013774
| 0.765724
| 0.032347
| 0
| 0.787425
| 0
| 0
| 0.071261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002994
| false
| 0
| 0.023952
| 0
| 0.02994
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0b2704170d9b8dbe6d70b006fd0f085e215d1c2
| 1,229
|
py
|
Python
|
src/graphs/tests/test_multisearch.py
|
seahrh/coding-interview
|
517d19e7e88c02acec4aa6336bc20206ce3f1897
|
[
"MIT"
] | null | null | null |
src/graphs/tests/test_multisearch.py
|
seahrh/coding-interview
|
517d19e7e88c02acec4aa6336bc20206ce3f1897
|
[
"MIT"
] | null | null | null |
src/graphs/tests/test_multisearch.py
|
seahrh/coding-interview
|
517d19e7e88c02acec4aa6336bc20206ce3f1897
|
[
"MIT"
] | null | null | null |
from graphs.multisearch import *
class TestMultiSearch:
def test_given_small_strings_of_length_one(self):
assert multisearch("bibs", ["b", "i", "s", "a"]) == {
"b": [0, 2],
"i": [1],
"s": [3],
"a": [],
}
def test_given_example_1(self):
assert multisearch(
"mississippi", ["is", "ppi", "hi", "sis", "i", "ssippi"]
) == {
"hi": [],
"i": [1, 4, 7, 10],
"is": [1, 4],
"ppi": [8],
"sis": [3],
"ssippi": [5],
}
class TestMultiSearchWithBigStringTrie:
def test_given_small_strings_of_length_one(self):
assert multisearch_with_bstrie("bibs", ["b", "i", "s", "a"]) == {
"b": [0, 2],
"i": [1],
"s": [3],
"a": [],
}
def test_given_example_1(self):
assert multisearch_with_bstrie(
"mississippi", ["is", "ppi", "hi", "sis", "i", "ssippi"]
) == {
"hi": [],
"i": [1, 4, 7, 10],
"is": [1, 4],
"ppi": [8],
"sis": [3],
"ssippi": [5],
}
| 26.717391
| 74
| 0.375915
| 120
| 1,229
| 3.666667
| 0.333333
| 0.063636
| 0.109091
| 0.077273
| 0.809091
| 0.763636
| 0.763636
| 0.763636
| 0.763636
| 0.763636
| 0
| 0.042373
| 0.423922
| 1,229
| 45
| 75
| 27.311111
| 0.579096
| 0
| 0
| 0.717949
| 0
| 0
| 0.096284
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 1
| 0.102564
| false
| 0
| 0.025641
| 0
| 0.179487
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0e525a980a1191b7b3c18dbe6ba20b78a7a4024
| 33,800
|
py
|
Python
|
web/server/tests/unit/test_source_code_comment.py
|
LebedevRI/codechecker
|
f4548444851e19c8cc7b8fd621f3dcdf987d7140
|
[
"Apache-2.0"
] | null | null | null |
web/server/tests/unit/test_source_code_comment.py
|
LebedevRI/codechecker
|
f4548444851e19c8cc7b8fd621f3dcdf987d7140
|
[
"Apache-2.0"
] | null | null | null |
web/server/tests/unit/test_source_code_comment.py
|
LebedevRI/codechecker
|
f4548444851e19c8cc7b8fd621f3dcdf987d7140
|
[
"Apache-2.0"
] | 1
|
2021-01-27T21:45:14.000Z
|
2021-01-27T21:45:14.000Z
|
# -*- coding: utf-8 -*-
#
# -------------------------------------------------------------------------
#
# Part of the CodeChecker project, under the Apache License v2.0 with
# LLVM Exceptions. See LICENSE for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# -------------------------------------------------------------------------
"""Tests for source code comments in source file."""
import os
import unittest
from codechecker_common.source_code_comment_handler import \
SourceCodeCommentHandler
class SourceCodeCommentTestCase(unittest.TestCase):
"""Tests for source code comments in source file."""
@classmethod
def setup_class(cls):
"""Initialize test source file references."""
cls.__test_src_dir = os.path.join(
os.path.dirname(__file__), 'source_code_comment_test_files')
cls.__tmp_srcfile_1 = open(os.path.join(cls.__test_src_dir,
'test_file_1'),
encoding='utf-8', errors="ignore")
cls.__tmp_srcfile_2 = open(os.path.join(cls.__test_src_dir,
'test_file_2'),
encoding='utf-8', errors="ignore")
cls.__tmp_srcfile_3 = open(os.path.join(cls.__test_src_dir,
'test_file_3'),
encoding='utf-8', errors="ignore")
@classmethod
def teardown_class(cls):
cls.__tmp_srcfile_1.close()
cls.__tmp_srcfile_2.close()
cls.__tmp_srcfile_3.close()
def test_src_comment_first_line(self):
"""Bug is reported for the first line."""
bug_line = 3
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertFalse(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 0)
def test_no_comment(self):
"""There is no comment above the bug line."""
bug_line = 9
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertFalse(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 0)
def test_no_src_comment_comment(self):
"""There is no source comment above the bug line."""
bug_line = 16
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'all'},
'message': 'some comment',
'status': 'false_positive',
'line': '// codechecker_suppress [all] some comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_multi_liner_all(self):
"""There is source code comment above the bug line."""
bug_line = 23
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'all'},
'message': 'some long comment',
'status': 'false_positive',
'line': '// codechecker_suppress [all] some long\n '
'// comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_one_liner_all(self):
"""There is source code comment above the bug line."""
bug_line = 29
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'my_checker_1', 'my_checker_2'},
'message': 'some comment',
'status': 'false_positive',
'line': '// codechecker_suppress [my_checker_1, '
'my_checker_2] some comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_multi_liner_all_2(self):
"""There is source code comment above the bug line."""
bug_line = 36
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'my.checker_1', 'my.checker_2'},
'message': 'some really long comment',
'status': 'false_positive',
'line': '// codechecker_suppress [my.checker_1 '
'my.checker_2] some really\n // long comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_one_liner_some_checkers(self):
"""There is source code comment above the bug line."""
bug_line = 43
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'my.Checker_1', 'my.Checker_2'},
'message': 'some really really long comment',
'status': 'false_positive',
'line': '// codechecker_suppress [my.Checker_1, '
'my.Checker_2] some really\n // really\n'
' // long comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_multi_liner_some_checkers(self):
"""There is source code comment above the bug line."""
bug_line = 50
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertFalse(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 0)
def test_comment_characters(self):
"""Check for different special comment characters."""
bug_line = 57
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'my.checker_1', 'my.checker_2'},
'message': "i/';0 (*&^%$#@!)",
'status': 'false_positive',
'line': "// codechecker_suppress [my.checker_1, "
"my.checker_2]\n // i/';0 (*&^%$#@!)\n"}
self.assertDictEqual(expected, source_line_comments[0])
def test_fancy_comment_characters(self):
"""Check fancy comment."""
bug_line = 64
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'my_checker_1'},
'message': "áúőóüöáé [▬▬▬▬▬▬▬▬▬▬ஜ۩۞۩ஜ▬▬▬▬▬▬▬▬▬▬]",
'status': 'false_positive',
'line': '// codechecker_suppress [ my_checker_1 ]\n // '
'áúőóüöáé [▬▬▬▬▬▬▬▬▬▬ஜ۩۞۩ஜ▬▬▬▬▬▬▬▬▬▬]\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_no_fancy_comment(self):
"""Check no fancy comment."""
bug_line = 70
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_1, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'my_checker_1'},
'message': 'WARNING! source code comment is missing',
'status': 'false_positive',
'line': '// codechecker_suppress [ my_checker_1 ]\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_malformed_commment_format(self):
"""Check malformed comment."""
bug_line = 1
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_2,
bug_line)
self.assertFalse(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_2, bug_line)
self.assertEqual(len(source_line_comments), 0)
def test_source_code_comment(self):
"""Check source code comment."""
bug_line = 2
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_3, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'all'},
'message': 'some comment',
'status': 'false_positive',
'line': '// codechecker_suppress [ all ] some comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_false_positive_comment(self):
"""Check False positive comment."""
bug_line = 7
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_3, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'all'},
'message': 'some comment',
'status': 'false_positive',
'line': '// codechecker_false_positive [ all ] '
'some comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_intentional_comment(self):
"""Check Intentional comment."""
bug_line = 12
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_3, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'all'},
'message': 'some comment',
'status': 'intentional',
'line': '// codechecker_intentional [ all ] '
'some comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_confirmed_comment(self):
"""Check Confirmed comment."""
bug_line = 17
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_3, bug_line)
self.assertEqual(len(source_line_comments), 1)
expected = {'checkers': {'all'},
'message': 'some comment',
'status': 'confirmed',
'line': '// codechecker_confirmed [ all ] some comment\n'}
self.assertDictEqual(expected, source_line_comments[0])
def test_multiple_comments(self):
"""Check multiple comment."""
bug_line = 23
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_3, bug_line)
self.assertEqual(len(source_line_comments), 2)
expected = [{
'checkers': {'my.checker_1'},
'message': 'intentional comment',
'status': 'intentional',
'line': '// codechecker_intentional [ my.checker_1 ] '
'intentional comment\n'},
{
'checkers': {'my.checker_2'},
'message': 'confirmed bug',
'status': 'confirmed',
'line': '// codechecker_confirmed [ my.checker_2 ] '
'confirmed bug\n'
}]
self.assertDictEqual(expected[0], source_line_comments[0])
self.assertDictEqual(expected[1], source_line_comments[1])
current_line_comments = \
sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
bug_line,
'my.checker_1')
self.assertEqual(len(current_line_comments), 1)
self.assertEqual(current_line_comments[0]['message'],
expected[0]['message'])
self.assertEqual(current_line_comments[0]['status'],
expected[0]['status'])
current_line_comments = \
sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
bug_line,
'my.checker_2')
self.assertEqual(len(current_line_comments), 1)
self.assertEqual(current_line_comments[0]['message'],
expected[1]['message'])
self.assertEqual(current_line_comments[0]['status'],
expected[1]['status'])
current_line_comments = \
sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
bug_line,
'my.dummy')
self.assertEqual(len(current_line_comments), 0)
def test_multiple_multi_line_comments(self):
"""Check multi line long line comments."""
bug_line = 31
sc_handler = SourceCodeCommentHandler()
res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
bug_line)
self.assertTrue(res)
source_line_comments = \
sc_handler.get_source_line_comments(self.__tmp_srcfile_3, bug_line)
self.assertEqual(len(source_line_comments), 2)
expected = [{
'checkers': {'my.checker_1'},
'message': 'long intentional bug comment',
'status': 'intentional',
'line': '// codechecker_intentional [ my.checker_1 ] '
'long intentional\n // bug comment\n'},
{
'checkers': {'my.checker_2'},
'message': 'long confirmed bug comment',
'status': 'confirmed',
'line': '// codechecker_confirmed [ my.checker_2 ] '
'long confirmed\n // bug comment\n'
}]
self.assertDictEqual(expected[0], source_line_comments[0])
self.assertDictEqual(expected[1], source_line_comments[1])
def test_multiple_all_comments(self):
    """Check multiple comment."""
    bug_line = 37
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = \
        sc_handler.get_source_line_comments(self.__tmp_srcfile_3, bug_line)
    self.assertEqual(len(source_line_comments), 2)

    expected = [{'checkers': {'my.checker_1'},
                 'message': 'intentional comment',
                 'status': 'intentional',
                 'line': '// codechecker_intentional [ my.checker_1 ] '
                         'intentional comment\n'},
                {'checkers': {'all'},
                 'message': 'some comment',
                 'status': 'false_positive',
                 'line': '// codechecker_false_positive [ all ] '
                         'some comment\n'}]
    self.assertDictEqual(expected[0], source_line_comments[0])
    self.assertDictEqual(expected[1], source_line_comments[1])

    # Filtering by an explicitly named checker returns only its comment.
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.checker_1')
    self.assertEqual(len(current_line_comments), 1)
    self.assertEqual(current_line_comments[0]['message'],
                     expected[0]['message'])
    self.assertEqual(current_line_comments[0]['status'],
                     expected[0]['status'])

    # An empty checker name matches the 'all' wildcard comment.
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               '')
    self.assertEqual(len(current_line_comments), 1)
    self.assertEqual(current_line_comments[0]['message'],
                     expected[1]['message'])
    self.assertEqual(current_line_comments[0]['status'],
                     expected[1]['status'])

    # A checker named in no comment still matches the 'all' wildcard.
    # (A redundant duplicate of the length assertion was removed here.)
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.dummy')
    self.assertEqual(len(current_line_comments), 1)
    self.assertEqual(current_line_comments[0]['message'],
                     expected[1]['message'])
    self.assertEqual(current_line_comments[0]['status'],
                     expected[1]['status'])
def test_multiple_checker_name_comments(self):
    """
    Check multiple comment where same checker name are given for multiple
    source code comment.
    """
    bug_line = 43
    handler = SourceCodeCommentHandler()

    self.assertTrue(
        handler.has_source_line_comments(self.__tmp_srcfile_3, bug_line))

    comments = handler.get_source_line_comments(
        self.__tmp_srcfile_3, bug_line)
    self.assertEqual(2, len(comments))

    expected = [
        {'checkers': {'my.checker_1'},
         'message': 'intentional comment',
         'status': 'intentional',
         'line': '// codechecker_intentional [ my.checker_1 ] '
                 'intentional comment\n'},
        {'checkers': {'my.checker_2', 'my.checker_1'},
         'message': 'some comment',
         'status': 'false_positive',
         'line': '// codechecker_false_positive [ '
                 'my.checker_2, my.checker_1 ] some comment\n'}]

    for want, got in zip(expected, comments):
        self.assertDictEqual(want, got)

    # my.checker_1 is named in both comments, so both are returned.
    filtered = handler.filter_source_line_comments(
        self.__tmp_srcfile_3, bug_line, 'my.checker_1')
    self.assertEqual(2, len(filtered))
def test_cstyle_comment(self):
    """
    C style comment in one line.
    /* codechecker_suppress [ my_checker_1 ] suppress comment */
    """
    bug_line = 76
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = sc_handler.get_source_line_comments(
        self.__tmp_srcfile_1,
        bug_line)
    # Debug print loop removed: it only spammed the test output.
    self.assertEqual(len(source_line_comments), 1)

    # A '/* ... */' suppress comment maps to the false_positive status.
    expected = {'checkers': {'my_checker_1'},
                'message': 'suppress comment',
                'status': 'false_positive',
                'line': '/* codechecker_suppress [ my_checker_1 ] '
                        'suppress comment */\n'}
    self.assertDictEqual(expected, source_line_comments[0])
def test_cstyle_comment_multi_line(self):
    """
    Multi line C style comment.
    /* codechecker_suppress [ my_checker_1 ]
    some longer
    comment */
    """
    bug_line = 83
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = sc_handler.get_source_line_comments(
        self.__tmp_srcfile_1,
        bug_line)
    # Debug print loop removed: it only spammed the test output.
    self.assertEqual(len(source_line_comments), 1)

    # Continuation lines are joined into a single message.
    expected = {'checkers': {'my_checker_1'},
                'message': 'some longer comment',
                'status': 'false_positive',
                'line': '/* codechecker_suppress [ my_checker_1 ]\n '
                        'some longer\n comment */\n'}
    self.assertDictEqual(expected, source_line_comments[0])
def test_cstyle_comment_multi_nomsg(self):
    """
    Multi line C style comment.
    /* codechecker_suppress [ my_checker_1 ]
    */
    """
    bug_line = 89
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = sc_handler.get_source_line_comments(
        self.__tmp_srcfile_1,
        bug_line)
    # Debug print loop removed: it only spammed the test output.
    self.assertEqual(len(source_line_comments), 1)

    # A suppress comment with no message text gets a warning placeholder.
    expected = [{
        'checkers': {'my_checker_1'},
        'message': 'WARNING! source code comment is missing',
        'status': 'false_positive',
        'line': '/* codechecker_suppress [ my_checker_1 ]'
                '\n */\n'
    }]
    self.assertDictEqual(expected[0], source_line_comments[0])
def test_cstyle_comment_multi_star(self):
    """
    Multi line C style comment.
    /* codechecker_suppress [ my_checker_1 ]
    * multi line
    * comment
    * again
    */
    """
    bug_line = 98
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = sc_handler.get_source_line_comments(
        self.__tmp_srcfile_1,
        bug_line)
    # Debug print loop (including the '-======' separators) removed.
    self.assertEqual(len(source_line_comments), 1)

    # Leading '*' decorations are stripped from the collected message.
    expected = {'checkers': {'my_checker_1'},
                'message': 'multi line comment again',
                'status': 'false_positive',
                'line': "/* codechecker_suppress [ my_checker_1 ]\n * "
                        "multi line\n * comment\n * again\n */\n"}
    self.assertDictEqual(expected, source_line_comments[0])
def test_cstyle_comment_multi_line_mismatch(self):
    """
    Multi line C style comment start '/*' is in a different line
    from the codechecker review status comment.
    /*
    codechecker_suppress [ my_checker_1 ]
    multi line
    comment
    again
    */
    """
    bug_line = 108
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_1,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = sc_handler.get_source_line_comments(
        self.__tmp_srcfile_1,
        bug_line)
    # Debug print loop (including the '-======' separators) removed.
    self.assertEqual(len(source_line_comments), 1)

    # The reported 'line' starts at the review status keyword, not at '/*'.
    expected = {'checkers': {'my_checker_1'},
                'message': 'multi line comment again',
                'status': 'false_positive',
                'line': ' codechecker_suppress [ my_checker_1 ]\n '
                        'multi line\n comment\n again\n */\n'}
    self.assertDictEqual(expected, source_line_comments[0])
def test_cstyle_multi_comment_multi_line(self):
    """
    Multi line C style comment with multiple review status comment.
    /* codechecker_false_positive [ my.checker_2, my.checker_1 ] comment
    codechecker_intentional [ my.checker_1 ] intentional comment */
    """
    bug_line = 49
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = sc_handler.get_source_line_comments(
        self.__tmp_srcfile_3,
        bug_line)
    # Debug print loop removed: it only spammed the test output.
    self.assertEqual(len(source_line_comments), 2)

    expected = [{'checkers': {'my.checker_1'},
                 'message': 'intentional comment',
                 'status': 'intentional',
                 'line': 'codechecker_intentional [ my.checker_1 ] '
                         'intentional comment */\n'},
                {'checkers': {'my.checker_1', 'my.checker_2'},
                 'message': 'some comment',
                 'status': 'false_positive',
                 'line': '/* codechecker_false_positive [ '
                         'my.checker_2, my.checker_1 ] some comment\n'}]
    self.assertDictEqual(expected[0], source_line_comments[0])
    self.assertDictEqual(expected[1], source_line_comments[1])

    # my.checker_1 is named in both review status comments.
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.checker_1')
    self.assertEqual(len(current_line_comments), 2)
    self.assertEqual(current_line_comments[0]['message'],
                     expected[0]['message'])
    self.assertEqual(current_line_comments[0]['status'],
                     expected[0]['status'])
    self.assertEqual(current_line_comments[1]['message'],
                     expected[1]['message'])
    self.assertEqual(current_line_comments[1]['status'],
                     expected[1]['status'])

    # my.checker_2 is named only in the false_positive comment.
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.checker_2')
    self.assertEqual(len(current_line_comments), 1)
    self.assertEqual(current_line_comments[0]['message'],
                     expected[1]['message'])
    self.assertEqual(current_line_comments[0]['status'],
                     expected[1]['status'])

    # An unknown checker matches nothing (no 'all' wildcard on this line).
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.dummy')
    self.assertEqual(len(current_line_comments), 0)
def test_cstyle_multi_comment_multi_line_long(self):
    """
    Multi line C style comment with multiple review status comment.
    /* codechecker_false_positive [ my.checker_2, my.checker_1 ] comment
    which
    is
    long
    codechecker_intentional [ my.checker_1 ] intentional comment
    long
    again */
    """
    bug_line = 60
    sc_handler = SourceCodeCommentHandler()
    res = sc_handler.has_source_line_comments(self.__tmp_srcfile_3,
                                              bug_line)
    self.assertTrue(res)

    source_line_comments = sc_handler.get_source_line_comments(
        self.__tmp_srcfile_3,
        bug_line)
    # Debug print loop removed: it only spammed the test output.
    self.assertEqual(len(source_line_comments), 2)

    expected = [{'checkers': {'my.checker_1'},
                 'message': 'intentional comment long again',
                 'status': 'intentional',
                 'line': 'codechecker_intentional [ my.checker_1 ] '
                         'intentional comment\n long\n again */\n'},
                {'checkers': {'my.checker_1', 'my.checker_2'},
                 'message': 'comment which is long',
                 'status': 'false_positive',
                 'line': '/* codechecker_false_positive [ '
                         'my.checker_2, my.checker_1 ] comment\n '
                         'which\n is\n long\n'}]
    self.assertDictEqual(expected[0], source_line_comments[0])
    self.assertDictEqual(expected[1], source_line_comments[1])

    # my.checker_1 is named in both review status comments.
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.checker_1')
    self.assertEqual(len(current_line_comments), 2)
    self.assertEqual(current_line_comments[0]['message'],
                     expected[0]['message'])
    self.assertEqual(current_line_comments[0]['status'],
                     expected[0]['status'])
    self.assertEqual(current_line_comments[1]['message'],
                     expected[1]['message'])
    self.assertEqual(current_line_comments[1]['status'],
                     expected[1]['status'])

    # my.checker_2 is named only in the false_positive comment.
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.checker_2')
    self.assertEqual(len(current_line_comments), 1)
    self.assertEqual(current_line_comments[0]['message'],
                     expected[1]['message'])
    self.assertEqual(current_line_comments[0]['status'],
                     expected[1]['status'])

    # An unknown checker matches no comment on this line.
    current_line_comments = \
        sc_handler.filter_source_line_comments(self.__tmp_srcfile_3,
                                               bug_line,
                                               'my.dummy')
    self.assertEqual(len(current_line_comments), 0)
| 41.219512
| 79
| 0.535917
| 3,267
| 33,800
| 5.175696
| 0.048669
| 0.147614
| 0.16713
| 0.087173
| 0.907682
| 0.899344
| 0.887811
| 0.871784
| 0.853569
| 0.840263
| 0
| 0.015877
| 0.364586
| 33,800
| 819
| 80
| 41.269841
| 0.769288
| 0.065769
| 0
| 0.782462
| 0
| 0
| 0.145291
| 0.014172
| 0
| 0
| 0
| 0
| 0.200675
| 1
| 0.048904
| false
| 0
| 0.005059
| 0
| 0.055649
| 0.015177
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1cb9aeb8da08be466f6a7e1e803a043a632f6fba
| 175
|
py
|
Python
|
api/stock/views/urls.py
|
DenerRodrigues/flask-restful-api-example
|
40aa0b5fcdeacf5241063953c478756c85b5811d
|
[
"MIT"
] | 1
|
2019-12-20T00:17:22.000Z
|
2019-12-20T00:17:22.000Z
|
api/stock/views/urls.py
|
DenerRodrigues/flask-restful-api-example
|
40aa0b5fcdeacf5241063953c478756c85b5811d
|
[
"MIT"
] | null | null | null |
api/stock/views/urls.py
|
DenerRodrigues/flask-restful-api-example
|
40aa0b5fcdeacf5241063953c478756c85b5811d
|
[
"MIT"
] | null | null | null |
# URL registration for the wish-list REST resources.
# NOTE: registration happens as a side effect of importing this module,
# so statement order (imports before add_resource) matters.
from api.app import api
from api.stock.views import wish_views
# Collection endpoint (list/create wishes).
api.add_resource(wish_views.WishListCreateView, '/wish/')
# Single-item endpoint addressed by primary key.
api.add_resource(wish_views.WishView, '/wish/<pk>/')
| 29.166667
| 57
| 0.788571
| 27
| 175
| 4.925926
| 0.444444
| 0.203008
| 0.210526
| 0.270677
| 0.345865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074286
| 175
| 5
| 58
| 35
| 0.820988
| 0
| 0
| 0
| 0
| 0
| 0.097143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e8250e3d31df5acb31717f9160f0791aa1dd1a5a
| 41,900
|
py
|
Python
|
PyIK/src/Protocol.py
|
yuliya-sm7/EvoArm
|
c82e8229333b2dcac3d18eb1d0518a16a23c945b
|
[
"CC-BY-3.0"
] | 110
|
2017-01-13T17:19:18.000Z
|
2022-02-20T06:50:03.000Z
|
PyIK/src/Protocol.py
|
yuliya-sm7/EvoArm
|
c82e8229333b2dcac3d18eb1d0518a16a23c945b
|
[
"CC-BY-3.0"
] | 1
|
2018-08-30T07:27:56.000Z
|
2018-08-30T07:27:56.000Z
|
PyIK/src/Protocol.py
|
yuliya-sm7/EvoArm
|
c82e8229333b2dcac3d18eb1d0518a16a23c945b
|
[
"CC-BY-3.0"
] | 47
|
2017-03-10T20:34:01.000Z
|
2021-11-18T03:44:06.000Z
|
# Auto-generated file
# Modify via protocol.yaml and/or Protocol.template.py
from __future__ import print_function
import struct
import time
import serial as pyserial
# Default delay in seconds between protocol operations.
# NOTE(review): not referenced anywhere in this chunk — presumably read
# by callers of this module; confirm before removing.
waitTime = 0.01
# Buffering system for bulk command issue
# NOTE(review): also unreferenced here; looks like a global toggle that
# other code flips — confirm against the rest of the project.
buffering = False
class TimeoutException(Exception):
    """Exception type reserved for serial operations that run out of time.

    Only referenced from commented-out raise sites in waitFor/tryRead;
    kept for API compatibility.
    """
class ArduinoConfigException(Exception):
    """Raised when the connected board does not answer the handshake
    (PC/board software mismatch in serialConnect)."""
# Connection and utility functions
def serialConnect(port, baud):
    """Creates, checks and returns a serial connection to a configured Arduino
    """
    conn = pyserial.Serial(port, baud)
    print ("Waiting for board on {0}".format(port), end=" ")
    conn.timeout = 5

    # Re-open the port until the board sends its banner line.
    # NOTE(review): there is no attempt cap — a silent board loops forever.
    attempts = 1
    result = ''
    while result == '':
        print ("Attempt {0}".format(attempts))
        conn.close()
        conn.open()
        result = conn.readline()
        attempts += 1

    # The firmware must identify itself before we hand the port out.
    if not result.startswith("CommTest READY"):
        raise ArduinoConfigException("ERROR: PC/Board software mismatch", result)
    return conn
def findServos(serial):
    """When passed a serial connection to a configured Arduino, returns all
    connected servos.

    Returns a dict {'v1': [Servo, ...], 'v2': [Servo, ...]}; both lists
    are empty when *serial* is None.
    """
    servos = {'v1': [], 'v2': []}

    # Retrieves a list of servos over serial, one per line.
    if serial is not None:
        # Set relatively high timeout for this section of board comms.
        serial.timeout = 3

        command = "list"
        serial.write(struct.pack('b', len(command)) + command)

        # The board answers with an 'x<N>num=<count>' header per protocol
        # version, each followed by <count> lines holding a servo id or an
        # ERROR message. The two copy-pasted loops of the original are
        # folded into one; the shadowed builtins `str`/`id` and the unused
        # `timer = time.clock()` local were removed.
        for version, key in ((1, 'v1'), (2, 'v2')):
            count = int(serial.readline().split('=')[1])
            for _ in range(count):
                line = serial.readline()
                if line.startswith("ERROR"):
                    print ("Hardware Error during V{0} servo listing: "
                           .format(version), line)
                else:
                    servos[key].append(Servo(serial, version, int(line)))

        # Revert to shorter timeout.
        serial.timeout = 0.01

    # Return our output servo dictionary.
    return servos
def waitFor(serial, num_bytes, timeout=0.1):
    """Busy-wait until *num_bytes* are available on *serial*.

    Returns None either way; on timeout it simply falls through
    (the TimeoutException raise is intentionally disabled).
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        if serial.in_waiting >= num_bytes:
            return
    #raise TimeoutException('Timeout')
def tryRead(serial, num_bytes, timeout=0.05):
    """Busy-wait up to *timeout* seconds for *num_bytes*, then read them.

    Returns the bytes read, or None when they never arrive in time.
    The dead local `read_bytes = 0` of the original was removed and the
    timeout path made an explicit `return None`.
    """
    deadline = time.time() + timeout
    # Busy wait until required bytes arrive or we timeout.
    while time.time() < deadline:
        if serial.in_waiting >= num_bytes:
            return serial.read(num_bytes)
    #raise TimeoutException('Timeout')
    return None
class CapacitiveSensor:
    """Reads capacitive sensor values from the board over serial."""

    def __init__(self, serial):
        # Serial connection shared with the rest of the protocol layer.
        self.serial = serial

    def read(self, count):
        """Request *count* readings and return them as a list of ints."""
        command = 'c{0}'.format(chr(count))
        self.serial.write(struct.pack('b', len(command)) + command)

        # Each reading arrives as a 4-byte packed int.
        waitFor(self.serial, count * 4)
        # range() instead of the Python-2-only xrange() so the module is
        # also importable under Python 3; the preallocated list + index
        # assignment became a comprehension.
        return [struct.unpack('i', self.serial.read(4))[0]
                for _ in range(count)]
class Servo:
def __init__(self, serial, protocol_ver, id):
    """Bind a servo handle to a serial link, protocol version and bus id."""
    self.serial = serial
    self.protocol = protocol_ver
    self.id = id
    # Cached state; 'pos' starts at 150 — presumably the neutral
    # position in degrees, confirm against the firmware.
    self.data = {'pos': 150}
# Templated commands
def setID(self, val):
    """Write a new bus id to the servo; returns True unless the board
    reports an ERROR line."""
    command = 's\x02{pver}{packedid}{arg}'.format(
        pver = self.protocol,
        packedid = struct.pack('B', self.id),
        arg = struct.pack('i', val)
    )
    l = struct.pack('b', len(command))
    self.serial.write(l + command)
    waitFor(self.serial, 2)

    # Response: drain every pending line, keeping the last one.
    # Fixed label: the fallback message of this setter wrongly said
    # 'getID' in the generated original.
    res = 'Timeout in setID <dx{0}:{1}>'.format(self.protocol, self.id)
    while self.serial.in_waiting > 0:
        res = self.serial.readline()
    if res.startswith('ERROR'):
        print ('Servo Error in setID <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
        return False
    return True
def setReturnDelay(self, val):
command = 's\x03{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getReturnDelay <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setReturnDelay <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setCWLimit(self, val):
command = 's\x04{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getCWLimit <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setCWLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setCCWLimit(self, val):
command = 's\x05{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getCCWLimit <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setCCWLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setTempLimit(self, val):
command = 's\x06{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getTempLimit <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setTempLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setLowVoltageLimit(self, val):
command = 's\x07{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getLowVoltageLimit <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setLowVoltageLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setHighVoltageLimit(self, val):
command = 's\x08{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getHighVoltageLimit <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setHighVoltageLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setMaxTorque(self, val):
command = 's\x09{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getMaxTorque <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setMaxTorque <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
# NOTE(review): duplicate definition — a setMaxTorque using register
# \x09 appears earlier in this class; Python keeps only the last
# binding, so this \x0A variant silently shadows it. Judging by the
# distinct register id, the auto-generating template probably meant a
# different setter name here — confirm against protocol.yaml.
def setMaxTorque(self, val):
    command = 's\x0A{pver}{packedid}{arg}'.format(
        pver = self.protocol,
        packedid = struct.pack('B', self.id),
        arg = struct.pack('f', val)
    )
    l = struct.pack('b', len(command))
    self.serial.write(l+command)
    waitFor(self.serial, 2)
    # Response
    res = 'Timeout in getMaxTorque <dx{0}:{1}>'.format(self.protocol, self.id)
    while self.serial.in_waiting > 0:
        res = self.serial.readline()
    if res.startswith('ERROR'):
        print ('Servo Error in setMaxTorque <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
        return False
    return True
def setAlarmFlags(self, val):
command = 's\x0B{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getAlarmFlags <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setAlarmFlags <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setShutdownFlags(self, val):
command = 's\x0C{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getShutdownFlags <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setShutdownFlags <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setTorqueEnable(self, val):
command = 's\x0D{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getTorqueEnable <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setTorqueEnable <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setLED(self, val):
command = 's\x0E{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getLED <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setLED <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setGoalPosition(self, val):
command = 's\x14{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getGoalPosition <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setGoalPosition <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setGoalSpeed(self, val):
command = 's\x15{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getGoalSpeed <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setGoalSpeed <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setTorqueLimit(self, val):
command = 's\x16{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getTorqueLimit <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setTorqueLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setCWMargin(self, val):
command = 's\x17{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getCWMargin <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setCWMargin <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setCCWMargin(self, val):
command = 's\x18{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getCCWMargin <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setCCWMargin <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setCWSlope(self, val):
command = 's\x19{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getCWSlope <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setCWSlope <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setCCWSlope(self, val):
command = 's\x1A{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('i', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getCCWSlope <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setCCWSlope <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def setPunch(self, val):
command = 's\x1B{pver}{packedid}{arg}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id),
arg = struct.pack('f', val)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 2)
# Response
res = 'Timeout in getPunch <dx{0}:{1}>'.format(self.protocol, self.id)
while self.serial.in_waiting > 0:
res = self.serial.readline()
if res.startswith('ERROR'):
print ('Servo Error in setPunch <dx{0}:{1}> {2}'.format(self.protocol, self.id, res))
return False
return True
def getModelNumber(self):
command = 'g\x00{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getModelNumber <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('i', arg)[0]
return val
except Exception as e:
print ('Bad receive in getModelNumber <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getFirmwareVersion(self):
command = 'g\x01{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getFirmwareVersion <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('i', arg)[0]
return val
except Exception as e:
print ('Bad receive in getFirmwareVersion <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getID(self):
command = 'g\x02{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getID <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('i', arg)[0]
return val
except Exception as e:
print ('Bad receive in getID <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getReturnDelay(self):
command = 'g\x03{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getReturnDelay <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('i', arg)[0]
return val
except Exception as e:
print ('Bad receive in getReturnDelay <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getCWLimit(self):
command = 'g\x04{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getCWLimit <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('f', arg)[0]
return val
except Exception as e:
print ('Bad receive in getCWLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getCCWLimit(self):
command = 'g\x05{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getCCWLimit <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('f', arg)[0]
return val
except Exception as e:
print ('Bad receive in getCCWLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getTempLimit(self):
command = 'g\x06{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getTempLimit <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('i', arg)[0]
return val
except Exception as e:
print ('Bad receive in getTempLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getLowVoltageLimit(self):
command = 'g\x07{pver}{packedid}'.format(
pver = self.protocol,
packedid = struct.pack('B', self.id)
)
l = struct.pack('b', len(command))
self.serial.write(l+command)
waitFor(self.serial, 5)
# Retreive response
try:
arg = tryRead(self.serial, 1)
if arg != 'k':
print ('Servo Error in getLowVoltageLimit <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
return None
arg = tryRead(self.serial, 4)
val = struct.unpack('f', arg)[0]
return val
except Exception as e:
print ('Bad receive in getLowVoltageLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
return None
def getHighVoltageLimit(self):
    """Read servo register 0x08 (high-voltage limit, per the method name); float, or None on error."""
    payload = 'g\x08{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getHighVoltageLimit <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getHighVoltageLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getMaxTorque(self):
    """Read servo register 0x09 (max torque, per the method name); float, or None on error.

    NOTE(review): this definition is shadowed by the later duplicate
    getMaxTorque (register 0x0A) in the same class, so this version is
    unreachable through normal attribute lookup — confirm intended names
    against the servo register map.
    """
    payload = 'g\x09{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getMaxTorque <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getMaxTorque <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getMaxTorque(self):
    """Read servo register 0x0A; float, or None on error.

    NOTE(review): duplicate method name — this redefinition shadows the
    getMaxTorque above (register 0x09). One of the two was presumably meant
    to have a different name; verify against the register map before use.
    """
    payload = 'g\x0A{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getMaxTorque <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getMaxTorque <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getAlarmFlags(self):
    """Read servo register 0x0B (alarm flags, per the method name); int, or None on error."""
    payload = 'g\x0B{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getAlarmFlags <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getAlarmFlags <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getShutdownFlags(self):
    """Read servo register 0x0C (shutdown flags, per the method name); int, or None on error."""
    payload = 'g\x0C{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getShutdownFlags <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getShutdownFlags <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getTorqueEnable(self):
    """Read servo register 0x0D (torque-enable flag, per the method name); bool, or None on error."""
    payload = 'g\x0D{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getTorqueEnable <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        # Wire value is a 4-byte int; expose it as a boolean.
        return bool(struct.unpack('i', tryRead(self.serial, 4))[0])
    except Exception as e:
        print ('Bad receive in getTorqueEnable <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getLED(self):
    """Read servo register 0x0E (LED state, per the method name); int, or None on error."""
    payload = 'g\x0E{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getLED <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getLED <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getVoltage(self):
    """Read servo register 0x0F (present voltage, per the method name); float, or None on error."""
    payload = 'g\x0F{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getVoltage <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getVoltage <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getPosition(self):
    """Read servo register 0x10 (present position, per the method name); float, or None on error."""
    payload = 'g\x10{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getPosition <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getPosition <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getLoad(self):
    """Read servo register 0x11 (present load, per the method name); float, or None on error."""
    payload = 'g\x11{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getLoad <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getLoad <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getTemperature(self):
    """Read servo register 0x12 (present temperature, per the method name); int, or None on error."""
    payload = 'g\x12{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getTemperature <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getTemperature <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getSpeed(self):
    """Read servo register 0x13 (present speed, per the method name); float, or None on error."""
    payload = 'g\x13{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getSpeed <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getSpeed <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getGoalPosition(self):
    """Read servo register 0x14 (goal position, per the method name); float, or None on error."""
    payload = 'g\x14{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getGoalPosition <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getGoalPosition <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getGoalSpeed(self):
    """Read servo register 0x15 (goal speed, per the method name); float, or None on error."""
    payload = 'g\x15{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getGoalSpeed <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getGoalSpeed <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getTorqueLimit(self):
    """Read servo register 0x16 (torque limit, per the method name); float, or None on error."""
    payload = 'g\x16{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getTorqueLimit <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getTorqueLimit <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getCWMargin(self):
    """Read servo register 0x17 (CW compliance margin, per the method name); int, or None on error."""
    payload = 'g\x17{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getCWMargin <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getCWMargin <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getCCWMargin(self):
    """Read servo register 0x18 (CCW compliance margin, per the method name); int, or None on error."""
    payload = 'g\x18{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getCCWMargin <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getCCWMargin <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getCWSlope(self):
    """Read servo register 0x19 (CW compliance slope, per the method name); int, or None on error."""
    payload = 'g\x19{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getCWSlope <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getCWSlope <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getCCWSlope(self):
    """Read servo register 0x1A (CCW compliance slope, per the method name); int, or None on error."""
    payload = 'g\x1A{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getCCWSlope <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('i', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getCCWSlope <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getPunch(self):
    """Read servo register 0x1B (punch, per the method name); float, or None on error."""
    payload = 'g\x1B{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getPunch <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        return struct.unpack('f', tryRead(self.serial, 4))[0]
    except Exception as e:
        print ('Bad receive in getPunch <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
def getMoving(self):
    """Read servo register 0x1C (moving flag, per the method name); bool, or None on error."""
    payload = 'g\x1C{0}{1}'.format(self.protocol, struct.pack('B', self.id))
    self.serial.write(struct.pack('b', len(payload)) + payload)
    waitFor(self.serial, 5)  # expect 1 status byte + 4 value bytes
    try:
        status = tryRead(self.serial, 1)
        if status != 'k':
            print ('Servo Error in getMoving <dx{0}:{1}> E{2}'.format(self.protocol, self.id, self.serial.readline()))
            return None
        # Wire value is a 4-byte int; expose it as a boolean.
        return bool(struct.unpack('i', tryRead(self.serial, 4))[0])
    except Exception as e:
        print ('Bad receive in getMoving <dx{0}:{1}> {2}'.format(self.protocol, self.id, e))
        return None
#def getServos():
| 37.713771
| 132
| 0.53611
| 5,079
| 41,900
| 4.414058
| 0.05001
| 0.105268
| 0.050047
| 0.098131
| 0.864802
| 0.823676
| 0.822695
| 0.819394
| 0.814309
| 0.814309
| 0
| 0.01976
| 0.322434
| 41,900
| 1,110
| 133
| 37.747748
| 0.769919
| 0.033413
| 0
| 0.693498
| 1
| 0
| 0.143578
| 0.028577
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0.002064
| 0.004128
| 0
| 0.205366
| 0.086687
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c17611c9a0a29fca94e061e4813cd08e6d36798
| 435
|
py
|
Python
|
Platforms/Web/Processing/Api/Discord/Configs/Gameenabledchannels/__init__.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 2
|
2017-09-14T08:07:55.000Z
|
2021-05-18T05:05:05.000Z
|
Platforms/Web/Processing/Api/Discord/Configs/Gameenabledchannels/__init__.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 111
|
2018-04-15T14:32:14.000Z
|
2021-03-28T21:06:29.000Z
|
Platforms/Web/Processing/Api/Discord/Configs/Gameenabledchannels/__init__.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 1
|
2018-04-15T13:24:44.000Z
|
2018-04-15T13:24:44.000Z
|
import Platforms.Web.Processing.Api.Discord.Configs.Gameenabledchannels.create as create
import Platforms.Web.Processing.Api.Discord.Configs.Gameenabledchannels.delete as delete
import Platforms.Web.Processing.Api.Discord.Configs.Gameenabledchannels.errors as errors
import Platforms.Web.Processing.Api.Discord.Configs.Gameenabledchannels.get as get
import Platforms.Web.Processing.Api.Discord.Configs.Gameenabledchannels.main as main
| 72.5
| 88
| 0.873563
| 55
| 435
| 6.909091
| 0.254545
| 0.197368
| 0.236842
| 0.368421
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0
| 0
| 0
| 0
| 0.045977
| 435
| 5
| 89
| 87
| 0.915663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
1c24a0baca3190f3377354af1640b63ea5cc31a0
| 140
|
py
|
Python
|
tests/ut/test_assets.py
|
cicheck/dfd
|
b02752f958cfea2f85222e2b4b3ba7e265a6152d
|
[
"MIT"
] | null | null | null |
tests/ut/test_assets.py
|
cicheck/dfd
|
b02752f958cfea2f85222e2b4b3ba7e265a6152d
|
[
"MIT"
] | 2
|
2021-12-31T17:44:20.000Z
|
2021-12-31T19:51:11.000Z
|
tests/ut/test_assets.py
|
cicheck/dfd
|
b02752f958cfea2f85222e2b4b3ba7e265a6152d
|
[
"MIT"
] | null | null | null |
import dlib
from dfd import assets
def test_face_landmarks_model_loads():
    """Smoke test: dlib can load the bundled face-landmarks model file."""
    model_path = str(assets.FACE_LANDMARKS_MODEL_PATH)
    dlib.shape_predictor(model_path)
| 20
| 63
| 0.828571
| 21
| 140
| 5.142857
| 0.714286
| 0.240741
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 140
| 7
| 63
| 20
| 0.864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1c7ae6f0b545f9be814b6838e856424cac2be0e9
| 4,822
|
py
|
Python
|
test/programytest/parser/template/graph_tests/rdf_tests/test_get.py
|
ItsPhant/program-y
|
c2b211fcaf8cedc7d6d95a8ea9470a913efa1622
|
[
"MIT"
] | null | null | null |
test/programytest/parser/template/graph_tests/rdf_tests/test_get.py
|
ItsPhant/program-y
|
c2b211fcaf8cedc7d6d95a8ea9470a913efa1622
|
[
"MIT"
] | null | null | null |
test/programytest/parser/template/graph_tests/rdf_tests/test_get.py
|
ItsPhant/program-y
|
c2b211fcaf8cedc7d6d95a8ea9470a913efa1622
|
[
"MIT"
] | 1
|
2020-02-21T17:58:05.000Z
|
2020-02-21T17:58:05.000Z
|
import xml.etree.ElementTree as ET
from programytest.parser.template.graph_tests.graph_test_client import TemplateGraphTestClient
class TemplateGraphGetTests(TemplateGraphTestClient):
    """Tests for resolving ``<get var="...">`` over RDF ``<tuple>``/``<select>`` queries.

    Each test seeds the bot's RDF store, parses a template expression, and
    checks the resolved text. Fixed: ``assertEquals`` (a deprecated alias,
    removed in Python 3.12) is replaced with ``assertEqual``.
    """

    def _seed_rdf(self, include_bird=False):
        """Populate the RDF store with the shared animal fixture.

        Insertion order matters for multi-result assertions, so BIRD (when
        requested) is added between ZEBRA and ELEPHANT, matching the
        original tests.
        """
        rdf = self.test_bot.brain.rdf
        rdf.add_entity("MONKEY", "LEGS", "2")
        rdf.add_entity("MONKEY", "HASFUR", "true")
        rdf.add_entity("ZEBRA", "LEGS", "4")
        if include_bird:
            rdf.add_entity("BIRD", "LEGS", "2")
        rdf.add_entity("ELEPHANT", "TRUNK", "true")

    def _resolve(self, template_xml):
        """Parse *template_xml*, build the template AST, and resolve it.

        Asserts each intermediate step is non-None and returns the resolved
        string.
        """
        template = ET.fromstring(template_xml)
        self.assertIsNotNone(template)
        ast = self.parser.parse_template_expression(template)
        self.assertIsNotNone(ast)
        result = ast.resolve(self.test_bot, self.test_clientid)
        self.assertIsNotNone(result)
        return result

    def test_tuples_single_var_single_result(self):
        self._seed_rdf()
        result = self._resolve("""
            <template>
                <get var="?x">
                    <tuple>
                        <select>
                            <vars>?x</vars>
                            <q><subj>?x</subj><pred>LEGS</pred><obj>2</obj></q>
                        </select>
                    </tuple>
                </get>
            </template>
            """)
        self.assertEqual("MONKEY", result)

    def test_tuples_multi_vars_single_results(self):
        self._seed_rdf()
        result = self._resolve("""
            <template>
                <get var="?x ?y">
                    <tuple>
                        <select>
                            <vars>?x ?y</vars>
                            <q><subj>?x</subj><pred>?y</pred><obj>2</obj></q>
                        </select>
                    </tuple>
                </get>
            </template>
            """)
        self.assertEqual("MONKEY LEGS", result)

    def test_tuples_single_var_multi_resultss(self):
        self._seed_rdf(include_bird=True)
        result = self._resolve("""
            <template>
                <get var="?x">
                    <tuple>
                        <select>
                            <vars>?x</vars>
                            <q><subj>?x</subj><pred>LEGS</pred><obj>2</obj></q>
                        </select>
                    </tuple>
                </get>
            </template>
            """)
        self.assertEqual("MONKEY BIRD", result)

    def test_tuples_multi_vars_multi_resultss(self):
        self._seed_rdf(include_bird=True)
        result = self._resolve("""
            <template>
                <get var="?x ?y">
                    <tuple>
                        <select>
                            <vars>?x ?y</vars>
                            <q><subj>?x</subj><pred>?y</pred><obj>2</obj></q>
                        </select>
                    </tuple>
                </get>
            </template>
            """)
        self.assertEqual("MONKEY LEGS BIRD LEGS", result)

    def test_get_from_tuple_from_get_from_var(self):
        # Stores a serialized tuple list in a var, then feeds it back
        # through <get var="head"/> inside <tuple>.
        result = self._resolve("""
            <template>
                <think>
                    <set var="head">[["?x", "TEST1"]]</set>
                </think>
                <get var="?x">
                    <tuple>
                        <get var="head"/>
                    </tuple>
                </get>
            </template>
            """)
        self.assertEqual("TEST1", result)
| 33.957746
| 94
| 0.55226
| 526
| 4,822
| 4.89924
| 0.119772
| 0.086923
| 0.098176
| 0.111758
| 0.86884
| 0.851766
| 0.830035
| 0.830035
| 0.830035
| 0.830035
| 0
| 0.004731
| 0.298631
| 4,822
| 141
| 95
| 34.198582
| 0.757244
| 0
| 0
| 0.853448
| 0
| 0.034483
| 0.369971
| 0.041477
| 0
| 0
| 0
| 0
| 0.172414
| 1
| 0.043103
| false
| 0
| 0.017241
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98cd3e9372dd6ff02a0d70c421700904adebc83f
| 5,985
|
py
|
Python
|
models/imagenet/arch/fbnet_v2/blocks_factory.py
|
a1004123217/pytorch-mobile
|
97974af3259a2073efbc334d57841efbd3eaadfb
|
[
"MIT"
] | null | null | null |
models/imagenet/arch/fbnet_v2/blocks_factory.py
|
a1004123217/pytorch-mobile
|
97974af3259a2073efbc334d57841efbd3eaadfb
|
[
"MIT"
] | null | null | null |
models/imagenet/arch/fbnet_v2/blocks_factory.py
|
a1004123217/pytorch-mobile
|
97974af3259a2073efbc334d57841efbd3eaadfb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
FBNet model building blocks factory
"""
import mobile_cv.arch.utils.helper as hp
import mobile_cv.common.misc.registry as registry
from . import basic_blocks as bb
from . import irf_block
# Registry of FBNet building-block constructors, keyed by primitive name.
PRIMITIVES = registry.Registry("blocks_factory")

# Each entry maps a primitive name to a factory with signature
# (in_channels, out_channels, stride, **kwargs) -> module.
# Naming scheme (inferred from the entries below — confirm against mobile_cv):
#   conv*  -> bb.ConvBNRelu, ir*/irf -> irf_block.IRFBlock,
#   kN -> kernel_size=N, hs -> relu_args="hswish", se -> se_args="se",
#   sehsig -> se_args="se_hsig", gN -> pw_groups=N.
_PRIMITIVES = {
    # Identity pass-through block.
    "skip": lambda in_channels, out_channels, stride, **kwargs: bb.Identity(
        in_channels, out_channels, stride
    ),
    # Plain conv+BN+ReLU blocks; kernel/padding variants.
    "conv": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(conv_args={"stride": stride}, kwargs=kwargs)
    ),
    "conv_k1": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(
            conv_args={"stride": stride, "kernel_size": 1, "padding": 0},
            kwargs=kwargs,
        )
    ),
    "conv_k3": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(
            conv_args={"stride": stride, "kernel_size": 3, "padding": 1},
            kwargs=kwargs,
        )
    ),
    "conv_k5": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(
            conv_args={"stride": stride, "kernel_size": 5, "padding": 2},
            kwargs=kwargs,
        )
    ),
    # Conv blocks with hswish activation.
    "conv_hs": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(
            conv_args={"stride": stride}, relu_args="hswish", kwargs=kwargs
        )
    ),
    "conv_k1_hs": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(
            conv_args={"stride": stride, "kernel_size": 1, "padding": 0},
            relu_args="hswish",
            kwargs=kwargs,
        )
    ),
    "conv_k3_hs": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(
            conv_args={"stride": stride, "kernel_size": 3, "padding": 1},
            relu_args="hswish",
            kwargs=kwargs,
        )
    ),
    "conv_k5_hs": lambda in_channels, out_channels, stride, **kwargs: bb.ConvBNRelu(
        in_channels,
        out_channels,
        **hp.merge(
            conv_args={"stride": stride, "kernel_size": 5, "padding": 2},
            relu_args="hswish",
            kwargs=kwargs,
        )
    ),
    # Inverted-residual (IRF) blocks.
    "irf": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels, out_channels, stride=stride, **kwargs
    ),
    "ir_k3": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels, out_channels, stride=stride, kernel_size=3, **kwargs
    ),
    "ir_k3_g2": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=3,
        pw_groups=2,
        **kwargs
    ),
    "ir_k5": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels, out_channels, stride=stride, kernel_size=5, **kwargs
    ),
    "ir_k5_g2": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(  # noqa
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=5,
        pw_groups=2,
        **kwargs
    ),
    # IRF blocks with hswish activation.
    "ir_k3_hs": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=3,
        relu_args="hswish",
        **kwargs
    ),
    "ir_k5_hs": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=5,
        relu_args="hswish",
        **kwargs
    ),
    # IRF blocks with squeeze-excite variants.
    "ir_k3_se": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=3,
        se_args="se",
        **kwargs
    ),
    "ir_k5_se": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=5,
        se_args="se",
        **kwargs
    ),
    "ir_k3_sehsig": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(  # noqa
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=3,
        se_args="se_hsig",
        **kwargs
    ),
    "ir_k5_sehsig": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(  # noqa
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=5,
        se_args="se_hsig",
        **kwargs
    ),
    "ir_k3_sehsig_hs": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(  # noqa
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=3,
        relu_args="hswish",
        se_args="se_hsig",
        **kwargs
    ),
    "ir_k5_sehsig_hs": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRFBlock(  # noqa
        in_channels,
        out_channels,
        stride=stride,
        kernel_size=5,
        relu_args="hswish",
        se_args="se_hsig",
        **kwargs
    ),
    # IRF pooling blocks; filter_kwargs drops kwargs IRPoolBlock does not accept.
    "ir_pool": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRPoolBlock(  # noqa
        in_channels,
        out_channels,
        stride=stride,
        **hp.filter_kwargs(irf_block.IRPoolBlock, kwargs)
    ),
    "ir_pool_hs": lambda in_channels, out_channels, stride, **kwargs: irf_block.IRPoolBlock(  # noqa
        in_channels,
        out_channels,
        stride=stride,
        relu_args="hswish",
        **hp.filter_kwargs(irf_block.IRPoolBlock, kwargs)
    ),
}
# Bulk-register every primitive in the registry at import time.
PRIMITIVES.register_dict(_PRIMITIVES)
| 31.5
| 102
| 0.600835
| 693
| 5,985
| 4.890332
| 0.109668
| 0.141635
| 0.184125
| 0.297433
| 0.872234
| 0.832989
| 0.802892
| 0.779876
| 0.760696
| 0.746533
| 0
| 0.01089
| 0.278864
| 5,985
| 189
| 103
| 31.666667
| 0.774328
| 0.026901
| 0
| 0.747191
| 0
| 0
| 0.078168
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022472
| 0
| 0.022472
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.